Ortho-NoS1GBM: add base library baseTool

dev-chenzenghui
chenzenghui 2025-08-15 15:58:24 +08:00
parent 298120f7dd
commit f423827e5f
135 changed files with 13044 additions and 0 deletions

Binary files not shown.

View File

@@ -0,0 +1,2 @@
PlatformToolSet=v142:VCToolArchitecture=Native32Bit:VCToolsVersion=14.29.30133:TargetPlatformVersion=10.0.19041.0:VcpkgTriplet=x64-windows:
Release|x64|D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\|

View File

@@ -0,0 +1,11 @@
<?xml version="1.0" encoding="utf-8"?>
<Project>
<ProjectOutputs>
<ProjectOutput>
<FullPath>D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\SIMOrthoProgram-S-SAR.exe</FullPath>
</ProjectOutput>
</ProjectOutputs>
<ContentFiles />
<SatelliteDlls />
<NonRecipeFileRefs />
</Project>

Binary files not shown.

View File

@@ -0,0 +1,47 @@
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\SIMOrthoProgram.vcxproj.CopyComplete
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\SIMOrthoProgram-S-SAR.exe
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\concrt140.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\msvcp140.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\msvcp140_1.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\msvcp140_2.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\msvcp140_atomic_wait.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\msvcp140_codecvt_ids.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\vccorlib140.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\vcruntime140.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\vcruntime140_1.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\vcamp140.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\vcomp140.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\boost_filesystem-vc142-mt-x64-1_82.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\gdal.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\zlib1.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\libcrypto-3-x64.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\libssl-3-x64.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\liblzma.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\qhull_r.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\jpeg62.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\tiff.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\geotiff.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\proj.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\sqlite3.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\libcurl.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\libpng16.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\Lerc.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\zstd.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\gif.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\netcdf.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\hdf5_hl.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\hdf5.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\libwebp.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\libsharpyuv.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\LIBPQ.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\pcre2-8.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\libexpat.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\libxml2.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\iconv-2.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\geos_c.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\geos.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\json-c.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\openjp2.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\spatialite.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\freexl-1.dll
D:\estar-proj\SIMOrthoProgram-Orth_GF3-Strip-master\simorthoprogram-orth_s_sar-strip\x64\Release\minizip.dll

Binary files not shown.

View File

@@ -0,0 +1,166 @@
#
# Library for model computation
#
import cython
cimport cython # must be cimported
import numpy as np
cimport numpy as np
from libc.math cimport pi
from scipy.optimize import leastsq
import random
import logging
logger = logging.getLogger("mylog")
def WMCModel(param_arr,sample_lai,sample_soil,sample_inc,sample_sigma):
""" WMC模型 增加 归一化植被指数
Args:
param_arr (np.ndarray):
sample_lai (double):
sample_soil (double):
sample_inc (double):
sample_sigma (double): 线
Returns:
double:
"""
# 映射参数,方便修改模型
A,B,C,D,M,N=param_arr # 在这里修改模型
V_lai=sample_lai
#V_lai=E*sample_lai+F
exp_gamma=np.exp(-2*B*((V_lai*D+C))*(1/np.cos(sample_inc)))
sigma_soil=M*sample_soil+N
sigma_veg=A*((V_lai))*np.cos(sample_inc)
f_veg=1
result=sigma_veg*(1-exp_gamma)+sigma_soil*exp_gamma-sample_sigma
return result
def train_WMCmodel(lai_water_inc_sigma_list,params_X0,train_err_image_path,draw_flag=True):
""" 训练模型参数
Args:
lai_waiter_inc_sigma_list (list): 使
"""
def f(X):
eqs=[]
for lai_water_inc_sigma_item in lai_water_inc_sigma_list:
sample_lai=lai_water_inc_sigma_item[4]
sample_sigma=lai_water_inc_sigma_item[5] # 5: csv_sigma, 8:tiff_sigma
sample_soil=lai_water_inc_sigma_item[6]
sample_inc=lai_water_inc_sigma_item[7]
FVC=lai_water_inc_sigma_item[8]
eqs.append(WMCModel(X,sample_lai,sample_soil,sample_inc,sample_sigma))
return eqs
X0 = params_X0 # initial guess
# logger.info(str(X0))
h = leastsq(f, X0)
# logger.info(h[0],h[1])
err_f=f(h[0])
x_arr=[lai_waiter_inc_sigma_item[4] for lai_waiter_inc_sigma_item in lai_water_inc_sigma_list]
# Sort samples by error magnitude
# logger.info("Training set:\npoints sorted by error\ncount {}\nindex\terror\tsample info".format(str(np.array(err_f).shape)))
# for i in np.argsort(np.array(err_f)):
# logger.info('{}\t{}\t{}'.format(i,err_f[i],str(lai_water_inc_sigma_list[i])))
# logger.info("\nend of error-sorted output\n")
if draw_flag:
# logger.info(err_f)
# logger.info(np.where(np.abs(err_f)<10))
from matplotlib import pyplot as plt
plt.scatter(x_arr,err_f)
plt.title("equation-err")
plt.savefig(train_err_image_path,dpi=600)
plt.show()
return h[0]
def test_WMCModel(lai_waiter_inc_sigma_list,param_arr,lai_X0,test_err_image_path,draw_flag=True):
""" 测试模型训练结果
Args:
lai_waiter_inc_sigma_list (list): 使
A (_type_): A
B (_type_): B
C (_type_): C
D (_type_): D
M (_type_): M
N (_type_): N
lai_X0 (_type_):
Returns:
list: [sample_lai,err,predict]
"""
err=[]
err_f=[]
x_arr=[]
err_lai=[]
for lai_waiter_inc_sigma_item in lai_waiter_inc_sigma_list:
sample_time,sample_code,sample_lon,sample_lat,sample_lai,csv_sigma,sample_soil,sample_inc,sample_sigma=lai_waiter_inc_sigma_item
def f(X):
lai=X[0]
eqs=[WMCModel(param_arr,lai,sample_soil,sample_inc,csv_sigma)]
return eqs
X0=lai_X0
h = leastsq(f, X0)
temp_err=h[0]-sample_lai
err_lai.append(temp_err[0]) # residual of the LAI prediction
err.append([sample_lai,temp_err[0],h[0][0],sample_code])
err_f.append(f(h[0])[0]) # equation residual
x_arr.append(sample_lai)
# Sort samples by error magnitude
# logger.info("Test set:\npoints sorted by error\ncount {}\nindex\terror\tequation residual\tsample info".format(str(np.array(err_lai).shape)))
# for i in np.argsort(np.array(err_lai)):
# logger.info('{}\t{}\t{}\t{}'.format(i,err_lai[i],err_f[i],str(lai_waiter_inc_sigma_list[i])))
# logger.info("\nend of error-sorted output\n")
if draw_flag:
from matplotlib import pyplot as plt
plt.scatter(x_arr,err_lai)
plt.title("equation-err")
plt.savefig(test_err_image_path,dpi=600)
plt.show()
return err
def processs_WMCModel(param_arr,lai_X0,sigma,inc_angle,soil_water):
if(sigma<0 ):
return np.nan
def f(X):
lai=X[0]
eqs=[WMCModel(param_arr,lai,soil_water,inc_angle,sigma )]
return eqs
h = leastsq(f, [lai_X0])
return h[0][0]
# Cython extension entry point
cpdef np.ndarray[double,ndim=2] process_tiff(np.ndarray[double,ndim=2] sigma_tiff,
np.ndarray[double,ndim=2] inc_tiff,
np.ndarray[double,ndim=2] soil_water_tiff,
np.ndarray[double,ndim=1] param_arr,
double lai_X0):
cdef np.ndarray[double,ndim=2] result=sigma_tiff
cdef int param_arr_length=param_arr.shape[0]
cdef int height=sigma_tiff.shape[0]
cdef int width=sigma_tiff.shape[1]
cdef int i=0
cdef int j=0
cdef double temp=0
while i<height:
j=0
while j<width:
temp = processs_WMCModel(param_arr,lai_X0,sigma_tiff[i,j],inc_tiff[i,j],soil_water_tiff[i,j])
temp=temp if temp<10 and temp>=0 else np.nan
result[i,j]=temp
j=j+1
i=i+1
return result
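An editorial usage sketch (not part of the commit), assuming the extension has been built with the setup.py below and that each sample tuple follows the index layout used above (index 4 = LAI, 5 = sigma from the CSV, 6 = soil moisture, 7 = incidence angle); `samples`, `sigma_arr`, `inc_arr` and `soil_arr` are illustrative names only:
# if __name__ == '__main__':
#     import numpy as np
#     params0 = np.array([0.1, 0.1, 0.1, 0.1, 0.1, 0.1])  # initial guess for [A, B, C, D, M, N]
#     fitted = train_WMCmodel(samples, params0, "train_err.png", draw_flag=False)
#     errors = test_WMCModel(samples, fitted, 1.0, "test_err.png", draw_flag=False)
#     # sigma_arr, inc_arr, soil_arr: 2-D float64 arrays read from the corresponding rasters
#     lai_map = process_tiff(sigma_arr, inc_arr, soil_arr, np.array(fitted, dtype=np.double), 1.0)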

View File

@@ -0,0 +1,45 @@
from setuptools import setup
from setuptools.extension import Extension
from Cython.Distutils import build_ext
from Cython.Build import cythonize
import numpy
from pathlib import Path
import shutil
class MyBuildExt(build_ext):
def run(self):
build_ext.run(self)
build_dir = Path(self.build_lib)
root_dir = Path(__file__).parent
target_dir = build_dir if not self.inplace else root_dir
self.copy_file(Path('./LAIProcess') / '__init__.py', root_dir, target_dir)
#self.copy_file(Path('./pkg2') / '__init__.py', root_dir, target_dir)
self.copy_file(Path('.') / '__init__.py', root_dir, target_dir)
def copy_file(self, path, source_dir, destination_dir):
if not (source_dir / path).exists():
return
shutil.copyfile(str(source_dir / path), str(destination_dir / path))
setup(
name="MyModule",
ext_modules=cythonize(
[
#Extension("pkg1.*", ["root/pkg1/*.py"]),
Extension("pkg2.*", ["./LAIProcess.pyx"]),
#Extension("1.*", ["root/*.py"])
],
build_dir="build",
compiler_directives=dict(
always_allow_keywords=True
)),
cmdclass=dict(
build_ext=MyBuildExt
),
packages=[],
include_dirs=[numpy.get_include()],
)
# Command: python setup.py build_ext --inplace

View File

@@ -0,0 +1,117 @@
# -*- encoding: utf-8 -*-
# code from https://blog.csdn.net/theonegis/article/details/54427906
from osgeo import gdal
from osgeo import osr
import numpy as np
def getSRSPair(dataset):
"""
Get the projected spatial reference and the geographic spatial reference of the given dataset
:param dataset: GDAL dataset
:return: projected SRS and geographic SRS
"""
prosrs = osr.SpatialReference()
prosrs.ImportFromWkt(dataset.GetProjection())
geosrs = prosrs.CloneGeogCS()
return prosrs, geosrs
def geo2lonlat(dataset, x, y):
"""
Convert projected coordinates to longitude/latitude; the projected CRS is taken from the given dataset
:param dataset: GDAL dataset
:param x: projected x coordinate
:param y: projected y coordinate
:return: the (lon, lat) corresponding to the projected (x, y)
"""
prosrs, geosrs = getSRSPair(dataset)
ct = osr.CoordinateTransformation(prosrs, geosrs)
coords = ct.TransformPoint(x, y)
return coords[:2]
def lonlat2geo(dataset, lon, lat):
"""
Convert longitude/latitude to projected coordinates; the projected CRS is taken from the given dataset
:param dataset: GDAL dataset
:param lon: geographic longitude
:param lat: geographic latitude
:return: the projected coordinates corresponding to (lon, lat)
"""
prosrs, geosrs = getSRSPair(dataset)
ct = osr.CoordinateTransformation(geosrs, prosrs)
coords = ct.TransformPoint(lat, lon)
return coords[:2]
def imagexy2geo(dataset, row, col):
"""
Convert image (row, col) coordinates to projected or geographic coordinates using the GDAL six-parameter (geotransform) model
:param dataset: GDAL dataset
:param row: pixel row
:param col: pixel column
:return: the projected or geographic (x, y) corresponding to (row, col)
"""
trans = dataset.GetGeoTransform()
px = trans[0] + col * trans[1] + row * trans[2]
py = trans[3] + col * trans[4] + row * trans[5]
return px, py
def geo2imagexy(dataset, x, y):
"""
Convert projected or geographic coordinates to image (row, col) coordinates using the GDAL six-parameter (geotransform) model
:param dataset: GDAL dataset
:param x: projected or geographic x coordinate
:param y: projected or geographic y coordinate
:return: the image (col, row) corresponding to (x, y)
"""
trans = dataset.GetGeoTransform()
a = np.array([[trans[1], trans[2]], [trans[4], trans[5]]])
b = np.array([x - trans[0], y - trans[3]])
return np.linalg.solve(a, b) # solve the 2x2 linear system with numpy.linalg.solve
def test1():
gdal.AllRegister()
tif = 'D:/DATA/testdata/GLCFCS30_E110N25.tif'
# dataset = gdal.Open(r"D:\\DATA\\雷达测试\\GaoFen3_20200528_HH_DB.tif")
dataset = gdal.Open(tif)
print('dataset projection:')
print(dataset.GetProjection())
print('dataset size (rows, cols):')
print('(%s %s)' % (dataset.RasterYSize, dataset.RasterXSize))
x = 793214.118
y = 2485865.527
lon = 113.84897082317516
lat = 22.453998686022448
row = 24576
col = 22540
print('image coordinates -> projected coordinates:')
coords = imagexy2geo(dataset, row, col)
print('(%s, %s)->(%s, %s)' % (row, col, coords[0], coords[1]))
print('projected coordinates -> image coordinates:')
coords = geo2imagexy(dataset, x, y)
col = coords[0]
row = coords[1]
print('(%s, %s)->(%s, %s)' % (x, y, coords[0], coords[1]))
print('projected coordinates -> lon/lat:')
coords = geo2lonlat(dataset, x, y)
print('(%s, %s)->(%s, %s)' % (x, y, coords[0], coords[1]))
print('lon/lat -> projected coordinates:')
coords = lonlat2geo(dataset, lon, lat)
print('(%s, %s)->(%s, %s)' % (lon, lat, coords[0], coords[1]))
coords1 = geo2lonlat(dataset, 657974.118, 2633321.527)
print(coords1)
coords2 = geo2lonlat(dataset, 793214.118, 2485865.527)
print(coords2)
pass
# if __name__ == '__main__':
#
# print('done')

View File

@@ -0,0 +1,156 @@
"""
@Project microproduct
@File DEMJoint
@Function Main functions for DEM processing (mosaic, clip, resample)
@Author LMM
@Date 2021/10/19 14:39
@Version 1.0.0
"""
from osgeo import gdal, osr
import os
import numpy as np
class DEMProcess:
"""
DEM mosaicking and resampling
"""
def __init__(self):
pass
@staticmethod
def get_extent(fn):
'''
Adapted from: https://blog.csdn.net/XBR_2014/article/details/85255412
'''
ds = gdal.Open(fn)
rows = ds.RasterYSize
cols = ds.RasterXSize
# corner coordinates of the image from the geotransform
gt = ds.GetGeoTransform()
minx = gt[0]
maxy = gt[3]
maxx = gt[0] + gt[1] * cols
miny = gt[3] + gt[5] * rows
return (minx, maxy, maxx, miny)
@staticmethod
def img_mosaic(in_files, out_dem_path):
# Compare the extents pairwise and keep the enclosing corner coordinates,
# i.e. the four corner coordinates of the mosaicked image
minX, maxY, maxX, minY = DEMProcess.get_extent(in_files[0])
for fn in in_files[1:]:
minx, maxy, maxx, miny = DEMProcess.get_extent(fn)
minX = min(minX, minx)
maxY = max(maxY, maxy)
maxX = max(maxX, maxx)
minY = min(minY, miny)
# row/column count of the output image
in_ds = gdal.Open(in_files[0])
bands_num = in_ds.RasterCount
gt = in_ds.GetGeoTransform()
rows = int((maxY - minY) / abs(gt[5]))
cols = int((maxX - minX) / gt[1])
# raster data type of the output
datatype = gdal.GDT_UInt16
# create the output image
driver = gdal.GetDriverByName('GTiff')
out_dem = os.path.join(out_dem_path, 'mosaic0.tif')
out_ds = driver.Create(out_dem, cols, rows, bands_num, datatype)
out_ds.SetProjection(in_ds.GetProjection())
gt = list(in_ds.GetGeoTransform())
gt[0], gt[3] = minX, maxY
out_ds.SetGeoTransform(gt)
for fn in in_files:
in_ds = gdal.Open(fn)
x_size = in_ds.RasterXSize
y_size = in_ds.RasterYSize
trans = gdal.Transformer(in_ds, out_ds, [])
success, xyz = trans.TransformPoint(False, 0, 0)
x, y, z = map(int, xyz)
for i in range(1, bands_num + 1):
data = in_ds.GetRasterBand(i).ReadAsArray()
out_band = out_ds.GetRasterBand(i)
out_data = out_band.ReadAsArray(x, y, x_size, y_size)
data = np.maximum(data, out_data)
out_band.WriteArray(data, x, y)
del in_ds, out_band, out_ds
@staticmethod
def dem_clip(OutFilePath, DEMFilePath, SelectArea):
'''
Clip the DEM to the selected area and write the result
args:
OutFilePath: output path of the clipped DEM
DEMFilePath: path of the DEM to clip
SelectArea: list [(xmin,ymax),(xmax,ymin)], the selection box (upper-left and lower-right corners)
'''
DEM_ptr = gdal.Open(DEMFilePath)
DEM_GeoTransform = DEM_ptr.GetGeoTransform() # read the geotransform of the image
DEM_InvGeoTransform = gdal.InvGeoTransform(DEM_GeoTransform)
SelectAreaArrayPoints = [gdal.ApplyGeoTransform(DEM_InvGeoTransform, p[0], p[1]) for p in SelectArea]
SelectAreaArrayPoints = list(map(lambda p: (int(p[0]), int(p[1])), SelectAreaArrayPoints)) # convert to integer pixel coordinates
[(ulx, uly), (brx, bry)] = SelectAreaArrayPoints
rowCount, colCount = bry - uly, brx - ulx
# geotransform of the output DEM
Out_Transfrom = list(DEM_GeoTransform)
Out_Transfrom[0] = SelectArea[0][0]
Out_Transfrom[3] = SelectArea[0][1]
# build the output DEM
Bands_num = DEM_ptr.RasterCount
gtiff_driver = gdal.GetDriverByName('GTiff')
datatype = gdal.GDT_UInt16
out_dem = gtiff_driver.Create(OutFilePath, colCount, rowCount, Bands_num, datatype)
out_dem.SetProjection(DEM_ptr.GetProjection())
out_dem.SetGeoTransform(Out_Transfrom)
for i in range(1, Bands_num + 1):
data_band = DEM_ptr.GetRasterBand(i)
out_band = out_dem.GetRasterBand(i)
data = data_band.ReadAsArray(ulx, uly, colCount, rowCount)
out_band.WriteArray(data)
del out_dem
@staticmethod
def dem_resample(in_dem_path, out_dem_path):
'''
Resample the DEMs in a folder; the default CRS is WGS84
args:
in_dem_path: input DEM folder path
out_dem_path: output DEM folder path
'''
# read all DEM files in the folder
dem_file_paths=[os.path.join(in_dem_path,dem_name) for dem_name in os.listdir(in_dem_path) if dem_name.find(".tif")>=0 and dem_name.find(".tif.")==-1]
spatialreference=osr.SpatialReference()
spatialreference.SetWellKnownGeogCS("WGS84") # geographic coordinates, unit: degree
spatialproj=spatialreference.ExportToWkt() # export the SRS as WKT
# mosaic all DEMs into one virtual raster
mergeFile =gdal.BuildVRT(os.path.join(out_dem_path,"mergeDEM.tif"), dem_file_paths)
out_DEM=os.path.join(out_dem_path,"mosaic.tif")
gdal.Warp(out_DEM,
mergeFile,
format="GTiff",
dstSRS=spatialproj,
dstNodata=-9999,
outputType=gdal.GDT_Float32)
return out_DEM
# if __name__ == "__main__":
# DEMProcess = DEMProcess()
# in_dem_path = r'F:\大气延迟\out_dem'
# out_dem_path = r'F:\大气延迟\out_dem'
# DEMProcess.dem_resample(in_dem_path, out_dem_path)
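An editorial sketch for the clipping helper above (not part of the commit); the paths and the selection box are placeholders:
# if __name__ == "__main__":
#     select_area = [(113.0, 23.5), (114.0, 22.5)]  # [(xmin, ymax), (xmax, ymin)], upper-left and lower-right corners
#     DEMProcess.dem_clip(r'D:\DATA\dem_clip.tif', r'D:\DATA\mosaic.tif', select_area)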

View File

@@ -0,0 +1,179 @@
# -*- coding: UTF-8 -*-
"""
@Project microproduct
@File ScatteringAuxData.py
@Function Backscatter auxiliary data
@Author SHJ
@Contact
@Date 2022/6/29
@Version 1.0.0
Revision history
[sequence] [date] [author] [description]
1 2022-6-29 石海军 1. Support reading both GF3 L1A metadata files and ortho-rectified metadata files
"""
import json
import logging
from xml.etree.ElementTree import ElementTree
import math
import xmltodict
logger = logging.getLogger("mylog")
class GF3L1AMetaData:
def __init__(self):
pass
@staticmethod
def get_QualifyValue(meta_file_path, polarization):
tree = ElementTree()
tree.parse(meta_file_path)
root = tree.getroot()
QualifyValue = float(root.find('imageinfo').find('QualifyValue').find(polarization).text)
return QualifyValue
@staticmethod
def get_SubQualifyValue(meta_file_path, polarization, pol_id):
try:
with open(meta_file_path, 'r', encoding='utf-8') as fp:
HeaderFile_dom_str = fp.read()
HeaderFile_dom = xmltodict.parse(HeaderFile_dom_str) # parse the XML into a dict
HeaderFile_dom_json = json.loads(json.dumps(HeaderFile_dom))
QualifyValue = float(HeaderFile_dom_json['product']['imageinfo']['QualifyValue'][pol_id][polarization])
return QualifyValue
except Exception as e:
raise Exception('get QualifyValue failed')
@staticmethod
def get_Kdb(meta_file_path, polarization):
tree = ElementTree()
tree.parse(meta_file_path)
root = tree.getroot()
Kdb = float(root.find('processinfo').find('CalibrationConst').find(polarization).text) if root.find('processinfo').find('CalibrationConst').find(polarization).text!="NULL" else 0
return Kdb
class OrthoMetaData:
def __init__(self):
pass
@staticmethod
def get_QualifyValue(meta_file_path, polarization):
tree = ElementTree()
tree.parse(meta_file_path)
root = tree.getroot()
QualifyValue = float(root.find('l1aInfo').find('imageinfo').find('QualifyValue').find(polarization).text)
return QualifyValue
@staticmethod
def get_Kdb(meta_file_path, polarization):
tree = ElementTree()
tree.parse(meta_file_path)
root = tree.getroot()
Kdb = float(root.find('l1aInfo').find('processinfo').find('CalibrationConst').find(polarization).text)
return Kdb
@staticmethod
def get_RadarCenterFrequency(meta_file_path):
# radar centre frequency
tree = ElementTree()
tree.parse(meta_file_path)
root = tree.getroot()
RadarCenterFrequency = float(root.find('sensor').find('RadarCenterFrequency').text)
return RadarCenterFrequency
@staticmethod
def get_lamda(meta_file_path):
# radar wavelength, unit: m
tree = ElementTree()
tree.parse(meta_file_path)
root = tree.getroot()
lamda = float(root.find('sensor').find('lamda').text)
return lamda
class MetaDataHandler:
def __init__(self):
pass
@staticmethod
def get_QualifyValue(meta_file_path, polarization):
try:
QualifyValue = OrthoMetaData.get_QualifyValue(meta_file_path, polarization)
except Exception:
logger.warning('OrthoMetaData.get_QualifyValue() error!')
QualifyValue = GF3L1AMetaData.get_QualifyValue(meta_file_path, polarization)
logger.info('GF3L1AMetaData.get_QualifyValue() success!')
return QualifyValue
@staticmethod
def get_SubQualifyValue(meta_file_path, polarization, pol_id):
try:
QualifyValue = OrthoMetaData.get_QualifyValue(meta_file_path, polarization)
except Exception:
logger.warning('OrthoMetaData.get_QualifyValue() error!')
QualifyValue = GF3L1AMetaData.get_SubQualifyValue(meta_file_path, polarization, pol_id)
logger.info('GF3L1AMetaData.get_QualifyValue() success!')
return QualifyValue
@staticmethod
def get_Kdb(meta_file_path, polarization):
try:
Kdb = OrthoMetaData.get_Kdb(meta_file_path, polarization)
except Exception:
logger.warning('OrthoMetaData.get_Kdb() error!')
Kdb = GF3L1AMetaData.get_Kdb(meta_file_path, polarization)
logger.info('GF3L1AMetaData.get_Kdb() success!')
return Kdb
@staticmethod
def get_RadarCenterFrequency(meta_file_path):
# radar centre frequency, unit: GHz
RadarCenterFrequency = OrthoMetaData.get_RadarCenterFrequency(meta_file_path)
return RadarCenterFrequency
@staticmethod
def get_lamda(meta_file_path):
# radar wavelength, unit: m
lamda = OrthoMetaData.get_lamda(meta_file_path)
return lamda
class Calibration:
def __init__(self):
pass
@staticmethod
def get_Calibration_coefficient(meta_file_path, polarization):
calibration = [0, 0, 0, 0]
for i in polarization:
if i == 'HH':
quality = MetaDataHandler.get_QualifyValue(meta_file_path, i)
kdb = MetaDataHandler.get_Kdb(meta_file_path, i)
data_value = ((quality/32767)**2) * (10**((kdb/10)*-1))
calibration[0] = math.sqrt(data_value)
if i == 'HV':
quality = MetaDataHandler.get_QualifyValue(meta_file_path, i)
kdb = MetaDataHandler.get_Kdb(meta_file_path, i)
data_value = ((quality/32767)**2) * (10**((kdb/10)*-1))
calibration[1] = math.sqrt(data_value)
if i == 'VH':
quality = MetaDataHandler.get_QualifyValue(meta_file_path, i)
kdb = MetaDataHandler.get_Kdb(meta_file_path, i)
data_value = ((quality/32767)**2) * (10**((kdb/10)*-1))
calibration[2] = math.sqrt(data_value)
if i == 'VV':
quality = MetaDataHandler.get_QualifyValue(meta_file_path, i)
kdb = MetaDataHandler.get_Kdb(meta_file_path, i)
data_value = ((quality/32767)**2) * (10**((kdb/10)*-1))
calibration[3] = math.sqrt(data_value)
return calibration
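Editorial note (inferred from the code above, not stated in the commit): the coefficient appears to be sqrt((QualifyValue/32767)^2 * 10^(-Kdb/10)), so squaring a DN value multiplied by it reproduces the familiar GF-3 calibration 10*log10((DN*QualifyValue/32767)^2) - Kdb in linear power. A minimal sketch under that assumption; meta_xml_path and dn_array are placeholders:
# import numpy as np
# coeffs = Calibration.get_Calibration_coefficient(meta_xml_path, ['HH'])
# sigma0_hh_linear = (dn_array * coeffs[0]) ** 2   # calibrated backscatter, linear power
# sigma0_hh_db = 10 * np.log10(sigma0_hh_linear)   # same value in dB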
# if __name__ == '__main__':
# A = ScatteringAuxData()
# dir = 'G:\MicroWorkspace\C-SAR\AuxSAR\GF3_KAS_FSII_020008_E113.2_N23.1_20200528_L1A_HHHV_L10004829485_geo/'
# path = dir + 'GF3_KAS_FSII_020008_E113.2_N23.1_20200528_L1A_HHHV_L10004829485.meta.xml'
# path1 = dir + 'OrthoProduct.meta.xml'
# t1 = A.get_QualifyValue(path, 'HH')
# t2 = A.get_Kdb(path, 'HH')
# t3 = A.get_RadarCenterFrequency(path)
# t4 = A.get_lamda(path)
# pass

View File

@@ -0,0 +1,561 @@
# -*- coding: UTF-8 -*-
"""
@Project microproduct
@File PreProcess.py
@Function coordinate transformation, CRS conversion, image clipping, reprojection and resampling
@Author LMM
@Date 2021/8/25 14:17
@Version 1.0.0
"""
from shapely.geometry import Polygon # the gdal imports must come after this line, otherwise errors occur
from osgeo import gdal
from osgeo import gdalconst
from osgeo import osr
from osgeo import ogr
import os
import cv2
import numpy as np
import shutil
import scipy.spatial.transform
import scipy.spatial.transform._rotation_groups # explicit import to avoid packaging errors
import scipy.special.cython_special # explicit import to avoid packaging errors
import shapefile
from shapely.errors import TopologicalError
from tool.algorithm.image.ImageHandle import ImageHandler
import logging
logger = logging.getLogger("mylog")
os.environ['PROJ_LIB'] = os.getcwd()
class PreProcess:
"""
Preprocessing: co-register all images
"""
def __init__(self):
self._ImageHandler = ImageHandler()
pass
def cal_scopes(self, processing_paras):
# compute the ROI extents
scopes = ()
for key, value in processing_paras.items():
if 'ori_sim' in key:
scopes += (ImageHandler.get_scope_ori_sim(value),)
if(processing_paras['box'] != "" and processing_paras['box'] != "empty"):
scopes += self.box2scope(processing_paras['box'])
return scopes
def cal_scopes_roi(self, processing_paras):
return self.intersect_polygon(self.cal_scopes(processing_paras))
def cut_geoimg(self,workspace_preprocessing_path, para_names_geo, processing_paras):
self.check_img_projection(workspace_preprocessing_path, para_names_geo, processing_paras)
# compute the ROI extents
scopes = self.cal_scopes(processing_paras)
# compute the image footprints and their intersection
intersect_shp_path = os.path.join(workspace_preprocessing_path, 'IntersectPolygon.shp')
scopes_roi = self.cal_intersect_shp(intersect_shp_path, para_names_geo, processing_paras, scopes)
# clipping
# clip the images: the SAR image and the other images
cutted_img_paths = self.cut_imgs(workspace_preprocessing_path, para_names_geo, processing_paras, intersect_shp_path)
return cutted_img_paths, scopes_roi
def preprocessing(self, para_names, ref_img_name, processing_paras, workspace_preprocessing_path, workspace_preprocessed_path):
# read each image and check its coordinate system
self.check_img_projection(workspace_preprocessing_path, para_names, processing_paras)
# compute the image footprints and their intersection
intersect_shp_path = os.path.join(workspace_preprocessing_path, 'IntersectPolygon.shp')
self.cal_intersect_shp(intersect_shp_path, para_names, processing_paras,
self.box2scope(processing_paras['box']))
logger.info('create intersect shp success!')
# clip the images: the SAR image and the other images
cutted_img_paths = self.cut_imgs(workspace_preprocessing_path, para_names, processing_paras,
intersect_shp_path)
logger.info('cut images success!')
# resample to the resolution of the SAR image and save to the temporary directory
preprocessed_paras = self.resampling_img(workspace_preprocessed_path, para_names, cutted_img_paths,cutted_img_paths[ref_img_name])
# clean up the preprocessing cache files
logger.info('preprocess_handle success!')
return preprocessed_paras # cutted_img_paths
def get_ref_inf(self, ref_img_path):
"""获取参考影像的图像信息"""
ref_img_path = ref_img_path
cols = ImageHandler.get_img_width(ref_img_path)
rows = ImageHandler.get_img_height(ref_img_path)
proj = ImageHandler.get_projection(ref_img_path)
geo = ImageHandler.get_geotransform(ref_img_path)
return ref_img_path, cols, rows, proj, geo
def check_img_projection(self, out_dir, para_names, processing_paras):
"""
Read each image and check its coordinate system;
convert projected-CRS images to geographic-CRS images (EPSG:4326)
:param para_names: names of the parameters to check
"""
if len(para_names) == 0:
return False
for name in para_names:
proj = ImageHandler.get_projection(processing_paras[name])
keyword = proj.split("[", 2)[0]
if keyword == "PROJCS":
# projected CRS -> geographic CRS
para_dir = os.path.split(processing_paras[name])
out_para = os.path.join(out_dir, para_dir[1].split(".", 1)[0] + "_EPSG4326.tif")
self.trans_epsg4326(out_para, processing_paras[name])
processing_paras[name] = out_para
elif len(keyword) == 0 or keyword.strip() == "" or keyword.isspace() is True:
raise Exception('coordinate is missing!')
def preprocessing_oh2004(self, para_names, processing_paras, workspace_preprocessing_path, workspace_preprocessed_path):
# read each image and check its coordinate system
self.check_img_projection(workspace_preprocessing_path, para_names, processing_paras)
# compute the image footprints and their intersection
intersect_shp_path = os.path.join(workspace_preprocessing_path, 'IntersectPolygon.shp')
scopes = self.cal_intersect_shp(intersect_shp_path, para_names, processing_paras,
self.box2scope(processing_paras['box']))
logger.info('create intersect shp success!')
# clip the images: the SAR image and the other images
cutted_img_paths = self.cut_imgs(workspace_preprocessed_path, para_names, processing_paras,
intersect_shp_path)
logger.info('cut images success!')
# resample to the resolution of the SAR image and save to the temporary directory
return cutted_img_paths, scopes
@staticmethod
def lonlat2geo(lat, lon):
"""
Convert WGS84 geographic coordinates to projected (UTM zone 49N, EPSG:32649) coordinates
Param: lat - WGS_1984 latitude
Param: lon - WGS_1984 longitude
Returns the converted x, y coordinates
"""
dstsrs1 = osr.SpatialReference()
dstsrs1.ImportFromEPSG(32649)
dstsrs2 = osr.SpatialReference()
dstsrs2.ImportFromEPSG(4326)
ct = osr.CoordinateTransformation(dstsrs2, dstsrs1)
coords = ct.TransformPoint(lat, lon)
# print("输出转换后的坐标x,y:",coords[:2])
return coords[:2]
@staticmethod
def trans_geogcs2projcs(out_path, in_path):
"""
:param out_path: output path of the WGS84/UTM-projected image
:param in_path: input path of the geographic-coordinate image
"""
# create the output directory if needed
if os.path.exists(os.path.split(out_path)[0]) is False:
os.makedirs(os.path.split(out_path)[0])
options = gdal.WarpOptions(format='GTiff', srcSRS='EPSG:4326', dstSRS='EPSG:32649')
gdal.Warp(out_path, in_path, options=options)
@staticmethod
def trans_projcs2geogcs(out_path, in_path):
"""
:param out_path: output path of the WGS84 geographic-coordinate image
:param in_path: input path of the WGS84/UTM-projected image
"""
# create the output directory if needed
if os.path.exists(os.path.split(out_path)[0]) is False:
os.makedirs(os.path.split(out_path)[0])
options = gdal.WarpOptions(format='GTiff', srcSRS='EPSG:32649', dstSRS='EPSG:4326')
gdal.Warp(out_path, in_path, options=options)
@staticmethod
def trans_projcs2geogcs(out_path, in_path ,EPSG_src=32649,EPSG_dst=4326):
"""
:param out_path: output path of the geographic-coordinate image
:param in_path: input path of the projected-coordinate image
:param EPSG_src: source CRS EPSG code
:param EPSG_dst: target CRS EPSG code
"""
str_EPSG_src = 'EPSG:'+ str(EPSG_src)
str_EPSG_dst = 'EPSG:'+ str(EPSG_dst)
# create the output directory if needed
if os.path.exists(os.path.split(out_path)[0]) is False:
os.makedirs(os.path.split(out_path)[0])
options = gdal.WarpOptions(format='GTiff', srcSRS=str_EPSG_src, dstSRS=str_EPSG_dst)
gdal.Warp(out_path, in_path, options=options)
@staticmethod
def trans_epsg4326(out_path, in_path):
OutTile = gdal.Warp(out_path, in_path,
dstSRS='EPSG:4326',
resampleAlg=gdalconst.GRA_Bilinear
)
OutTile = None
return True
@staticmethod
def box2scope(str_box):
roi_box = ()
if str_box == '' or str_box == 'empty':
return roi_box
box_list = [float(num) for num in list(str_box.split(';'))]
if len(box_list) == 4:
roi_box = ([[box_list[2], box_list[1]], [box_list[3], box_list[1]], [box_list[2], box_list[0]],
[box_list[3], box_list[0]]],)
return roi_box
def cal_intersect_shp(self, shp_path, para_names,processing_paras, add_scope =()):
"""
:param shp_path: output path of the intersection polygon shapefile
:param para_names: names of the images used to compute the intersection
:return: the intersection polygon coordinates
"""
scopes = ()
if len(add_scope) != 0:
scopes += add_scope
for name in para_names:
scope_tuple = (self._ImageHandler.get_scope(processing_paras[name]),)
scopes += scope_tuple
for n, scope in zip( range(len(scopes)), scopes):
logging.info("scope" + str(n) + ":%s", scope)
intersect_polygon = self.intersect_polygon(scopes)
if intersect_polygon is None:
logger.error('image range does not overlap!')
raise Exception('create intersect shp fail!')
logging.info("scope roi :%s", intersect_polygon)
if self.write_polygon_shp(shp_path, intersect_polygon, 4326) is False:
raise Exception('create intersect shp fail!')
return intersect_polygon
@staticmethod
def intersect_polygon(scopes_tuple):
"""
Compute the intersection of multiple polygons; note each polygon is replaced by its convex hull before intersecting
:param scopes_tuple: tuple containing several area-coordinate lists
:return: intersection coordinates ((x0, y0), (x1, y1), ..., (xn, yn))
"""
if len(scopes_tuple) < 2:
logger.error('len(scopes_tuple) < 2')
# return # todo: changed because with a single scene no intersection area can be determined
try:
# the Polygon object computes the convex hull automatically; the corner order ends up upper-left, lower-left, lower-right, upper-right, upper-left
tmp = tuple(scopes_tuple[0])
poly_intersect = Polygon(tmp).convex_hull
for i in range(len(scopes_tuple)-1):
polygon_next = Polygon(tuple(scopes_tuple[i+1])).convex_hull
if poly_intersect.intersects(polygon_next):
poly_intersect = poly_intersect.intersection(polygon_next)
else:
msg = 'Image:' + str(i) + 'range does not overlap!'
logger.error(msg)
return
return list(poly_intersect.boundary.coords)[:-1]
# except shapely.geos.TopologicalError:
except TopologicalError:
logger.error('shapely.geos.TopologicalError occurred!')
return
@staticmethod
def resample_by_gdal(in_path, out_path):
src_ds = gdal.Open(in_path, gdal.GA_ReadOnly)
# projection and extent of the target image
target_projection = src_ds.GetProjection()
target_geotransform = src_ds.GetGeoTransform()
x_scale = target_geotransform[1]
y_scale = target_geotransform[5]
scale = [x_scale, np.abs(y_scale)]
new_scale = np.max(scale)
dst_geotransform = [target_geotransform[0], new_scale, target_geotransform[2], target_geotransform[3],
target_geotransform[4], -new_scale]
target_x_size = int(src_ds.RasterXSize * x_scale / new_scale) # resample onto a square pixel of size new_scale
target_y_size = int(src_ds.RasterYSize * np.abs(y_scale) / new_scale)
# create the output driver
driver = gdal.GetDriverByName('GTiff')
# create the output file
dst_ds = driver.Create(out_path, target_x_size, target_y_size, src_ds.RasterCount,
src_ds.GetRasterBand(1).DataType)
dst_ds.SetGeoTransform(dst_geotransform)
dst_ds.SetProjection(target_projection)
# perform the resampling
gdal.ReprojectImage(src_ds, dst_ds, None, None, gdal.GRA_Bilinear) # bilinear interpolation
# close the datasets
dst_ds = None
src_ds = None
@staticmethod
def write_polygon_shp(out_shp_path, point_list, EPSG =32649):
"""
Create a closed-ring polygon shapefile
:param out_shp_path: output path of the shapefile
:param point_list: list of points forming a closed ring [[x0,y0],[x1,y1]...[xn,yn]]
:return: True or False
"""
# add this line to support non-ASCII (Chinese) paths
gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "NO")
# add this line so attribute-table fields support Chinese
gdal.SetConfigOption("SHAPE_ENCODING", "")
# register all drivers
ogr.RegisterAll()
# create the data source, here an ESRI shapefile
str_driver_name = "ESRI Shapefile"
o_driver = ogr.GetDriverByName(str_driver_name)
if o_driver is None:
msg = 'driver('+str_driver_name+')is invalid value'
logger.error(msg)
return False
# create the data source
if os.path.exists(out_shp_path) and os.path.isfile(out_shp_path): # if a file with the same name exists
os.remove(out_shp_path) # delete it
o_ds = o_driver.CreateDataSource(out_shp_path)
if o_ds is None:
msg = 'create file failed!' + out_shp_path
logger.error(msg)
return False
# create a polygon layer
srs = osr.SpatialReference()
#srs.ImportFromEPSG(32649) # projected CRS (WGS84 / UTM zone 49N)
srs.ImportFromEPSG(EPSG) # CRS given by the EPSG code
o_layer = o_ds.CreateLayer("TestPolygon", srs, ogr.wkbPolygon)
if o_layer is None:
msg = 'create coverage failed!'
logger.error(msg)
return False
# build the attribute table
# first an integer field named FieldID
o_field_id = ogr.FieldDefn("FieldID", ogr.OFTInteger)
o_layer.CreateField(o_field_id, 1)
# then a string field named FieldName, 100 characters wide
o_field_name = ogr.FieldDefn("FieldName", ogr.OFTString)
o_field_name.SetWidth(100)
o_layer.CreateField(o_field_name, 1)
o_defn = o_layer.GetLayerDefn()
# create the rectangle feature
o_feature_rectangle = ogr.Feature(o_defn)
o_feature_rectangle.SetField(0, 1)
o_feature_rectangle.SetField(1, "IntersectRegion")
# create the linear ring
ring = ogr.Geometry(ogr.wkbLinearRing)
for i in range(len(point_list)):
ring.AddPoint(point_list[i][0], point_list[i][1])
ring.CloseRings()
# create the polygon from the ring
geom_rect_polygon = ogr.Geometry(ogr.wkbPolygon)
geom_rect_polygon.AddGeometry(ring)
o_feature_rectangle.SetGeometry(geom_rect_polygon)
o_layer.CreateFeature(o_feature_rectangle)
o_ds.Destroy()
return True
def cut_imgs(self, out_dir, para_names, processing_paras, shp_path):
"""
Clip images with vector data
:param para_names: names of the parameters to clip
:param shp_path: clipping shapefile
"""
if len(para_names) == 0:
return {}
cutted_img_paths = {}
try:
for name in para_names:
input_path = processing_paras[name]
output_path = os.path.join(out_dir, name + '_cut.tif')
self.cut_img(output_path, input_path, shp_path)
cutted_img_paths.update({name: output_path})
logger.info('cut %s success!', name)
except BaseException:
logger.error('cut_img failed!')
return {}
return cutted_img_paths
@staticmethod
def cut_img(output_path, input_path, shp_path):
"""
:param output_path: clipped image
:param input_path: image to clip
:param shp_path: vector data
:return: True or False
"""
r = shapefile.Reader(shp_path)
box = r.bbox
input_dataset = gdal.Open(input_path)
gdal.Warp(output_path, input_dataset, format='GTiff', outputBounds=box, cutlineDSName=shp_path, dstNodata=-9999)
# cutlineWhere="FIELD = whatever",
# optionally you can filter your cutline (shapefile) based on attribute values
# select the no data value you like
# ds = None
# do other stuff with ds object, it is your cropped dataset. in this case we only close the dataset.
del input_dataset
return True
def resampling_img(self, out_dir, para_names, img_paths, refer_img_path):
"""
Resample images onto the grid of the reference (master) image
:param para_names: names of the parameters to resample
:param img_paths: paths of the images to resample
:param refer_img_path: path of the reference image
"""
if len(para_names) == 0 or len(img_paths) == 0:
return
prepro_imgs_path = {}
for name in para_names:
img_path = img_paths[name]
output_para = os.path.join(out_dir, name + '_preprocessed.tif') # + name + '_preprocessed.tif'
self.resampling_by_scale(img_path, output_para, refer_img_path)
prepro_imgs_path.update({name: output_para})
logger.info('resampling %s success!', name)
return prepro_imgs_path
@staticmethod
def resampling_by_scale(input_path, target_file, refer_img_path):
"""
Resample an image by a scale factor onto the reference image grid
:param input_path: input GDAL raster path
:param target_file: output image
:param refer_img_path: reference image
:return: True or False
"""
ref_dataset = gdal.Open(refer_img_path)
ref_cols = ref_dataset.RasterXSize # number of columns
ref_rows = ref_dataset.RasterYSize # number of rows
target_dataset = gdal.Open(input_path)
target_cols = target_dataset.RasterXSize # number of columns
target_rows = target_dataset.RasterYSize # number of rows
if(ref_cols == target_cols) and (ref_rows == target_rows):
shutil.copyfile(input_path, target_file)
return True
dataset = gdal.Open(input_path)
if dataset is None:
logger.error('resampling_by_scale:dataset is None!')
return False
band_count = dataset.RasterCount # number of bands
if (band_count == 0) or (target_file == ""):
logger.error("resampling_by_scale: invalid parameters!")
return False
cols = dataset.RasterXSize # number of columns
rows = dataset.RasterYSize # number of rows
scale_x = ref_cols/cols
scale_y = ref_rows/rows
# rows = dataset.RasterYSize # number of rows
# cols = int(cols * scale) # compute the new column/row counts
# rows = int(rows * scale)
cols = ref_cols
rows = ref_rows
geotrans = list(dataset.GetGeoTransform())
geotrans[1] = geotrans[1] / scale_x # pixel width scaled to match the reference grid
geotrans[5] = geotrans[5] / scale_y # pixel height scaled to match the reference grid
if os.path.exists(target_file) and os.path.isfile(target_file): # if a file with the same name exists
os.remove(target_file) # delete it
if not os.path.exists(os.path.split(target_file)[0]):
os.makedirs(os.path.split(target_file)[0])
band1 = dataset.GetRasterBand(1)
data_type = band1.DataType
target = dataset.GetDriver().Create(target_file, xsize=cols, ysize=rows, bands=band_count,
eType=data_type)
target.SetProjection(dataset.GetProjection()) # set the projection
target.SetGeoTransform(geotrans) # set the geotransform
total = band_count + 1
for index in range(1, total):
# read the band data
data = dataset.GetRasterBand(index).ReadAsArray(buf_xsize=cols, buf_ysize=rows)
out_band = target.GetRasterBand(index)
no_data_value = dataset.GetRasterBand(index).GetNoDataValue() # nodata value of the band
if not (no_data_value is None):
out_band.SetNoDataValue(no_data_value)
out_band.WriteArray(data) # write the data into the new image
out_band.FlushCache()
out_band.ComputeBandStats(False) # compute band statistics
del dataset
del target
return True
@staticmethod
def cv_mean_filter(out_path, in_path, filter_size):
"""
:param out_path: filtered image
:param in_path: image to filter
:param filter_size: filter kernel size
:return: True or False
"""
proj = ImageHandler.get_projection(in_path)
geotrans = ImageHandler.get_geotransform(in_path)
array = ImageHandler.get_band_array(in_path, 1)
array = cv2.blur(array, (filter_size, filter_size)) # mean filter
ImageHandler.write_img(out_path, proj, geotrans, array)
return True
@staticmethod
def check_LocalIncidenceAngle(out_tif_path, in_tif_path):
"""
Set invalid angle values to NaN and convert the angles from degrees to radians
:param out_tif_path: output image path
:param in_tif_path: input image path
"""
proj, geo, angle = ImageHandler.read_img(in_tif_path)
angle = angle.astype(np.float32, order='C')
angle[angle == -9999] = np.nan
mean = np.nanmean(angle)
if mean > np.pi:
angle = np.deg2rad(angle) # degrees to radians
angle[np.where(angle >= 0.5 * np.pi)] = np.nan
angle[np.where(angle < 0)] = np.nan
ImageHandler.write_img(out_tif_path, proj, geo, angle)
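An editorial sketch of standalone use of two of the helpers above (not part of the commit); the file paths are placeholders:
# if __name__ == '__main__':
#     # reproject a projected-CRS raster to geographic coordinates (EPSG:4326)
#     PreProcess.trans_epsg4326(r'D:\DATA\img_4326.tif', r'D:\DATA\img_utm.tif')
#     # resample an auxiliary raster onto the pixel grid of a reference (SAR) image
#     PreProcess.resampling_by_scale(r'D:\DATA\ndvi_cut.tif', r'D:\DATA\ndvi_resampled.tif', r'D:\DATA\sar_ref.tif')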

View File

@@ -0,0 +1,184 @@
# -*- coding: UTF-8 -*-
"""
@Project:microproduct
@File:ROIAlg.py
@Function:
@Contact:
@Author:SHJ
@Date:2021/11/17
@Version:1.0.0
"""
import logging
from tool.algorithm.image.ImageHandle import ImageHandler
from tool.algorithm.algtools.PreProcess import PreProcess as pp
import numpy as np
logger = logging.getLogger("mylog")
class ROIAlg:
def __init__(self,):
pass
@staticmethod
def roi_process(names, processing_path, processing_paras, preprocessed_paras):
roi_paths = []
roi = ROIAlg()
for name in names:
if 'LocalIncidenceAngle' in name:
# build a mask from the NaN values of the angle image
pp.check_LocalIncidenceAngle(preprocessed_paras[name],preprocessed_paras[name])
angle_nan_mask_path = processing_path + 'angle_nan_mask.tif'
roi.trans_tif2mask(angle_nan_mask_path, preprocessed_paras[name], np.nan)
roi_paths.append(angle_nan_mask_path)
elif ("HH" in name) or ("HV" in name) or ("VH" in name) or ("VV" in name):
# build a mask from the valid extent of the image
tif_mask_path = processing_path + name + "_tif_mask.tif"
roi.trans_tif2mask(tif_mask_path, preprocessed_paras[name], np.nan)
roi_paths.append(tif_mask_path)
elif name == 'Covering':
# derive the vegetation-cover extent from the land-cover data
if processing_paras['CoveringIDs'] == 'empty':
cover_data = ImageHandler.get_data(preprocessed_paras["Covering"])
cover_id_list = list(np.unique(cover_data))
else:
cover_id_list = list(processing_paras['CoveringIDs'].split(';'))
cover_id_list = [int(num) for num in cover_id_list]
cover_mask_path = processing_path + "cover_mask.tif"
roi.trans_cover2mask(cover_mask_path, preprocessed_paras[name], cover_id_list)
roi_paths.append(cover_mask_path)
elif name == "NDVI":
# Use NDVI to identify bare soil. NDVI ranges from -1.0 to 1.0 and mostly reflects vegetation amount:
# negative values are mainly produced by clouds, water and snow,
# values near zero are mainly produced by rock and bare soil,
# low values (<= 0.1) indicate barren areas of rock, sand or snow,
# moderate values (0.2 to 0.3) indicate shrub and grassland,
# high values (0.6 to 0.8) indicate temperate and tropical rainforest.
ndvi_mask_path = processing_path + "ndvi_mask.tif"
ndvi_scope = list(processing_paras['NDVIScope'].split(';'))
threshold_of_ndvi_min = float(ndvi_scope[0])
threshold_of_ndvi_max = float(ndvi_scope[1])
roi.trans_tif2mask(ndvi_mask_path, preprocessed_paras[name], threshold_of_ndvi_min, threshold_of_ndvi_max)
roi_paths.append(ndvi_mask_path)
# else:
# # other feature images
# tif_mask_path = processing_path + name + "_mask.tif"
# roi.trans_tif2mask(tif_mask_path, preprocessed_paras[name], np.nan)
# roi_paths.append(tif_mask_path)
bare_land_mask_path = processing_path + "bare_land_mask.tif"
for roi_path in roi_paths:
roi.combine_mask(bare_land_mask_path, roi_path, bare_land_mask_path)
return bare_land_mask_path
@staticmethod
def trans_tif2mask(out_mask_path, in_tif_path, threshold_min, threshold_max = None):
"""
:param out_mask_path: output mask path
:param in_tif_path: input path
:param threshold_min: minimum threshold
:param threshold_max: maximum threshold
:return: True or False
"""
image_handler = ImageHandler()
proj = image_handler.get_projection(in_tif_path)
geotrans = image_handler.get_geotransform(in_tif_path)
array = image_handler.get_band_array(in_tif_path, 1)
if threshold_max == None and np.isnan(threshold_min)==True:
nan = np.isnan(array)
mask = (nan.astype(int) == 0).astype(int)
mask1 = ((array == -9999).astype(int) == 0).astype(int)
mask *= mask1
image_handler.write_img(out_mask_path, proj, geotrans, mask)
else:
if threshold_min < threshold_max:
mask = ((array > threshold_min) & (array < threshold_max)).astype(int)
image_handler.write_img(out_mask_path, proj, geotrans, mask)
elif threshold_min > threshold_max:
mask = ((array < threshold_min) & (array > threshold_max)).astype(int)
image_handler.write_img(out_mask_path, proj, geotrans, mask)
elif threshold_max == threshold_min:
mask = ((array == threshold_min).astype(int) == 0).astype(int)
image_handler.write_img(out_mask_path, proj, geotrans, mask)
logger.info("trans_tif2mask success, path: %s", out_mask_path)
return True
@staticmethod
def trans_cover2mask(out_mask_path, in_tif_path, cover_id_list):
"""
:param out_mask_path: output mask path
:param in_tif_path: input path
:param cover_id_list: land-cover class ids to keep
:return: True or False
"""
image_handler = ImageHandler()
proj = image_handler.get_projection(in_tif_path)
geotrans = image_handler.get_geotransform(in_tif_path)
array = image_handler.get_band_array(in_tif_path, 1)
mask = np.zeros(array.shape, dtype=bool)
for id in cover_id_list:
mask_tmp = (array == id)
mask = mask | mask_tmp
mask = mask.astype(int)
image_handler.write_img(out_mask_path, proj, geotrans, mask)
@staticmethod
def combine_mask(out_mask_path, in_main_mask_path, in_sub_mask_path):
"""
:param out_mask_path: output path
:param in_main_mask_path: main mask path; the output uses the main mask's georeferencing
:param in_sub_mask_path: secondary mask path
"""
image_handler = ImageHandler()
proj = image_handler.get_projection(in_main_mask_path)
geotrans = image_handler.get_geotransform(in_main_mask_path)
main_array = image_handler.get_band_array(in_main_mask_path, 1)
if image_handler.get_dataset(in_sub_mask_path) != None:
sub_array = image_handler.get_band_array(in_sub_mask_path, 1)
main_array = main_array * sub_array
image_handler.write_img(out_mask_path, proj, geotrans, main_array)
logger.info("combine_mask success, path: %s", out_mask_path)
return True
@staticmethod
def cal_roi(out_tif_path, in_tif_path, mask_path, background_value=1):
"""
:param out_tif_path: output ROI image
:param in_tif_path: image the ROI is applied to
:param mask_path: mask
:param background_value: value assigned to invalid areas
:return: True or False
"""
image_handler = ImageHandler()
proj = image_handler.get_projection(in_tif_path)
geotrans = image_handler.get_geotransform(in_tif_path)
tif_array = image_handler.get_data(in_tif_path) # read all bands into an array
mask_array = image_handler.get_band_array(mask_path, 1)
if len(tif_array.shape) == 3:
im_bands, im_height, im_width = tif_array.shape
else:
im_bands, (im_height, im_width) = 1, tif_array.shape
if im_bands == 1:
tif_array[np.isnan(mask_array)] = background_value
tif_array[mask_array == 0] = background_value
elif im_bands>1:
for i in range(0, im_bands):
tif_array[i, :, :][np.isnan(mask_array)] = background_value
tif_array[i, :, :][mask_array == 0] = background_value
image_handler.write_img(out_tif_path, proj, geotrans, tif_array, '0')
logger.info("cal_roi success, path: %s", out_tif_path)
return True
# if __name__ == '__main__':
# dir = r'G:\MicroWorkspace\C-SAR\SoilMoisture\Temporary\processing/'
# out_tif_path = dir + 'soil_moisture_roi.tif'
# in_tif_path = dir + 'soil_moisture.tif'
# mask_path = dir + 'bare_land_mask.tif'
# background_value = np.nan
# ROIAlg.cal_roi(out_tif_path, in_tif_path, mask_path, background_value)
# pass
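An editorial sketch of how the individual masks above can be chained (not part of the commit); the paths, NDVI range and land-cover ids are placeholders:
# if __name__ == '__main__':
#     ROIAlg.trans_tif2mask('ndvi_mask.tif', 'ndvi.tif', 0.1, 0.8)        # keep 0.1 < NDVI < 0.8
#     ROIAlg.trans_cover2mask('cover_mask.tif', 'covering.tif', [10, 20]) # keep selected land-cover ids
#     ROIAlg.combine_mask('bare_land_mask.tif', 'ndvi_mask.tif', 'cover_mask.tif')
#     ROIAlg.cal_roi('sigma_roi.tif', 'sigma.tif', 'bare_land_mask.tif', background_value=np.nan)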

View File

@@ -0,0 +1,57 @@
# -*- coding: UTF-8 -*-
"""
@Project:__init__.py
@File:sieve_filter.py
@Function: GDAL sieve (speckle) filtering
@Contact: 'https://www.osgeo.cn/gdal/api/gdal_alg.html?highlight=gdalsievefilter#'
'_CPPv415GDALSieveFilter15GDALRasterBandH15GDALRasterBandH15GDALRasterBandHiiPPc16GDALProgressFuncPv'
@Author:SHJ
@Date:2021/8/30 8:42
@Version:1.0.0
"""
import logging
from osgeo import gdal
import numpy as np
# from onestar.soilMoisture.OneMoistureImage import ImageHandler
from tool.algorithm.image.ImageHandle import ImageHandler
logger = logging.getLogger("mylog")
def gdal_sieve_filter(dst_filename, src_filename, threshold=100, connectedness=4):
"""
Raster sieve filtering based on Python GDAL
:param dst_filename: output filtered image
:param src_filename: input file to process
:param threshold: minimum raster polygon size (in pixels) to keep
:param connectedness: connectivity, 4 or 8
:return:
"""
# 4: diagonal pixels are not considered directly adjacent for polygon membership; 8: they are
# connectedness = 4
gdal.AllRegister()
# print('raster file to filter: {}, threshold: {}'.format(src_filename, threshold))
dataset = gdal.Open(src_filename, gdal.GA_Update)
if dataset is None:
logger.error('{}open tif fail!'.format(src_filename))
return False
# source raster band to process
src_band = dataset.GetRasterBand(1)
mask_band = src_band.GetMaskBand()
dst_band = src_band
prog_func = gdal.TermProgress_nocb
# call the GDAL sieve filter
result = gdal.SieveFilter(src_band, mask_band, dst_band, threshold, connectedness, callback=prog_func)
if result != 0:
return False
proj = dataset.GetProjection()
geotransform = dataset.GetGeoTransform()
dst_array = dst_band.ReadAsArray(0, 0, dst_band.XSize, dst_band.YSize)
ImageHandler.write_img(dst_filename, proj, geotransform, dst_array)
del dataset
return True
#
# if __name__ == '__main__':
# inputfile = r'D:\DATA\testdata\srcimg\GLCFCS30_E110N25.tif'
# outputfile = r'D:\DATA\testdata\srcimg\GLCFCS30_E110N25_sieve_filter.tif'
# flag = gdal_sieve_filter(outputfile, inputfile, threshold=100, connectedness=4)

View File

@@ -0,0 +1,122 @@
# -*- coding: UTF-8 -*-
"""
@Project microproduct
@File ScatteringAuxData.py
@Function Backscatter auxiliary data
@Author SHJ
@Contact
@Date 2022/6/29
@Version 1.0.0
Revision history
[sequence] [date] [author] [description]
1 2022-6-29 石海军 1. Support reading both GF3 L1A metadata files and ortho-rectified metadata files
"""
import logging
from xml.etree.ElementTree import ElementTree
logger = logging.getLogger("mylog")
class GF3L1AMetaData:
def __init__(self):
pass
@staticmethod
def get_QualifyValue(meta_file_path, polarization):
tree = ElementTree()
tree.parse(meta_file_path)
root = tree.getroot()
QualifyValue = float(root.find('imageinfo').find('QualifyValue').find(polarization).text)
return QualifyValue
@staticmethod
def get_Kdb(meta_file_path, polarization):
tree = ElementTree()
tree.parse(meta_file_path)
root = tree.getroot()
Kdb = float(root.find('processinfo').find('CalibrationConst').find(polarization).text)
return Kdb
class OrthoMetaData:
def __init__(self):
pass
@staticmethod
def get_QualifyValue(meta_file_path, polarization):
tree = ElementTree()
tree.parse(meta_file_path)
root = tree.getroot()
QualifyValue = float(root.find('l1aInfo').find('imageinfo').find('QualifyValue').find(polarization).text)
return QualifyValue
@staticmethod
def get_Kdb(meta_file_path, polarization):
tree = ElementTree()
tree.parse(meta_file_path)
root = tree.getroot()
Kdb = float(root.find('l1aInfo').find('processinfo').find('CalibrationConst').find(polarization).text)
return Kdb
@staticmethod
def get_RadarCenterFrequency(meta_file_path):
# radar centre frequency
tree = ElementTree()
tree.parse(meta_file_path)
root = tree.getroot()
RadarCenterFrequency = float(root.find('sensor').find('RadarCenterFrequency').text)
return RadarCenterFrequency
@staticmethod
def get_lamda(meta_file_path):
# radar wavelength, unit: m
tree = ElementTree()
tree.parse(meta_file_path)
root = tree.getroot()
lamda = float(root.find('sensor').find('lamda').text)
return lamda
class ScatteringAuxData:
def __init__(self):
pass
@staticmethod
def get_QualifyValue(meta_file_path, polarization):
try:
QualifyValue = OrthoMetaData.get_QualifyValue(meta_file_path, polarization)
except Exception:
logger.warning('OrthoMetaData.get_QualifyValue() error!')
QualifyValue = GF3L1AMetaData.get_QualifyValue(meta_file_path, polarization)
logger.info('GF3L1AMetaData.get_QualifyValue() success!')
return QualifyValue
@staticmethod
def get_Kdb(meta_file_path, polarization):
try:
Kdb = OrthoMetaData.get_Kdb(meta_file_path, polarization)
except Exception:
logger.warning('OrthoMetaData.get_Kdb() error!')
Kdb = GF3L1AMetaData.get_Kdb(meta_file_path, polarization)
logger.info('GF3L1AMetaData.get_Kdb() success!')
return Kdb
@staticmethod
def get_RadarCenterFrequency(meta_file_path):
# radar centre frequency, unit: GHz
RadarCenterFrequency = OrthoMetaData.get_RadarCenterFrequency(meta_file_path)
return RadarCenterFrequency
@staticmethod
def get_lamda(meta_file_path):
# radar wavelength, unit: m
lamda = OrthoMetaData.get_lamda(meta_file_path)
return lamda
# if __name__ == '__main__':
# A = ScatteringAuxData()
# dir = 'G:\MicroWorkspace\C-SAR\AuxSAR\GF3_KAS_FSII_020008_E113.2_N23.1_20200528_L1A_HHHV_L10004829485_geo/'
# path = dir + 'GF3_KAS_FSII_020008_E113.2_N23.1_20200528_L1A_HHHV_L10004829485.meta.xml'
# path1 = dir + 'OrthoProduct.meta.xml'
# t1 = A.get_QualifyValue(path, 'HH')
# t2 = A.get_Kdb(path, 'HH')
# t3 = A.get_RadarCenterFrequency(path)
# t4 = A.get_lamda(path)
# pass

View File

@@ -0,0 +1,414 @@
# -*- coding: UTF-8 -*-
"""
@Project microproduct
@File CalculateIncident.py
@Function Local incidence angle computation
@Author LMM
@Date 2021/8/25 14:17
@Version 1.0.0
"""
import os
import numpy as np
from osgeo import gdal
from osgeo import gdalconst
import gc
import math
from xml.dom import minidom # standard library, no extra installation needed
class CalculateIncident:
def __init__(self):
pass
@staticmethod
def add_round(npgrid):
"""
Pad the grid with a one-pixel border and return the padded array
param: npgrid - DEM array
"""
ny, nx = npgrid.shape # ny: number of rows, nx: number of columns
zbc = np.zeros((ny + 2, nx + 2))
zbc[1:-1, 1:-1] = npgrid
# four edges
zbc[0, 1:-1] = npgrid[0, :]
zbc[-1, 1:-1] = npgrid[-1, :]
zbc[1:-1, 0] = npgrid[:, 0]
zbc[1:-1, -1] = npgrid[:, -1]
# corners
zbc[0, 0] = npgrid[0, 0]
zbc[0, -1] = npgrid[0, -1]
zbc[-1, 0] = npgrid[-1, 0]
zbc[-1, -1] = npgrid[-1, -1]
print("shape of the padded array:", zbc.shape)
return zbc
@staticmethod
def cal_dxdy(zbc, dx):
"""
Compute the surface gradients dx, dy
param: zbc - padded array
param: dx - DEM cell size
"""
we_x = ((zbc[1:-1, :-2]) - (zbc[1:-1, 2:])) / dx / 2 # west-east direction
ns_y = ((zbc[2:, 1:-1]) - (zbc[:-2, 1:-1])) / dx / 2 # north-south direction
print("shape of Sx:", we_x.shape, "shape of Sy:", ns_y.shape)
sx = we_x[1:-1, 1:-1]
sy = ns_y[1:-1, 1:-1]
# np.savetxt("dxdy.csv",dx,delimiter=",")
print("shape of Sx2:", sx.shape, "shape of Sy2:", sy.shape)
return sx, sy
@staticmethod
def cal_slopasp(dx, dy):
# compute slope and aspect
# slope
slope = (np.arctan(np.sqrt(dx * dx + dy * dy))) * 57.29578 # convert to degrees, 57.29578 = 180 / math.pi
slope = slope[1:-1, 1:-1]
# aspect
aspect = np.ones([dx.shape[0], dx.shape[1]]).astype(np.float32) # allocate the aspect array
# dx = dx.astype(np.float32)
# dy = dy.astype(np.float32)
# a1=(np.where(dx==0) and np.where(dy ==0))
# print(a1)
# aspect[a1]=-1
# a2 = (np.where(dx == 0) and np.where(dy > 0))
# aspect[a2] =0.0
# a3 = (np.where(dx == 0) and np.where(dy <0))
# aspect[a3] =180.0
# a4 = (np.where(dx > 0) and np.where(dy ==0))
# aspect[a4] =90.0
# a5 = (np.where(dx < 0) and np.where(dy ==0))
# aspect[a5] =270.0
# a6 = (np.where(dx != 0) or np.where(dy !=0))
# b=dy[a6]
# print(":", 1)
# aspect[a6] =float(math.atan2(dy[i, j], dx[i, j])) * 57.29578
# a7=np.where(aspect[a6]< 0.0)
# aspect[a7] = 90.0 - aspect[a7]
# a8=np.where(aspect[a6]> 90.0)
# aspect[a8] = 450.0- aspect[a8]
# a9 =np.where(aspect[a6] >= 0 or aspect[a6] <= 90)
# aspect[a9] =90.0 - aspect[a9]
for i in range(dx.shape[0]):
for j in range(dx.shape[1]):
x = float(dx[i, j])
y = float(dy[i, j])
if (x == 0.0) & (y == 0.0):
aspect[i, j] = -1
elif x == 0.0:
if y > 0.0:
aspect[i, j] = 0.0
else:
aspect[i, j] = 180.0
elif y == 0.0:
if x > 0.0:
aspect[i, j] = 90.0
else:
aspect[i, j] = 270.0
else:
aspect[i, j] = float(math.atan2(y, x)) * 57.29578 # atan2 in degrees, range (-180, 180]
if aspect[i, j] < 0.0:
aspect[i, j] = 90.0 - aspect[i, j]
elif aspect[i, j] > 90.0:
aspect[i, j] = 450.0 - aspect[i, j]
else:
aspect[i, j] = 90.0 - aspect[i, j]
print("输出aspect形状:", aspect.shape) # 3599, 3599
print("输出aspect:", aspect)
return slope, aspect
def creat_twofile(self, dem_file_path, slope_out_path, aspect_out_path):
"""
Generate the slope map and the aspect map
param: dem_file_path - path of the input DEM (GeoTIFF)
"""
if os.path.isfile(dem_file_path):
print("DEM file exists")
else:
print("DEM file does not exist")
dataset_caijian = gdal.Open(dem_file_path)
x_size = dataset_caijian.RasterXSize
y_size = dataset_caijian.RasterYSize
geo = dataset_caijian.GetGeoTransform()
pro = dataset_caijian.GetProjection()
array0 = dataset_caijian.ReadAsArray(0, 0, x_size, y_size)
print("输出dem数据的数组", array0)
zbc = self.add_round(array0)
sx, sy = self.cal_dxdy(zbc, 30)
slope, aspect = self.cal_slopasp(sx, sy)
driver = gdal.GetDriverByName("GTiff") # create a GeoTIFF driver
driver.Register()
newfile = driver.Create(slope_out_path, x_size, y_size, 1, gdal.GDT_Float32) # path, width, height, bands, data type
newfile.SetProjection(pro)
geo = [geo[0], geo[1], 0, geo[3], 0, -geo[1]]
newfile.SetGeoTransform(geo)
newfile.GetRasterBand(1).WriteArray(slope)
driver2 = gdal.GetDriverByName("GTiff") # create a GeoTIFF driver
driver2.Register()
newfile2 = driver2.Create(aspect_out_path, x_size, y_size, 1, gdal.GDT_Float32) # path, width, height, bands, data type
newfile2.SetProjection(pro)
geo = [geo[0], geo[1], 0, geo[3], 0, -geo[1]]
newfile2.SetGeoTransform(geo)
newfile2.GetRasterBand(1).WriteArray(aspect)
@staticmethod
def resampling(input_file1, input_file2, ref_file, output_file, output_file2):
"""
Resample using gdal.ReprojectImage(); the interpolation method is bilinear.
:param input_file1: input slope path
:param input_file2: input aspect path
:param ref_file: reference image path
:param output_file: resampled slope output path
:param output_file2: resampled aspect output path
:return:
"""
gdal.AllRegister()
in_ds1 = gdal.Open(input_file1)
in_ds2 = gdal.Open(input_file2)
ref_ds = gdal.Open(ref_file, gdal.GA_ReadOnly)
# read input image information
input_file_proj = in_ds1.GetProjection()
# inputefileTrans = in_ds1.GetGeoTransform()
reference_file_proj = ref_ds.GetProjection()
reference_file_trans = ref_ds.GetGeoTransform()
nbands = in_ds1.RasterCount
bandinputfile1 = in_ds1.GetRasterBand(1)
bandinputfile2 = in_ds2.GetRasterBand(1)
x = ref_ds.RasterXSize
y = ref_ds.RasterYSize
# create the resampled output files (set projection and geotransform)
driver1 = gdal.GetDriverByName('GTiff')
output1 = driver1.Create(output_file, x, y, nbands, bandinputfile1.DataType)
output1.SetGeoTransform(reference_file_trans)
output1.SetProjection(reference_file_proj)
# options = gdal.WarpOptions(srcSRS=inputProj, dstSRS=referencefileProj, resampleAlg=gdalconst.GRA_Bilinear)
# resampleAlg = gdalconst.GRA_NearestNeighbour
gdal.ReprojectImage(in_ds1, output1, input_file_proj, reference_file_proj, gdalconst.GRA_Bilinear)
driver2 = gdal.GetDriverByName('GTiff')
output2 = driver2.Create(output_file2, x, y, nbands, bandinputfile2.DataType)
output2.SetGeoTransform(reference_file_trans)
output2.SetProjection(reference_file_proj)
# options = gdal.WarpOptions(srcSRS=inputProj, dstSRS=referencefileProj, resampleAlg=gdalconst.GRA_Bilinear)
# resampleAlg = gdalconst.GRA_NearestNeighbour
gdal.ReprojectImage(in_ds2, output2, input_file_proj, reference_file_proj, gdalconst.GRA_Bilinear)
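# --- Hedged alternative (sketch only; not what this module uses) -----------
# The same resampling could be written with gdal.Warp(), e.g.:
#     gdal.Warp(output_file, in_ds1, format='GTiff',
#               width=x, height=y, dstSRS=reference_file_proj,
#               resampleAlg='bilinear')
# Matching the reference grid exactly would additionally need outputBounds
# derived from reference_file_trans; the ReprojectImage() calls above avoid
# that by writing into datasets whose geotransform is copied from the
# reference image.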
@staticmethod
def getorbitparameter(xml_path):
"""
Read the ascending/descending flag and the lat/lon of the four image corners from the orbit-parameter XML file.
"""
# open the XML document and initialise the DOM from the path
doc = minidom.parse(xml_path)
# get the document element as the root object
root = doc.documentElement
# orbit direction: DEC = descending, ASC = ascending
direction = root.getElementsByTagName("Direction")[0]
# print("Direction child node:", direction.firstChild.data)
pd = direction.firstChild.data
imageinfo = root.getElementsByTagName("imageinfo")[0]
# latitude and longitude of topLeft
top_left = imageinfo.getElementsByTagName("topLeft")[0]
latitude = top_left.getElementsByTagName("latitude")[0]
longitude = top_left.getElementsByTagName("longitude")[0]
# print("输出topLeft的纬度lat和经度lon", latitude.firstChild.data,longitude.firstChild.data)
tl_lat, tl_lon = latitude.firstChild.data, longitude.firstChild.data
# latitude and longitude of topRight
top_right = imageinfo.getElementsByTagName("topRight")[0]
latitude = top_right.getElementsByTagName("latitude")[0]
longitude = top_right.getElementsByTagName("longitude")[0]
# print("输出topRight的纬度lat和经度lon", latitude.firstChild.data,longitude.firstChild.data)
tr_lat, tr_lon = latitude.firstChild.data, longitude.firstChild.data
# latitude and longitude of bottomLeft
bottom_left = imageinfo.getElementsByTagName("bottomLeft")[0]
latitude = bottom_left.getElementsByTagName("latitude")[0]
longitude = bottom_left.getElementsByTagName("longitude")[0]
# print("输出bottomLeft的纬度lat和经度lon", latitude.firstChild.data,longitude.firstChild.data)
bl_lat, bl_lon = latitude.firstChild.data, longitude.firstChild.data
# latitude and longitude of bottomRight
bottom_right = imageinfo.getElementsByTagName("bottomRight")[0]
latitude = bottom_right.getElementsByTagName("latitude")[0]
longitude = bottom_right.getElementsByTagName("longitude")[0]
# print("输出bottomRight的纬度lat和经度lon", latitude.firstChild.data,longitude.firstChild.data)
br_lat, br_lon = latitude.firstChild.data, longitude.firstChild.data
print("pd,tl_lat,tl_lon,tr_lat,tr_lon,bl_lat,bl_lon,br_lat,br_lon", pd, tl_lat, tl_lon, tr_lat, tr_lon, bl_lat,
bl_lon, br_lat, br_lon)
return pd, tl_lat, tl_lon, tr_lat, tr_lon, bl_lat, bl_lon, br_lat, br_lon
def get_rparademeter(self, xml_path):
"""
Compute the radar line-of-sight azimuth angle R.
"""
pd, tl_lat, tl_lon, tr_lat, tr_lon, bl_lat, bl_lon, br_lat, br_lon = self.getorbitparameter(xml_path)
tl_lat = float(tl_lat)  # the values are decimal strings; int() would raise, so use float()
tl_lon = float(tl_lon)
# tr_lat = float(tr_lat)
# tr_lon = float(tr_lon)
bl_lat = float(bl_lat)
bl_lon = float(bl_lon)
# br_lat = float(br_lat)
# br_lon = float(br_lon)
if pd == "DEC":
# descending orbit
b = np.arctan((tl_lat - bl_lat) / (tl_lon - bl_lon)) * 57.29578
r = 270 + b
return r
# tl_lat, tl_lon = lonlat2geo(tl_lat, tl_lon)
# tr_lat, tr_lon = lonlat2geo(tr_lat, tr_lon)
# bl_lat, bl_lon = lonlat2geo(bl_lat, bl_lon)
# br_lat, br_lon = lonlat2geo(br_lat, br_lon)
# B2 = np.arctan((tl_lat - bl_lat) / (tl_lon - bl_lon)) * 57.29578
# R2 = 270 + B2
# print(("输出R2", R2))
if pd == "ASC":
# ascending orbit
b = np.arctan((tl_lat - bl_lat) / (tl_lon - bl_lon)) * 57.29578
return b
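# --- Worked example (illustrative corner coordinates) -----------------------
# If the top-left corner is 0.5 degrees north and 0.5 degrees east of the
# bottom-left corner, b = arctan(0.5 / 0.5) * 57.29578 = 45 degrees, so a
# descending pass ("DEC") returns r = 270 + 45 = 315 and an ascending pass
# ("ASC") returns b = 45.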
def clau(self, pathfile1, pathfile2, pathfile3, xml_path, save_localangle_path):
"""
Compute the local incidence angle.
param: pathfile1: path of the slope map
param: pathfile2: path of the aspect map
param: pathfile3: path of the radar incidence-angle file
param: xml_path: path of the orbit-parameter file
r is the radar line-of-sight azimuth angle
"""
r = self.get_rparademeter(xml_path)
pd, tl_lat, tl_lon, tr_lat, tr_lon, bl_lat, bl_lon, br_lat, br_lon = self.getorbitparameter(xml_path)
print("orbit direction:", pd)
dataset = gdal.Open(pathfile1)
x = dataset.RasterXSize
y = dataset.RasterYSize
print("slope raster size (cols, rows):", x, y)
slope_array = dataset.ReadAsArray(0, 0, x, y)
dataset2 = gdal.Open(pathfile2)
x2 = dataset2.RasterXSize
y2 = dataset2.RasterYSize
print("aspect raster size (cols, rows):", x2, y2)
aspect_array = dataset2.ReadAsArray(0, 0, x2, y2)
dataset3 = gdal.Open(pathfile3)
x3 = dataset3.RasterXSize
y3 = dataset3.RasterYSize
geo3 = dataset3.GetGeoTransform()
pro3 = dataset3.GetProjection()
print("incidence-angle raster size (cols, rows):", x3, y3)
rushe_array = dataset3.ReadAsArray(0, 0, x3, y3)
# b0 = np.where(rushe_array > 0.00001, 0, 1)
radina_value = 0
if pd == "DEC":
# descending-orbit data
# case 1: radar look azimuth minus aspect lies strictly between 90 and 270 degrees
where_0 = np.where(rushe_array == 0)
diff = r - aspect_array
b1 = np.where((diff > 90) & (diff < 270), 1, 0)
c1 = np.cos(rushe_array*(math.pi/180)) * np.cos(slope_array*(math.pi/180)) - np.sin(slope_array*(math.pi/180)) * np.sin(
rushe_array*(math.pi/180)) * np.cos((r - aspect_array)*(math.pi/180))
d1 = b1 * c1
# case 2: the difference equals exactly 90 or 270 degrees
b2 = np.where((diff == 90) | (diff == 270), 1, 0)
d2 = b2*c1
# case 3: the difference lies outside the 90-270 degree range
b3 = 1-b1-b2
c3 = np.cos(rushe_array*(math.pi/180)) * np.cos(slope_array*(math.pi/180)) + np.sin(
slope_array*(math.pi/180)) * np.sin(rushe_array*(math.pi/180)) * np.cos((r - aspect_array)*(math.pi/180))
d3 = b3 * c3
del b1, b2, b3, c3, c1
gc.collect()
radina_value = d1 + d2 + d3
radina_value[where_0] = 0
del d1, d2, d3
gc.collect()
if pd == "ASC":
# ascending-orbit data
# case 1: aspect minus radar look azimuth lies strictly between 90 and 270 degrees
where_0 = np.where(rushe_array == 0)
diff = aspect_array - r
b1 = np.where((diff > 90) & (diff < 270), 1, 0)
c1 = np.cos(rushe_array*(math.pi/180)) * np.cos(slope_array*(math.pi/180)) + np.sin(
slope_array*(math.pi/180)) * np.sin(rushe_array*(math.pi/180)) * np.cos((r - aspect_array)*(math.pi/180))
d1 = b1 * c1
# case 2: the difference equals exactly 90 or 270 degrees
b2 = np.where((diff == 90) | (diff == 270), 1, 0)
d2 = b2 * c1
# case 3: the difference lies between 0 and 90 or between 270 and 360 degrees
b3 = 1 - b1-b2
c3 = np.cos(rushe_array*(math.pi/180)) * np.cos(slope_array*(math.pi/180)) - np.sin(slope_array*(math.pi/180)) *\
np.sin(rushe_array*(math.pi/180)) * np.cos((r - aspect_array)*(math.pi/180))
d3 = b3 * c3
radina_value = d1 + d2 + d3
radina_value[where_0] = 0
del b1, b2, b3, c3, c1, d1, d2, d3
gc.collect()
jubu_o = 57.29578 * np.arccos(radina_value)
print("local incidence angle:", jubu_o)
driver = gdal.GetDriverByName("GTiff")  # create the GTiff driver
driver.Register()
newfile = driver.Create(save_localangle_path, x3, y3, 1, gdal.GDT_Float32)  # path, width, height, bands, data type
newfile.SetProjection(pro3)
newfile.SetGeoTransform(geo3)
newfile.GetRasterBand(1).WriteArray(jubu_o)
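# --- Model note (summary of what the branches above compute) ----------------
# Both orbit branches evaluate
#     cos(theta_loc) = cos(theta)*cos(slope) +/- sin(theta)*sin(slope)*cos(R - aspect)
# where theta is the radar incidence angle, slope and aspect come from the
# DEM and R is the line-of-sight azimuth; the sign is selected by the
# azimuth-difference cases above, and the local incidence angle is
# arccos(...) * 57.29578 degrees.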
def localangle(self, dem_path, incidence_angle_path, orbital_parameters_path):
"""
Collect the input file paths,
generate the slope and aspect maps,
and compute the local incidence angle.
"""
para_names = ["Dem", "IncidenceAngle", "OrbitalParameters", "经验A"]
if len(para_names) == 0:
return False
# paths of the three input files
# print("input file paths:", Dem_path, IncidenceAngle_path, OrbitalParameters_path)
# output paths for the slope and aspect maps
slope_out_path = r"D:\MicroWorkspace\LeafAreaIndex\Temporary\UnClipslope.tif"
aspect_out_path = r"D:\MicroWorkspace\LeafAreaIndex\Temporary\UnClipaspect.tif"
print("slope_out_path:", slope_out_path)
print("aspect_out_path:", aspect_out_path)
self.creat_twofile(dem_path, slope_out_path, aspect_out_path)
# clip and resample the slope/aspect maps to match the incidence-angle file
slope_out_path2 = r"D:\MicroWorkspace\LocaLangle\Temporary\Clipslope.tif"
aspect_out_path2 = r"D:\MicroWorkspace\LocaLangle\Temporary\Clipaspect.tif"
self.resampling(slope_out_path, aspect_out_path, incidence_angle_path, slope_out_path2, aspect_out_path2)
# write the local incidence angle file
save_localangle_path = r"D:\MicroWorkspace\LocaLangle\Temporary\localangle.tif"
self.clau(slope_out_path2, aspect_out_path2, incidence_angle_path,
orbital_parameters_path, save_localangle_path)
# if __name__ == '__main__':
# calu_incident = CalculateIncident()
# Dem_path = "D:\\MicroWorkspace\\LocaLangle\\Input\\dem.tif"
# IncidenceAngle_path = "D:\\MicroWorkspace\\LocaLangle\\Input\\RSJ.tif"
# OrbitalParameters_path = "D:\\MicroWorkspace\\LocaLangle\\Input\\" \
# "GF3_KAS_FSII_020008_E113.2_N23.1_20200528_L1A_HHHV_L10004829485.meta.xml"
# calu_incident.localangle(Dem_path, IncidenceAngle_path, OrbitalParameters_path)
# print('done')

View File

@ -0,0 +1,302 @@
# -*- coding: UTF-8 -*-
"""
@Project:__init__.py
@File:lee_filter.py
@Function:lee_filter
@Contact: https://github.com/PyRadar/pyradar
@Author:SHJ
@Date:2021/8/30 8:42
@Version:1.0.0
"""
import numpy as np
import math
from PIL import Image
import multiprocessing
from tool.algorithm.block.blockprocess import BlockProcess
import logging
import shutil
from tool.algorithm.image.ImageHandle import ImageHandler
from tool.file.fileHandle import fileHandle
from tool.algorithm.algtools.filter import lee_Filter_c as lee_Filter_c
logger = logging.getLogger("mylog")
file =fileHandle(False)
COEF_VAR_DEFAULT = 0.01
CU_DEFAULT = 0.25
import os
class Filter:
def __init__(self):
pass
@staticmethod
def assert_window_size(win_size):
"""
Asserts invalid window size.
Window size must be odd and bigger than 3.
"""
assert win_size >= 3, 'ERROR: win size must be at least 3'
if win_size % 2 == 0:
print('It is highly recommended to use odd window sizes. '
'You provided %s, an even number.' % (win_size, ))
@staticmethod
def assert_indices_in_range(width, height, xleft, xright, yup, ydown):
"""
Asserts index out of image range.
"""
# assert xleft >= 0 and xleft <= width, \
assert 0 <= xleft <= width, \
"index xleft:%s out of range (%s<= xleft < %s)" % (xleft, 0, width)
# assert xright >= 0 and xright <= width, \
assert 0 <= xright <= width, "index xright:%s out of range (%s<= xright < %s)" % (xright, 0, width)
# assert yup >= 0 and yup <= height, \
assert 0 <= yup <= height, "index yup:%s out of range. (%s<= yup < %s)" % (yup, 0, height)
# assert ydown >= 0 and ydown <= height, \
assert 0 <= ydown <= height, "index ydown:%s out of range. (%s<= ydown < %s)" % (ydown, 0, height)
@staticmethod
def weighting(window, cu=CU_DEFAULT):
"""
Computes the weighting function for the Lee filter using cu as the noise
coefficient.
"""
# cu is the noise variation coefficient
two_cu = cu * cu
# ci is the variation coefficient in the window
window_mean = window.mean()
window_std = window.std()
ci = window_std / window_mean
two_ci = ci * ci
if not two_ci: # dirty patch to avoid zero division
two_ci = COEF_VAR_DEFAULT
if cu > ci:
w_t = 0.0
else:
w_t = 1.0 - (two_cu / two_ci)
return w_t
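# --- Worked example (illustrative only) -------------------------------------
# For the window [[1, 2], [3, 4]]: mean = 2.5, population std = sqrt(1.25),
# so ci ≈ 0.4472 and two_ci = 0.2; with cu = 0.25 this gives
#     w_t = 1 - (0.25 * 0.25) / 0.2 = 0.6875,
# i.e. windows with high variance relative to their mean keep more of the
# original pixel value.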
def lee_filter(self, in_path, out_path, win_size):
"""
Apply the Lee filter to a numpy matrix containing the image, using a
window of win_size x win_size.
"""
cu = CU_DEFAULT
self.assert_window_size(win_size)
# img = self.ImageHandler.get_band_array(img, 1)
array1 = Image.open(in_path)
img = np.array(array1)
# we process the entire img as float64 to avoid type overflow error
img = np.float64(img)
img_filtered = np.zeros_like(img)
# n, m = img.shape
# win_offset = win_size / 2
#
# for i in range(0, n):
# xleft = i - win_offset
# xright = i + win_offset
#
# if xleft < 0:
# xleft = 0
# if xright >= n:
# xright = n
#
# for j in range(0, m):
# yup = j - win_offset
# ydown = j + win_offset
#
# if yup < 0:
# yup = 0
# if ydown >= m:
# ydown = m
#
# self.assert_indices_in_range(n, m, xleft, xright, yup, ydown)
#
# pix_value = img[i, j]
#
# window = img[math.ceil(xleft):int(xright)+1, math.ceil(yup):int(ydown)+1]
# w_t = self.weighting(window, cu)
# window_mean = window.mean()
# new_pix_value = (pix_value * w_t) + (window_mean * (1.0 - w_t))
#
# if not new_pix_value > 0:
# new_pix_value = 0
# img_filtered[i, j] = round(new_pix_value)
# # return img_filtered
self.lee_filter_array(img, img_filtered, win_size)
out_image = Image.fromarray(img_filtered)
out_image.save(out_path)
print("lee_filter finish! path:" + out_path)
return True
@staticmethod
def lee_filter_array(in_arry, out_arry, win_size):
"""
Apply the Lee filter to a numpy matrix containing the image, using a
window of win_size x win_size.
"""
f = Filter()
#cu = CU_DEFAULT
f.assert_window_size(win_size)
img = in_arry
# we process the entire img as float64 to avoid type overflow error
img = np.float64(img)
img = img + 100  # offset that appears intended to keep window means strictly positive for the weighting step
# signature: lee_filter_array(np.ndarray[double, ndim=2] img, np.ndarray[double, ndim=2] out_arry, int win_size)
newOUt = lee_Filter_c.lee_filter_array(img, out_arry, win_size)
newOUt = newOUt - 100  # remove the offset again
out_arry[:, :] = newOUt[:, :]
# def lee_filter_array(self, in_arry, out_arry, win_size):
# """
# Apply lee to a numpy matrix containing the image, with a window of
# win_size x win_size.
# """
# cu = CU_DEFAULT
# self.assert_window_size(win_size)
# img = in_arry
# # we process the entire img as float64 to avoid type overflow error
# img = np.float64(img)
# img = img + 100
# img_filtered = np.zeros_like(img)
# n, m = img.shape
# win_offset = win_size / 2
#
# for i in range(0, n):
# xleft = i - win_offset
# xright = i + win_offset
#
# if xleft < 0:
# xleft = 0
# if xright >= n:
# xright = n
#
# for j in range(0, m):
# yup = j - win_offset
# ydown = j + win_offset
#
# if yup < 0:
# yup = 0
# if ydown >= m:
# ydown = m
#
# self.assert_indices_in_range(n, m, xleft, xright, yup, ydown)
#
# pix_value = img[i, j]
#
# window = img[math.ceil(xleft):int(xright)+1, math.ceil(yup):int(ydown)+1]
# w_t = self.weighting(window, cu)
# window_mean = window.mean()
# new_pix_value = (pix_value * w_t) + (window_mean * (1.0 - w_t))
#
# if not new_pix_value > 0:
# new_pix_value = 0
# out_arry[i, j] = round(new_pix_value)
# out_arry = out_arry - 100
#
def lee_filter_multiprocess(self, in_paths, out_paths, win_size=3, processes_num=10):
if len(in_paths) != len(out_paths):
return False
# run the filtering in a multiprocessing pool
pool = multiprocessing.Pool(processes=processes_num)
pl = []
for i in range(len(in_paths)):
#self.lee_filter(in_paths[i], out_paths[i], win_size)
pl.append(pool.apply_async(self.lee_filter,(in_paths[i], out_paths[i], win_size)))
print("lee_filter running! path:" + in_paths[i])
pool.close()
pool.join()
return True
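# --- Usage sketch (hypothetical paths) ---------------------------------------
#     f = Filter()
#     f.lee_filter_multiprocess([r"D:\tmp\HH.tif", r"D:\tmp\HV.tif"],
#                               [r"D:\tmp\HH_f.tif", r"D:\tmp\HV_f.tif"],
#                               win_size=3, processes_num=2)
# Each input image is filtered in its own worker process via pool.apply_async.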
def lee_filter_block_multiprocess(self, in_path, out_path, win_size =3):
in_name = os.path.basename(in_path)
out_name = os.path.basename(out_path)
outDir= os.path.split(out_path)[0]
# create working folders
src_path = os.path.join(outDir, "src_img")
block_path = os.path.join(outDir, "block")
block_filtered = os.path.join(outDir, "block_filtered")
file.creat_dirs([src_path, block_path, block_filtered])
shutil.copyfile(in_path, os.path.join(src_path, in_name))
cols = ImageHandler.get_img_width(in_path)
rows = ImageHandler.get_img_height(in_path)
# split into blocks
bp = BlockProcess()
block_size = bp.get_block_size(rows, cols)
bp.cut(src_path, block_path, ['tif', 'tiff'], 'tif', block_size)
logger.info('blocking tifs success!')
img_dir, img_name = bp.get_file_names(block_path, ['tif'])
dir_dict = bp.get_same_img(img_dir, img_name)
img_path_list = [value for value in dir_dict.values()][0]
processes_num = min([len(img_path_list), multiprocessing.cpu_count() - 1])
out_img_path_list =[]
for in_path in img_path_list:
suffix = bp.get_suffix(os.path.basename(in_path))
out_path = os.path.join(block_filtered, out_name.replace('.tif', suffix))
out_img_path_list.append(out_path)
self.lee_filter_multiprocess(img_path_list, out_img_path_list, win_size = win_size, processes_num=processes_num)
# 开启多进程处理
# pool = multiprocessing.Pool(processes=processes_num)
#
# for i in range(len(hh_list)):
# block_img_path = hh_list[i]
# suffix = bp.get_suffix(os.path.basename(hh_list[i]))
# filed_block_img_path = os.path.join(block_filtered,out_name.replace('.tif',suffix))
# pool.apply_async(self.lee_filter, (block_img_path, filed_block_img_path, win_size))
# print("lee_filter runing! path:" + block_img_path)
# logger.info('total:%s, block:%s lee_filter!', len(hh_list), i)
#
# pool.close()
# pool.join()
# merge the filtered blocks back into a single image
bp.combine(block_filtered, cols, rows, outDir, file_type=['tif'], datetype='float32')
file.del_folder(src_path)
file.del_folder(block_path)
file.del_folder(block_filtered)
pass
def lee_process_sar(self, in_sar, out_sar, win_size, noise_var):
'''
# std::cout << "mode 12"
# std::cout << "SIMOrthoProgram.exe 12 in_sar_path out_sar_path win_size noise_var"
'''
exe_path = r".\baseTool\x64\Release\SIMOrthoProgram-S-SAR.exe"
exe_cmd = r"set PROJ_LIB=.\baseTool\x64\Release; & {0} {1} {2} {3} {4} {5}".format(exe_path, 12, in_sar,
out_sar, win_size, noise_var)
print(exe_cmd)
print(os.system(exe_cmd))
print("==========================================================================")
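# --- Usage sketch (hypothetical paths and parameter values) ------------------
#     f = Filter()
#     f.lee_process_sar(r"D:\tmp\HH.tif", r"D:\tmp\HH_lee.tif", 7, 0.25)
# This shells out to SIMOrthoProgram-S-SAR.exe in mode 12, so the relative
# exe path used above must exist under the current working directory.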
if __name__ == '__main__':
# example 1
# path = r"I:\MicroWorkspace\product\C-SAR\LeafAreaIndex\Temporary\cai_sartif\HV_0_512_0_512.tif"
# f = Filter()
# f.lee_filter(path,path,3)
# example 2
f = Filter()
f.lee_filter_block_multiprocess(r'I:\preprocessed\HH.tif', r'I:\preprocessed\HHf.tif')
pass

View File

@ -0,0 +1,124 @@
# -*- coding: UTF-8 -*-
"""
@Project:__init__.py
@File:lee_filter.py
@Function:lee_filter
@Contact: https://github.com/PyRadar/pyradar
@Author:SHJ
@Date:2021/8/30 8:42
@Version:1.0.0
"""
import os
cimport cython  # required
import numpy as np  # numpy must be imported at the Python level ...
cimport numpy as np  # ... and cimported at the C level
from libc.math cimport pi
from libc.math cimport atan as math_atan
from libc.math cimport log10 as math_log10
from libc.math cimport log as math_log
from libc.math cimport floor as math_floor
from libc.math cimport sqrt as math_sqrt
from libc.math cimport exp as math_exp
from libc.math cimport sin as math_sin
from libc.math cimport cos as math_cos
from libc.math cimport tan as math_tan
from libc.math cimport asin as math_asin
from libc.math cimport acos as math_acos
from libc.math cimport sinh as math_sinh
from libc.math cimport cosh as math_cosh
from libc.math cimport tanh as math_tanh
from libc.math cimport ceil as math_ceil
from libc.math cimport lround as math_round
cdef double COEF_VAR_DEFAULT = 0.01
cdef double CU_DEFAULT = 0.25
cdef int ceil_usr(double v):
return int(math_ceil(v))
cdef double weighting(np.ndarray[double,ndim=2] window,double cu):
"""
Computes the weighting function for the Lee filter using cu as the noise
coefficient.
"""
# cu is the noise variation coefficient
cdef double two_cu = cu * cu
# ci is the variation coefficient in the window
cdef double window_mean = window.mean()
cdef double window_std = window.std()
cdef double ci = window_std / window_mean
cdef double two_ci = ci * ci
cdef double w_t = 0
if two_ci == 0: # dirty patch to avoid zero division
two_ci = COEF_VAR_DEFAULT
if cu > ci:
w_t = 0.0
else:
w_t = 1.0 - (two_cu / two_ci)
return w_t
cpdef np.ndarray[double,ndim=2] lee_filter_array(np.ndarray[double,ndim=2] img,np.ndarray[double,ndim=2] out_arry,int win_size):
"""
Apply lee to a numpy matrix containing the image, with a window of
win_size x win_size.
"""
# we process the entire img as float64 to avoid type overflow error
#n, m = img.shape
cdef double cu = CU_DEFAULT
cdef int i=0
cdef int j=0
cdef int xleft=0
cdef int xright=0
cdef int yup=0
cdef int ydown=0
cdef np.ndarray[double,ndim=2] window
cdef double w_t = 0
cdef double window_mean = 0
cdef double new_pix_value = 0
cdef int n = img.shape[0]
cdef int m=img.shape[1]
cdef int win_offset=int(win_size/2)
while i<n:
xleft=ceil_usr(i-win_offset)
xright=int(i+win_offset)
if xleft < 0:
xleft = 0
if xright >= n:
xright = n
j=0
while j<m:
yup = ceil_usr(j - win_offset)
ydown = int(j + win_offset)
if yup < 0:
yup = 0
if ydown >= m:
ydown = m
pix_value = img[i, j]
window = img[xleft:xright+1, yup:ydown+1]
w_t = weighting(window, cu)
window_mean = np.mean(window)
new_pix_value = (pix_value * w_t) + (window_mean * (1.0 - w_t))
if not new_pix_value > 0:
new_pix_value = 0
out_arry[i, j] = round(new_pix_value*100000.0)/100000.0
j=j+1
i=i+1
return out_arry
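# --- Build note ---------------------------------------------------------------
# This .pyx module is compiled with the setup.py shown in the next file
# ("python setup.py build_ext --inplace") and is then imported from
# lee_filter.py as tool.algorithm.algtools.filter.lee_Filter_c.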

View File

@ -0,0 +1,45 @@
from setuptools import setup
from setuptools.extension import Extension
from Cython.Distutils import build_ext
from Cython.Build import cythonize
import numpy
from pathlib import Path
import shutil
class MyBuildExt(build_ext):
def run(self):
build_ext.run(self)
build_dir = Path(self.build_lib)
root_dir = Path(__file__).parent
target_dir = build_dir if not self.inplace else root_dir
self.copy_file(Path('./lee_Filter') / '__init__.py', root_dir, target_dir)
#self.copy_file(Path('./pkg2') / '__init__.py', root_dir, target_dir)
self.copy_file(Path('.') / '__init__.py', root_dir, target_dir)
def copy_file(self, path, source_dir, destination_dir):
if not (source_dir / path).exists():
return
shutil.copyfile(str(source_dir / path), str(destination_dir / path))
setup(
name="MyModule",
ext_modules=cythonize(
[
#Extension("pkg1.*", ["root/pkg1/*.py"]),
Extension("pkg2.*", ["./lee_Filter/lee_Filter_c.pyx"]),
#Extension("1.*", ["root/*.py"])
],
build_dir="build",
compiler_directives=dict(
always_allow_keywords=True
)),
cmdclass=dict(
build_ext=MyBuildExt
),
packages=[],
include_dirs=[numpy.get_include()],
)
# command: python setup.py build_ext --inplace

View File

@ -0,0 +1,90 @@
# -*- coding: UTF-8 -*-
"""
@Project microproduct
@File logHandler.py
@Function log creation and housekeeping
@Author SHJ
@Date 2021/12/1
@Version 1.0.0
"""
import logging
import os
import time
import datetime
class LogHandler:
"""
Creates and manages the run log.
"""
__logger = logging.getLogger("mylog")
__format_str = logging.Formatter("[%(asctime)s] [%(process)d] [%(levelname)s] - %(module)s.%(funcName)s "
"(%(filename)s:%(lineno)d) - %(message)s")
__log_path = None
@staticmethod
def init_log_handler(log_name):
"""
Initialise the log handler.
:param log_name: path and base name for the saved log file
:return:
"""
path = os.getcwd()
current_time = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime(time.time()))
LogHandler.__log_path = os.path.join(path, log_name + current_time + ".log")
para_dir = os.path.split(LogHandler.__log_path)
if not os.path.exists(para_dir[0]):
os.makedirs(para_dir[0])
# delete log files older than seven days
LogHandler.delete_outdate_files(para_dir[0])
# method 1: plain file log
log_format = "[%(asctime)s] [%(process)d] [%(levelname)s]- %(message)s ---from: %(module)s.%(funcName)s" \
" (%(filename)s:Line%(lineno)d) "
date_format = "%m/%d/%Y %H:%M:%S"
fp = logging.FileHandler(LogHandler.__log_path, encoding='utf-8')
fs = logging.StreamHandler()
logging.basicConfig(level=logging.INFO, format=log_format, datefmt=date_format, handlers=[fp, fs])
# method 2: rotating log (kept for reference, disabled)
# LogHandler.__logger.setLevel(logging.DEBUG)
# th = handlers.TimedRotatingFileHandler(filename=LogHandler.__log_path, when='S', interval=1,
# backupCount=2, encoding='utf-8')
# th.suffix = "%Y-%m-%d-%H-%M-%S.log"
# th.setFormatter(LogHandler.__format_str)
# th.setLevel(level=logging.DEBUG)
# console = logging.StreamHandler()
# console.setLevel(logging.INFO)
# LogHandler.__logger.addHandler(console)
# LogHandler.__logger.addHandler(th)
@staticmethod
def delete_outdate_files(path, date_interval=7):
"""
Delete files in the directory that are older than date_interval (default seven) days.
"""
current_time = time.strftime("%Y-%m-%d", time.localtime(time.time()))
current_time_list = current_time.split("-")
current_time_day = datetime.datetime(int(current_time_list[0]), int(current_time_list[1]),
int(current_time_list[2]))
for root, dirs, files in os.walk(path):
for item in files:
item_format = item.split(".", 2)
if len(item_format) > 1 and item_format[1] == "log":
file_path = os.path.join(root, item)
create_time = time.strftime("%Y-%m-%d", time.localtime((os.stat(file_path)).st_mtime))
create_time_list = create_time.split("-")
create_time_day = datetime.datetime(int(create_time_list[0]), int(create_time_list[1]),
int(create_time_list[2]))
time_difference = (current_time_day - create_time_day).days
if time_difference > date_interval:
os.remove(file_path)
#
# if __name__ == "__main__":
# # eg2:
# log_handler = LogHandler()
# log_handler.init_log_handler(r"run_log\myrun1")
# logging.warning("1")
# print("done")

View File

@ -0,0 +1,90 @@
# -*- coding: utf-8 -*-
"""
Created on Sun Mar 14 18:53:14 2021
@author: Dipankar
References
----------
Oh (2004): Quantitative retrieval of soil moisture content and surface roughness from multipolarized radar observations of bare soil surface. IEEE TGRS 42(3). 596-601.
"""
# ---------------------------------------------------------------------------------------
# Copyright (C) 2021 by Microwave Remote Sensing Lab, IITBombay http://www.mrslab.in
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 3 of the License, or (at your option)
# any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
# You should have received a copy of the GNU General Public License along
# with this program; if not, see http://www.gnu.org/licenses/
# ---------------------------------------------------------------------------------------
import numpy as np
#import matplotlib.pyplot as plt
## Description: Given sigma_0_vv, sigma_0_hh, and sigma_0_hv, the inverse
## model computes s and mv
sigma0vvdB = -14.1
sigma0hhdB = -16.0
sigma0hvdB = -26.5
theta = 35. ##Incidence angle
f = 5.0 ##GHz
k = 2*np.pi*f/0.3 #calculate the wave number
theta_rad = theta*np.pi/180 #represent angle in radians
sigma_0_vv = np.power(10,(sigma0vvdB/10)) # represent data in linear scale
sigma_0_hh = np.power(10,(sigma0hhdB/10))
sigma_0_hv = np.power(10,(sigma0hvdB/10))
p = sigma_0_hh / sigma_0_vv #calculate the p-ratio
q = sigma_0_hv / sigma_0_vv #calculate the q-ratio
mv0 = np.arange(0.05,0.5,0.01) # candidate volumetric soil moisture values (fine increments)
## First estimates s1 and mv1
ks = ((-1)*3.125*np.log(1 - sigma_0_hv/(0.11 * mv0**0.7 * (np.cos(theta_rad))**2.2)))**0.556
err = (1 - (2.*theta_rad/np.pi)**(0.35*mv0**(-0.65)) * np.exp(-0.4 * ks**1.4))-p
abs_err = np.abs(err)
min_err = np.min(abs_err) #find the value of minimum error
mv1 = mv0[np.where(abs_err == min_err)]
ks1 = ((-1)*3.125*np.log(1 - sigma_0_hv/(0.11 * mv1**0.7 * (np.cos(theta_rad))**2.2)))**0.556
s1 = ks1/k
## Second estimate s2 and mv2
ks2 = (np.log(1-(q/(0.095 * (0.13 + np.sin(1.5*theta_rad))**1.4))) /(-1.3))**(10./9.)
s2 = ks2/k
xx = (1-p)/np.exp(-0.4 * ks2**1.4)
if xx<=0:
mv2 =0
else:
yy = np.log(xx)/(0.35*np.log(2*theta_rad/np.pi))
mv2 = yy**(-100/65)
print(mv2,yy,np.power(yy,-100/65))
## Third estimate mv3
mv3 = ((sigma_0_hv/(1 - np.exp(-0.32 * ks2**1.8)))/(0.11 * np.cos(theta_rad)**2.2))**(1/0.7)
## weighted average s and mv-------------------------------------
sf = (s1 + 0.25*s2)/(1+0.25)
mvf = (mv1+mv2+mv3)/3
print(mv1,mv2,mv3,s1,s2)
print('Estimated rms height s (cm): ', sf*100)
print('Estimated volumetric soil moisture: ', mvf)
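# --- Model note (forward relations that this script inverts, read off the
# expressions above) -----------------------------------------------------------
#   sigma0_hv = 0.11 * mv**0.7 * cos(theta)**2.2 * (1 - exp(-0.32 * (k*s)**1.8))
#   p = sigma0_hh / sigma0_vv = 1 - (2*theta/pi)**(0.35 * mv**-0.65) * exp(-0.4 * (k*s)**1.4)
#   q = sigma0_hv / sigma0_vv = 0.095 * (0.13 + sin(1.5*theta))**1.4 * (1 - exp(-1.3 * (k*s)**0.9))
# with theta in radians, k the wave number and s the rms surface height.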

View File

@ -0,0 +1,128 @@
# -*- coding: utf-8 -*-
"""
Created on Tue Jun 4 14:59:54 2013
@author: Sat Kumar Tomer
@email: satkumartomer@gmail.com
@website: www.ambhas.com
"""
cimport cython  # required
import numpy as np  # numpy must be imported at the Python level ...
cimport numpy as np  # ... and cimported at the C level
from libc.math cimport pi
from scipy.optimize import fmin
cpdef np.ndarray[double,ndim=1] inverse_oh2004(double sigma0vvdB,double sigma0hhdB,double sigma0hvdB,double theta,double f):
"""
sigma0vvdB = -14.1 dB
sigma0hhdB = -16.0
sigma0hvdB = -26.5
theta = 35. ##Incidence angle
f = 5.0 ##GHz
"""
#print("--------------------------------------------------------\n")
cdef np.ndarray[double,ndim=1] result=np.ones((2))
result[0]=np.nan
result[1]=np.nan
#print("*************设置为nan****************")
#print(sigma0vvdB,sigma0hhdB,sigma0hvdB,theta,f)
cdef double k = 2*3.1415926*f/0.299792458; #calculate the wave number
cdef double theta_rad = theta*3.1415926/180; #represent angle in radians
cdef double sigma_0_vv = np.power(10.,(sigma0vvdB/10.)) #%represent data in linear scale
cdef double sigma_0_hh = np.power(10.,(sigma0hhdB/10.))
cdef double sigma_0_hv = np.power(10.,(sigma0hvdB/10.))
if sigma_0_vv==0:
#print("***********sigma_0_vv==0*************")
return result
cdef double p = sigma_0_hh / sigma_0_vv; #calculate the p-ratio
cdef double q = sigma_0_hv / sigma_0_vv; #calculate the q-ratio
cdef np.ndarray[double,ndim=1] mv0 = np.arange(0.05,0.9,0.01) # candidate volumetric soil moisture values (fine increments)
## First estimates s1 and mv1
cdef np.ndarray[double,ndim=1] ks = ((-1.)*3.125*np.log(1 - sigma_0_hv/(0.11 * mv0**0.7 * (np.cos(theta_rad))**2.2)))**0.556
cdef np.ndarray[double,ndim=1] err = (1. - (2.*theta_rad/np.pi)**(0.35*mv0**(-0.65)) * np.exp(-0.4 * ks**1.4))-p
cdef np.ndarray[double,ndim=1] abs_err = np.abs(err);
cdef double min_err = np.nanmin(abs_err); #find the value of minimum error
#print(np.where(abs_err == min_err)[0].shape)
if np.isnan(min_err) or np.max(np.where(abs_err == min_err)[0].shape) == 0:
#print("***************min_err==np.nan or np.max(np.where(abs_err == min_err)[0].shape)==0")
return result
cdef double mv1 = mv0[np.where(abs_err == min_err)[0][0]]
cdef double temp_ks1=1. - sigma_0_hv/(0.11 * mv1**0.7 * (np.cos(theta_rad))**2.2)
if temp_ks1<0:
#print("*********************temp_ks1<0")
return result
cdef double ks1 = ((-1)*3.125*np.log(temp_ks1))**0.556
cdef double s1 = ks1/k
## Second estimate s2 and mv2
cdef double ks2 = (np.log(1-(q/(0.095 * (0.13 + np.sin(1.5*theta_rad))**1.4))) /(-1.3))**(10./9.)
cdef double s2 = ks2/k
cdef double mv2 =0.
cdef double yy =0.
cdef double xx = (1-p)/np.exp(-0.4 * ks2**1.4)
if xx<=0:
mv2 =0.
else:
yy = np.log(xx)/(0.35*np.log(2*theta_rad/np.pi))
mv2=np.power(yy,-100.0/65)
## Third estimate mv3
cdef double mv3 = ((sigma_0_hv/(1 - np.exp(-0.32 * ks2**1.8)))/(0.11 * np.cos(theta_rad)**2.2))**(1/0.7)
## weighted average s and mv-------------------------------------
#print("q:\t",q)
#print("k:\t",k)
#print("ks1:\t",ks1)
#print("ks2:\t",ks2)
#print("theta_rad:\t",theta_rad)
cdef double sf = (s1 + 0.25*s2)/(1+0.25)
cdef double mvf = (mv1+mv2+mv3)/3
result[0]=mvf*1.0
result[1]=sf*1.0
#print("mv1:\t",mv1)
#print("mv2:\t",mv2)
#print("mv3:\t",mv3)
#print("s1:\t",s1)
#print("s2:\t",s2)
#print("Estimated volumetric soil moisture: ",result[0])
#print("Estimated rms height s (m): ",result[1])
#print("\nend\n")
return result
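# --- Usage sketch (input values taken from the standalone test script) --------
#     result = inverse_oh2004(-14.1, -16.0, -26.5, 35.0, 5.0)
#     mv, s = result[0], result[1]  # volumetric soil moisture, rms height in metres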
cpdef double lamda2freq(double lamda):
return 299792458.0/lamda
cpdef double freq2lamda(double freq):
return 299792458.0/freq
# double sigma0vvdB,double sigma0hhdB,double sigma0hvdB,double theta,double f
cpdef int retrieve_oh2004_main(int n,np.ndarray[double,ndim=1] mv,np.ndarray[double,ndim=1] h,np.ndarray[int,ndim=1] mask,np.ndarray[double,ndim=1] sigma0vvdB,np.ndarray[double,ndim=1] sigma0hhdB,np.ndarray[double,ndim=1] sigma0hvdB, np.ndarray[double,ndim=1] vh, np.ndarray[double,ndim=1] theta,double f):
cdef int i=0;
cdef np.ndarray[double,ndim=1] result;
while i<n:
if mask[i]<0.5:
mv[i]=np.nan
h[i] =np.nan
else:
#print(i)
##print(sigma0vvdB[i], sigma0hhdB[i],sigma0hvdB[i], theta[i], f)
result= inverse_oh2004(sigma0vvdB[i], sigma0hhdB[i],sigma0hvdB[i], theta[i], f)
##print(result)
mv[i]=result[0]
h[i] =result[1]
##print(mv[i],h[i])
##print(result[0],result[1])
i=i+1
return 1

View File

@ -0,0 +1,45 @@
from setuptools import setup
from setuptools.extension import Extension
from Cython.Distutils import build_ext
from Cython.Build import cythonize
import numpy
from pathlib import Path
import shutil
class MyBuildExt(build_ext):
def run(self):
build_ext.run(self)
build_dir = Path(self.build_lib)
root_dir = Path(__file__).parent
target_dir = build_dir if not self.inplace else root_dir
self.copy_file(Path('./oh2004') / '__init__.py', root_dir, target_dir)
#self.copy_file(Path('./pkg2') / '__init__.py', root_dir, target_dir)
self.copy_file(Path('.') / '__init__.py', root_dir, target_dir)
def copy_file(self, path, source_dir, destination_dir):
if not (source_dir / path).exists():
return
shutil.copyfile(str(source_dir / path), str(destination_dir / path))
setup(
name="MyModule",
ext_modules=cythonize(
[
#Extension("pkg1.*", ["root/pkg1/*.py"]),
Extension("pkg2.*", ["./oh2004/oh2004.pyx"]),
#Extension("1.*", ["root/*.py"])
],
build_dir="build",
compiler_directives=dict(
always_allow_keywords=True
)),
cmdclass=dict(
build_ext=MyBuildExt
),
packages=[],
include_dirs=[numpy.get_include()],
)
# command: python setup.py build_ext --inplace

View File

@ -0,0 +1,26 @@
import numpy as np
import oh2004
sigma0vvdB = -14.1
sigma0hhdB = -16.0
sigma0hvdB = -26.5
theta = 35. ##Incidence angle
f = 5.0 ##GHz
#print(sigma0vvdB,sigma0hhdB,sigma0hvdB,theta,f)
#print(oh2004.inverse_oh2004(sigma0vvdB,sigma0hhdB,sigma0hvdB,theta,f))
n=3
mask=np.ones((3))
mask[1]=0
mask=mask.astype(np.int32)
sigma0hhdB=np.ones((3))*sigma0hhdB
sigma0vvdB=np.ones((3))*sigma0vvdB
sigma0hvdB=np.ones((3))*sigma0hvdB
theta=np.ones((3))*theta
mv=np.zeros(3)*1.0
h=np.zeros(3)*1.0
oh2004.retrieve_oh2004_main(n,mv,h, mask,sigma0vvdB,sigma0hhdB,sigma0hvdB,sigma0hvdB, theta,f)
print(mask)
print(mv)
print(h)

Some files were not shown because too many files have changed in this diff.