Upload S-SAR elevation (DEM) algorithm

dev
tian jiax 2025-02-14 15:31:48 +08:00
parent 6f302a1950
commit c6a2330dd4
2511 changed files with 682594 additions and 0 deletions

504
dem-S-SAR/AlgXmlHandle.py Normal file

@@ -0,0 +1,504 @@
# -*- coding: UTF-8 -*-
"""
@Project microproduct
@File AlgXmlHandle.py
@Function Read, write and validate the algorithm description XML file
@Contact https://www.cnblogs.com/feifeifeisir/p/10893127.html
@Author SHJ
@Date 2021/9/6
@Version 1.0.0
"""
import logging
from xml.etree.ElementTree import ElementTree
import os
import re
import platform
import psutil
import multiprocessing
import ctypes
from tool.algorithm.image import ImageHandle
logger = logging.getLogger("mylog")
class ManageAlgXML:
"""
Validate and read information from the algorithm XML file
"""
def __init__(self, xml_path):
self.in_path = xml_path
self.__tree = ElementTree()
self.__root = None
self.__alg_compt = None
self.__workspace_path = None
self.__taskID = None
self.__envs = {}
self.__input_paras = {}
self.__output_paras = {}
self.__init_flag = False
def init_xml(self):
"""
初始化XML文件
:return: True初始化成功 False 初始化失败
"""
try:
self.__tree.parse(self.in_path)
except FileNotFoundError as ex:
raise Exception(ex)
except BaseException:
raise Exception("cannot open algXMl")
self.__root = self.__tree.getroot()
if self.__root is None:
raise Exception("get root failed")
self.__alg_compt = self.__root.find("AlgCompt")
if self.__alg_compt is None:
raise Exception("get AlgCompt failed")
self.__workspace_path = self.__check_workspace_path()
if self.__workspace_path is None:
raise Exception("check workspace_path failed")
self.__taskID = self.__check_task_id()
if self.__taskID is None:
raise Exception("check taskID failed")
self.__envs = self.__check_environment()
if self.__envs is None or self.__envs == {}:
raise Exception("check environment failed")
self.__input_paras = self.__check_input_para()
if self.__input_paras is None or self.__input_paras == {}:
raise Exception("check input para failed")
self.__output_paras = self.__check_output_para()
self.__init_flag = True
return True
def get_workspace_path(self):
"""
获取工作空间路径
:return: 工作空间路径 None-异常
"""
if not self.__init_flag:
raise Exception("XML is not initialized")
return self.__workspace_path
def get_task_id(self):
"""
获取任务ID
:return: taskID None-异常
"""
if not self.__init_flag:
raise Exception("XML is not initialized")
return self.__taskID
def get_envs(self):
"""
获取运行环境要求
:return:运行环境要求 None-异常
"""
if not self.__init_flag:
raise Exception("XML is not initialized")
return self.__envs
def get_input_paras(self):
"""
获取输入参数
:return:输入参数 None-异常
"""
if not self.__init_flag:
raise Exception("XML is not initialized")
return self.__input_paras
def get_output_paras(self):
"""
获取输出参数
:return:输出参数 None-异常
"""
if not self.__init_flag:
raise Exception("XML is not initialized")
return self.__output_paras
def __check_workspace_path(self):
"""
Check the workspace path
:return: workspace path; raises Exception on error
"""
workspace_note = self.__root.find("WorkSpace")
if workspace_note is None or workspace_note.text is None:
raise Exception("'workspace_path' is None")
workspace_path = str(workspace_note.text).replace("\n", "").replace(' ', '')  # strip spaces and newlines
if not os.path.isdir(workspace_path):
raise Exception("'workspace_path' does not exist: %s" % workspace_path)
if workspace_path[-1] != '\\':
workspace_path += "\\"  # ensure a trailing backslash so later path concatenation works
return workspace_path
def __check_environment(self):
"""
检查XML文件中运行环境要求
:return: dic-运行环境要求 None-异常
"""
env_note = self.__alg_compt.find("Environment")
is_cluster = int(env_note.find("IsCluster").text.replace("\n", "").replace(' ', ''))
is_legal = is_cluster in [0, 1]
if not is_legal:
raise Exception("IsCluster is not 0 or 1")
cluster_num = int(env_note.find("ClusterNum").text)
is_legal = cluster_num in [0, 1, 2, 3, 4, 5, 6, 7]
if not is_legal:
raise Exception("cluster_num is beyond [0,1,2,3,4,5,6,7]")
operating_system = env_note.find("OperatingSystem").text.replace("\n", "").replace(' ', '') #去除空格和回车
# is_legal = operating_system in ["Windows10", "Windows7", "WindowsXP"]
# if not is_legal:
# raise Exception("OperatingSystem is beyond [Windows10, Windows7, WindowsXP]")
cpu = env_note.find("CPU").text.replace("\n", "").replace(' ', '') #去除空格和回车
is_legal = cpu in ["单核", "双核", "3核", "4核", "6核", "8核"]
if not is_legal:
raise Exception("OperatingSystem is beyond [单核, 双核, 3核, 4核, 6核, 8核]")
memory = env_note.find("Memory").text.replace("\n", "").replace(' ', '') #去除空格和回车
is_legal = memory in ["1GB", "2GB", "4GB", "6GB", "8GB", "10GB", "12GB", "16GB"]
if not is_legal:
raise Exception("OperatingSystem is beyond [1GB, 2GB, 4GB, 6GB, 8GB, 10GB, 12GB, 16GB]")
storage = env_note.find("Storage").text.replace("\n", "").replace(' ', '') #去除空格和回车
is_legal = int(storage[:-2]) > 0
if not is_legal:
raise Exception("Storage < 0GB")
network_card = env_note.find("NetworkCard").text
# is_legal = network_card in ["无需求"]
# if not is_legal:
# # 输出异常
# return
band_width = env_note.find("Bandwidth").text
# is_legal = band_width in ["无需求"]
# if not is_legal:
# # 输出异常
# return
gpu = env_note.find("GPU").text
# is_legal = GPU in ["无需求"]
# if not is_legal:
# # 输出异常
# return
envs = {"is_Cluster": is_cluster, "cluster_num": cluster_num, "operating_system": operating_system,
"CPU": cpu, "memory": memory}
envs.update({"Storage": storage, "network_card": network_card, "band_width": band_width, "GPU": gpu})
return envs
def __check_input_para(self):
"""
检查XML文件中输入参数
:return: dic-输入参数 None-异常
"""
input_paras_note = self.__alg_compt.find("Inputs")
paras_num = int(input_paras_note.attrib.get("ParameterNum"))
para_list = input_paras_note.findall("Parameter")
if paras_num != len(para_list):
msg ="'ParameterNum':"+ str(paras_num) + " != number of 'Parameter':" + str(len(para_list))
raise Exception(msg)
input_paras = {}
for para in para_list:
para_name = para.find("ParaName").text.replace("\n", "").replace(' ', '') #去除空格和回车
para_chs_name = para.find("ParaChsName").text.replace("\n", "").replace(' ', '') #去除空格和回车
para_type = para.find("ParaType").text.replace("\n", "").replace(' ', '') #去除空格和回车
data_type = para.find("DataType").text.replace("\n", "").replace(' ', '') #去除空格和回车
para_value = para.find("ParaValue").text.replace("\n", "").replace(' ', '') #去除空格和回车
input_para = {"ParaName": para_name, "ParaChsName": para_chs_name, "ParaType": para_type,
"DataType": data_type, "ParaValue": para_value}
#print(para_name)
if para_type == "Value":
max_value = para.find("MaxValue").text
min_value = para.find("MinValue").text
option_value = para.find("OptionValue").text.replace("\n", "").replace(' ', '') #去除空格和回车
input_para.update({"MaxValue": max_value, "MinValue": min_value, "OptionValue": option_value})
if para_name is None or para_type is None or para_value is None:
msg = 'there is None among para_name:' + para_name + ',para_type:' + para_type + 'or para_value:' + para_value + '!'
raise Exception(msg)
input_paras.update({para_name: input_para})
return input_paras
def __check_output_para(self):
"""
检查XML文件中输出参数
:return: dic-输出参数 None-异常
"""
output_paras_note = self.__alg_compt.find("Outputs")
paras_num = int(output_paras_note.attrib.get("ParameterNum"))
para_list = output_paras_note.findall("Parameter")
if paras_num != len(para_list):
raise Exception("'ParameterNum' != number of 'Parameter'")
output_paras = {}
# for para in para_list:
#
# para_name = para.find("ParaName").text.replace("\n", "").replace(' ', '') #去除空格和回车
# para_chs_name = para.find("ParaChsName").text.replace("\n", "").replace(' ', '') #去除空格和回车
# para_type = para.find("ParaType").text.replace("\n", "").replace(' ', '') #去除空格和回车
# data_type = para.find("DataType").text.replace("\n", "").replace(' ', '') #去除空格和回车
# para_value = para.find("ParaValue").text.replace("\n", "").replace(' ', '') #去除空格和回车
# no_data_value = para.find("NoDataValue").text.replace("\n", "").replace(' ', '') #去除空格和回车
# output_para = {"ParaName": para_name, "ParaChsName": para_chs_name, "ParaType": para_type,
# "DataType": data_type, "ParaValue": para_value, "NoDataValue": no_data_value}
#
# if para_type == "Value":
# max_value = para.find("MaxValue").text.replace("\n", "").replace(' ', '') #去除空格和回车
# min_value = para.find("MinValue").text.replace("\n", "").replace(' ', '') #去除空格和回车
# option_value = para.find("OptionValue").text.replace("\n", "").replace(' ', '') #去除空格和回车
# output_para.update({"MaxValue": max_value, "MinValue": min_value, "OptionValue": option_value})
#
# if para_name is None or para_type is None or para_value is None:
# msg = 'there is None among para_name:'+ para_name + ',para_type:'+ para_type + 'or para_value:'+ para_value +'!'
# raise Exception(msg)
# output_paras.update({para_name: output_para})
return output_paras
def write_out_para(self,para_name='BackScatteringProduct', para_value="D:\\workspace\\Output\\BackScatteringProduct.tar.gz"):
"""
写入输出参数
"""
self.__tree.parse(self.in_path)
root = self.__tree.getroot()
alg_compt = root.find("AlgCompt")
output_paras_note =alg_compt.find("Outputs")
para_list = output_paras_note.findall("Parameter")
flag = False
for para in para_list:
if para.find("ParaName").text == para_name:
para.find("ParaValue").text = para_value
flag = True
if flag == False:
raise Exception('Cannot find Output Parameter:'+para_name+'!')
self.__tree.write(self.in_path, encoding="utf-8", xml_declaration=True)
def __check_task_id(self):
"""
检查任务ID
:return: taskID None-异常
"""
task_id_note = self.__root.find("TaskID")
task_id = str(task_id_note.text).replace("\n", "").replace(' ', '') #去除空格和回车
if task_id is None:
raise Exception("'TaskID' is None")
return task_id
class CheckSource:
"""
Check the completeness and validity of the resources declared in the configuration file
"""
def __init__(self, alg_xml_handle):
self.__alg_xml_handle = alg_xml_handle
self.imageHandler = ImageHandle.ImageHandler()
self.__ParameterDic={}
def check_alg_xml(self):
"""
检查算法配置文件
"""
if self.__alg_xml_handle.init_xml():
logger.info('init algXML succeed')
return True
else:
raise Exception('init algXML failed')
def check_run_env(self):
"""
:return: True if the runtime environment meets the requirements; raises Exception otherwise
"""
envs = self.__alg_xml_handle.get_envs()
# Check the operating system
local_plat = platform.platform()
local_plat_list = local_plat.split("-")
flag = envs['operating_system'] == local_plat_list[0]+local_plat_list[1]
if flag is False:
msg = 'operating_system:' + local_plat_list[0] + local_plat_list[1] + ' is not ' + envs['operating_system']
#raise Exception(msg)
# Check system memory (RAM)
mem = psutil.virtual_memory()
mem_total = int(round(mem.total / 1024 / 1024 / 1024, 0))
mem_free = round(mem.free / 1024 / 1024 / 1024, 0)
env_memory = envs['memory']
env_memory = int(env_memory[:-2])
if env_memory > mem_total:
msg = 'memory_total ' + str(mem_total) + 'GB is less than ' + str(env_memory) + 'GB'
raise Exception(msg)
if env_memory >= mem_free:
msg = 'mem_free ' + str(mem_free) + 'GB is less than ' + str(env_memory) + 'GB'
logger.warning(msg)
# Check the number of CPU cores
env_cpu = envs['CPU']
if env_cpu == "单核":
env_cpu_core_num = 1
elif env_cpu == "双核":
env_cpu_core_num = 2
elif env_cpu == "3核":
env_cpu_core_num = 3
else:
env_cpu_core_num = int(env_cpu[:-1])
local_cpu_core_num = int(multiprocessing.cpu_count() / 2)  # logical core count / 2, approximating physical cores
if env_cpu_core_num > local_cpu_core_num:
msg = 'CPU_core_num ' + str(local_cpu_core_num) + ' core is less than ' + str(env_cpu_core_num) + ' core'
raise Exception(msg)
# Check free disk space of the workspace drive
env_storage = envs['Storage']
env_storage = int(env_storage[:-2])
workspace_path = self.__alg_xml_handle.get_workspace_path()
if not os.path.isdir(workspace_path):
raise Exception('workspace_path:%s does not exist!' % workspace_path)
local_storage = self.__get_free_space_mb(workspace_path)
if env_storage > local_storage:
msg = 'workspace storage ' + str(local_storage) + 'GB is less than ' + envs['Storage']
# raise Exception(msg)
return True
@staticmethod
def __get_free_space_mb(folder):
"""
:param folder: path to check, e.g. 'C:\\'
:return: free space of the folder/drive in GB (despite the '_mb' in the method name)
"""
if platform.system() == 'Windows':
free_bytes = ctypes.c_ulonglong(0)
ctypes.windll.kernel32.GetDiskFreeSpaceExW(ctypes.c_wchar_p(folder), None, None, ctypes.pointer(free_bytes))
return free_bytes.value / 1024 / 1024 / 1024
else:
st = os.statvfs(folder)
return st.f_bavail * st.f_frsize / 1024 / 1024 / 1024  # bytes -> GB, matching the Windows branch
def check_input_paras(self, input_para_names):
"""
:param input_para_names: list of parameter names to check [name1, name2, ...]
:return: (True, dict of checked parameter values); raises Exception on error
"""
workspace_path = self.__alg_xml_handle.get_workspace_path()
input_paras = self.__alg_xml_handle.get_input_paras()
for name in input_para_names:
para = input_paras[name]
if para is None:
msg = "check para:"+name + " is failed!"+"para is None!"
raise Exception(msg)
if para['ParaType'] == 'File':
if para['DataType'] == 'tif':
para_value_list = para['ParaValue'].split(";")
for para_value in para_value_list:
# para_path = workspace_path + para_value
para_path = para_value
if self.__check_tif(para_path) is False:
msg = "check para:"+name + " is failed!" + "Path:" + para_path
raise Exception(msg)
if para['DataType'] == 'xml':
para_path = workspace_path + para['ParaValue']
if not os.path.exists(para_path):
raise Exception('para_file:%s does not exist!' % para_path)
if para['DataType'] == 'File':
para_path = workspace_path + para['ParaValue']
if os.path.isdir(para_path) is False:
msg = "check para:" + name + " is failed!" + "FilePath:" + para_path
raise Exception(msg)
elif para['ParaType'] == 'Value':
if para['DataType'] == 'float' or para['DataType'] == 'int' or para['DataType'] == 'double':
if para['ParaValue'] is None:
msg = "check para:"+name + " is failed!"+"'ParaValue' is None"
raise Exception(msg)
if self.__is_number(para['ParaValue']) is False:
raise Exception("para:"+name+" is not number!" )
self.__ParameterDic[name]=para['ParaValue']
__workspace_path = workspace_path
__input_paras = input_paras
return True,self.__ParameterDic
def check_output_paras(self, output_para_names):
"""
:param output_para_names: list of parameter names to check [name1, name2, ...]
:return: True or False
"""
workspace_path = self.__alg_xml_handle.get_workspace_path()
output_paras = self.__alg_xml_handle.get_output_paras()
for name in output_para_names:
para = output_paras[name]
#print(para)
if para is None:
msg = "check para:" + name + " is failed!" + "para is None!"
raise Exception(msg)
if para['ParaType'] == 'File':
if para['DataType'] == 'tif':
para_path = workspace_path + para['ParaValue']
para_dir = os.path.split(para_path)
flag_isdir = os.path.isdir(para_dir[0])
flag_istif = (para_dir[1].split(".", 1)[1] == "tif")
if not (flag_isdir and flag_istif):  # the directory must exist and the file extension must be .tif
msg = "check para:" + name + " is failed!" + para_path + "is invalid!"
raise Exception(msg)
if para['DataType'] == 'File':
para_path = workspace_path + para['ParaValue']
if os.path.isdir(para_path) is False:
os.makedirs(para_path)
if os.path.isdir(para_path) is False:
msg = "check para:" + name + " is failed!" + para_path + "is invalid!"
raise Exception(msg)
return True
@staticmethod
def __is_number(str_num):
"""
:param str_num: string to check whether it represents a valid int/float/double
:return: True or False
"""
if str_num[0] == '-':
str_num = str_num[1:]
pattern = re.compile(r'(.*)\.(.*)\.(.*)')
if pattern.match(str_num):
return False
return str_num.replace(".", "").isdigit()
def __check_tif(self, filename):
"""
:param filename: path of the tif file
:return: True or False
"""
im_proj, im_geotrans, im_arr = self.imageHandler.read_img(filename)
im_scope = self.imageHandler.get_scope(filename)
if im_proj is None or im_geotrans is None or im_arr.size == 0 or im_scope is None:
msg = "im_proj is None or im_geotrans is None or im_arr.size == 0 or im_scope is None,finame: " + filename
raise Exception(msg)
return True
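# A minimal usage sketch (not part of the original file): how ManageAlgXML and CheckSource are
# typically combined by the main workflow. The XML path and output path below are illustrative only;
# the parameter names mirror those used later in DemMain.py.
# handler = ManageAlgXML('Dem.xml')
# checker = CheckSource(handler)
# if checker.check_alg_xml() and checker.check_run_env():
#     ok, para_dict = checker.check_input_paras(["SARS", "DEM"])
#     checker.check_output_paras(["DemProduct"])
#     handler.write_out_para("DemProduct", r"D:\workspace\Output\DemProduct.tar.gz")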

35
dem-S-SAR/ConfigeHandle.py Normal file

@@ -0,0 +1,35 @@
# -*- coding: UTF-8 -*-
"""
@Project onestar
@File ConfigeHandle.py
@Contact https://blog.csdn.net/songlh1234/article/details/83316468
@Author SHJ
@Date 2021/11/23 16:57
@Version 1.0.0
"""
import os
import configparser
class Config:
"""读写初始化配置文件"""
def __init__(self):
pass
@staticmethod
def get(para_name, option='config', config_name = 'config.ini'):
config = configparser.ConfigParser()
config_path = os.path.join(os.getcwd(), config_name)
config.read(config_path, encoding='utf-8')
value = config.get(option, para_name)
return value
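# A hedged usage sketch (not in the original file): reading typed values from config.ini,
# mirroring how DemMain.py consumes this class; the option names shown are the ones used there.
# exe_name = Config.get('exe_name')
# debug = Config.get('debug') == 'True'      # values are stored as strings in config.ini
# tar_suffix = Config.get('tar')
# alks, rlks = Config.get('alks'), Config.get('rlks')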
if __name__ == '__main__':
# c = Config()
# a = c.get('exe_name')
# b = bool(c.get('debug'))
# d = int(c.get('cover_threshold'))
# f = float(c.get('ndvi_threshold'))
print('done')

127
dem-S-SAR/Dem.xml Normal file

@@ -0,0 +1,127 @@
<?xml version='1.0' encoding='utf-8'?>
<Root>
<TaskID>CSAR_202107275419_0001-0</TaskID>
<WorkSpace>D:\micro\WorkSpace\</WorkSpace>
<AlgCompt>
<DataTransModel>File</DataTransModel>
<Artificial>ElementAlg</Artificial>
<AlgorithmName>Dem-C-SAR-V2.2</AlgorithmName>
<DllName>Dem-C-SAR-V2.2.exe</DllName>
<ChsName>高程产品</ChsName>
<AlgorithmDesc>微波卫星3-5级产品生产模型</AlgorithmDesc>
<AlgorithmAlias>Dem-C-SAR-V2.2-1</AlgorithmAlias>
<Version>2.2</Version>
<AlgorithmClass>陆表类产品_高程产品</AlgorithmClass>
<AlgorithmLevel>5</AlgorithmLevel>
<AlgoirthmID>Dem_中科卫星应用德清研究院_2.2</AlgoirthmID>
<Author>中科卫星应用德清研究院</Author>
<Type>算法</Type>
<jdkVersion>1.8</jdkVersion>
<algDevlanguage>python</algDevlanguage>
<Environment>
<IsCluster>0</IsCluster>
<ClusterNum>0</ClusterNum>
<OperatingSystem>Windows10</OperatingSystem>
<CPU>单核</CPU>
<Memory>4GB</Memory>
<Storage>10GB</Storage>
<NetworkCard>无需求</NetworkCard>
<Bandwidth>无需求</Bandwidth>
<GPU>无需求</GPU>
</Environment>
<Utility Resolution="1" Satellite="GF3C" Sensor="MSS" />
<Inputs ParameterNum="5">
<Parameter>
<ParaName>MainImg</ParaName>
<ParaChsName>主影像时间</ParaChsName>
<Description>哨兵数据主影像的时间,来源于数据名称</Description>
<ParaType>Value</ParaType>
<DataType>string</DataType>
<ParaSource>Man</ParaSource>
<ParaValue>20200218</ParaValue>
<EnModification>True</EnModification>
<EnMultipleChoice>False</EnMultipleChoice>
<Control>UploadInput</Control>
<InputType>Aux</InputType>
<InputNum>0</InputNum>
<DateFrom>Aux</DateFrom>
</Parameter>
<Parameter>
<ParaName>SARS</ParaName>
<ParaChsName>SAR影像文件夹路径</ParaChsName>
<Description>哨兵1号数据存放的文件夹</Description>
<ParaType>File</ParaType>
<DataType>zip</DataType>
<ParaSource>Man</ParaSource>
<ParaValue>
G:\GF3极化干涉数据\2\GF3_SAY_QPSI_018565_E104.1_N30.6_20200218_L1A_AHV_L10004624982.tar.gz;
G:\GF3极化干涉数据\2\GF3_SAY_QPSI_018983_E104.1_N30.6_20200318_L1A_AHV_L10004680879.tar.gz
</ParaValue>
<EnModification>True</EnModification>
<EnMultipleChoice>False</EnMultipleChoice>
<Control>File</Control>
<InputType>Satellite</InputType>
<InputNum>0</InputNum>
<DateFrom>S1A</DateFrom>
</Parameter>
<Parameter>
<ParaName>box</ParaName>
<ParaChsName>经纬度包围盒</ParaChsName>
<Description>经纬度包围盒SNWE。例子30.0;30.2;117.3;117.5 37;38.2;108.87;109.1</Description>
<ParaType>value</ParaType>
<DataType>string</DataType>
<ParaSource>Man</ParaSource>
<ParaValue>empty</ParaValue>
<EnModification>True</EnModification>
<EnMultipleChoice>True</EnMultipleChoice>
<Control>UploadInput</Control>
<InputType>Aux</InputType>
<InputNum>0</InputNum>
<DateFrom>Aux</DateFrom>
</Parameter>
<Parameter>
<ParaName>DEM</ParaName>
<ParaChsName>高程数据路径</ParaChsName>
<Description>高程数据数据。数据来源:30米 ASTGTM2, 数据格式tif。备注数据的经纬度范围必须是整数</Description>
<ParaType>File</ParaType>
<DataType>File</DataType>
<ParaSource>Man</ParaSource>
<ParaValue>G:\GF3极化干涉数据\DEM</ParaValue>
<EnModification>True</EnModification>
<EnMultipleChoice>False</EnMultipleChoice>
<Control>File</Control>
<InputType>DEM</InputType>
<InputNum>0</InputNum>
<DateFrom>DEM</DateFrom>
</Parameter>
<Parameter>
<ParaName>EsdCoherenceThreshold</ParaName>
<ParaChsName>相干性阈值</ParaChsName>
<Description>Coherence threshold for estimating azimuth misregistration using enhanced spectral
diversity
</Description>
<ParaType>Value</ParaType>
<DataType>string</DataType>
<ParaSource>Man</ParaSource>
<ParaValue>0.75</ParaValue>
<EnModification>True</EnModification>
<EnMultipleChoice>True</EnMultipleChoice>
<Control>UploadInput</Control>
<InputType>Aux</InputType>
<InputNum>0</InputNum>
<DateFrom>Aux</DateFrom>
</Parameter>
</Inputs>
<Outputs ParameterNum="1">
<Parameter>
<ParaName>DemProduct</ParaName>
<ParaChsName>高程产品</ParaChsName>
<Description>高程产品</Description>
<ParaType>File</ParaType>
<DataType>tar.gz</DataType>
<ParaSource>Cal</ParaSource>
<ParaValue>D:\micro\WorkSpace\Dem\Output\GF3_SAY_QPSI_018565_E104.1_N30.6_20200218_L1A_AHV_L10004624982-DEM.tar.gz</ParaValue>
</Parameter>
</Outputs>
</AlgCompt>
</Root>

409
dem-S-SAR/DemImageHandle.py Normal file

@@ -0,0 +1,409 @@
# -*- coding: UTF-8 -*-
"""
@Project microproduct
@File DemImageHandle.py
@Function Read the SAR data to be processed, standardize its format, and save the results after processing
@Author SHJ
@Date 2021/10/15
@Version 1.0.0
"""
import logging
import os
from xml.etree.ElementTree import ElementTree
from osgeo import gdal
import numpy as np
from PIL import Image
import cv2
logger = logging.getLogger("mylog")
class DemImageHandler:
"""
Read, edit and save imagery
"""
def __init__(self):
pass
@staticmethod
def get_dataset(filename):
"""
:param filename: tif路径
:return: 图像句柄
"""
gdal.AllRegister()
dataset = gdal.Open(filename)
if dataset is None:
return None
return dataset
def get_scope(self, filename):
"""
:param filename: tif路径
:return: 图像范围
"""
gdal.AllRegister()
dataset = gdal.Open(filename)
if dataset is None:
return None
im_scope = self.cal_img_scope(dataset)
del dataset
return im_scope
@staticmethod
def get_projection(filename):
"""
:param filename: tif路径
:return: 地图投影信息
"""
gdal.AllRegister()
dataset = gdal.Open(filename)
if dataset is None:
return None
im_proj = dataset.GetProjection()
del dataset
return im_proj
@staticmethod
def get_geotransform(filename):
"""
:param filename: tif路径
:return: 从图像坐标空间也称为像素线到地理参考坐标空间投影或地理坐标的仿射变换
"""
gdal.AllRegister()
dataset = gdal.Open(filename)
if dataset is None:
return None
geotransform = dataset.GetGeoTransform()
del dataset
return geotransform
@staticmethod
def get_bands(filename):
"""
:param filename: tif路径
:return: 影像的波段数
"""
gdal.AllRegister()
dataset = gdal.Open(filename)
if dataset is None:
return None
bands = dataset.RasterCount
del dataset
return bands
@staticmethod
def get_band_array(filename, num=1):
"""
:param filename: tif路径
:param num: 波段序号
:return: 对应波段的矩阵数据
"""
gdal.AllRegister()
dataset = gdal.Open(filename)
if dataset is None:
return None
bands = dataset.GetRasterBand(num)
array = bands.ReadAsArray(0, 0, bands.XSize, bands.YSize)
del dataset
return array
@staticmethod
def get_data(filename):
"""
:param filename: tif路径
:return: 获取所有波段的数据
"""
gdal.AllRegister()
dataset = gdal.Open(filename)
if dataset is None:
return None
im_width = dataset.RasterXSize
im_height = dataset.RasterYSize
im_data = dataset.ReadAsArray(0, 0, im_width, im_height)
del dataset
return im_data
@staticmethod
def get_img_width(filename):
"""
:param filename: tif路径
:return: 影像宽度
"""
gdal.AllRegister()
dataset = gdal.Open(filename)
if dataset is None:
return None
width = dataset.RasterXSize
del dataset
return width
@staticmethod
def get_img_height(filename):
"""
:param filename: tif路径
:return: 影像高度
"""
gdal.AllRegister()
dataset = gdal.Open(filename)
if dataset is None:
return None
height = dataset.RasterYSize
del dataset
return height
@staticmethod
def read_img(filename):
"""
影像读取
:param filename:
:return:
"""
gdal.AllRegister()
img_dataset = gdal.Open(filename) # 打开文件
if img_dataset is None:
msg = 'Could not open ' + filename
logger.error(msg)
return None, None, None
im_proj = img_dataset.GetProjection() # 地图投影信息
if im_proj is None:
return None, None, None
im_geotrans = img_dataset.GetGeoTransform() # 仿射矩阵
im_width = img_dataset.RasterXSize # number of columns in the raster
im_height = img_dataset.RasterYSize # number of rows in the raster
im_arr = img_dataset.ReadAsArray(0, 0, im_width, im_height)
del img_dataset
return im_proj, im_geotrans, im_arr
def cal_img_scope(self, dataset):
"""
计算影像的地理坐标范围
根据GDAL的六参数模型将影像图上坐标行列号转为投影坐标或地理坐标根据具体数据的坐标系统转换
:param dataset :GDAL地理数据
:return: list[point_upleft, point_upright, point_downleft, point_downright]
"""
if dataset is None:
return None
img_geotrans = dataset.GetGeoTransform()
if img_geotrans is None:
return None
width = dataset.RasterXSize # 栅格矩阵的列数
height = dataset.RasterYSize # 栅格矩阵的行数
point_upleft = self.trans_rowcol2geo(img_geotrans, 0, 0)
point_upright = self.trans_rowcol2geo(img_geotrans, width, 0)
point_downleft = self.trans_rowcol2geo(img_geotrans, 0, height)
point_downright = self.trans_rowcol2geo(img_geotrans, width, height)
return [point_upleft, point_upright, point_downleft, point_downright]
@staticmethod
def trans_rowcol2geo(img_geotrans,img_col, img_row):
"""
据GDAL的六参数模型仿射矩阵将影像图上坐标行列号转为投影坐标或地理坐标根据具体数据的坐标系统转换
:param img_geotrans: 仿射矩阵
:param img_col:图像纵坐标
:param img_row:图像横坐标
:return: [geo_x,geo_y]
"""
geo_x = img_geotrans[0] + img_geotrans[1] * img_col + img_geotrans[2]* img_row
geo_y = img_geotrans[3] + img_geotrans[4] * img_col + img_geotrans[5]* img_row
return [geo_x, geo_y]
@staticmethod
def write_img(filename, im_proj, im_geotrans, im_data):
"""
影像保存
:param filename:
:param im_proj:
:param im_geotrans:
:param im_data:
:return:
"""
gdal_dtypes = {
'int8': gdal.GDT_Byte,
'uint16': gdal.GDT_UInt16,
'int16': gdal.GDT_Int16,
'uint32': gdal.GDT_UInt32,
'int32': gdal.GDT_Int32,
'float32': gdal.GDT_Float32,
'float64': gdal.GDT_Float64,
}
if not gdal_dtypes.get(im_data.dtype.name, None) is None:
datatype = gdal_dtypes[im_data.dtype.name]
else:
datatype = gdal.GDT_Float32
# 判读数组维数
if len(im_data.shape) == 3:
im_bands,im_height, im_width, = im_data.shape
else:
im_bands, (im_height, im_width) = 1, im_data.shape
# 创建文件
if os.path.exists(os.path.split(filename)[0]) is False:
os.makedirs(os.path.split(filename)[0])
driver = gdal.GetDriverByName("GTiff") # 数据类型必须有,因为要计算需要多大内存空间
dataset = driver.Create(filename, im_width, im_height, im_bands, datatype)
dataset.SetGeoTransform(im_geotrans) # 写入仿射变换参数
dataset.SetProjection(im_proj) # 写入投影
if im_bands == 1:
dataset.GetRasterBand(1).WriteArray(im_data) # 写入数组数据
else:
for i in range(im_bands):
# dataset.GetRasterBand(i + 1).WriteArray(im_data[:, :, im_bands - 1 - i])
dataset.GetRasterBand(i + 1).WriteArray(im_data[i])
del dataset
# 写GeoTiff文件
@staticmethod
def write_img_rpc(filename, im_proj, im_geotrans, im_data, rpc_dict):
"""
图像中写入rpc信息
"""
# 判断栅格数据的数据类型
if 'int8' in im_data.dtype.name:
datatype = gdal.GDT_Byte
elif 'int16' in im_data.dtype.name:
datatype = gdal.GDT_Int16
else:
datatype = gdal.GDT_Float32
# 判读数组维数
if len(im_data.shape) == 3:
im_bands, im_height, im_width = im_data.shape
else:
im_bands, (im_height, im_width) = 1, im_data.shape
# 创建文件
driver = gdal.GetDriverByName("GTiff")
dataset = driver.Create(filename, im_width, im_height, im_bands, datatype)
dataset.SetGeoTransform(im_geotrans) # 写入仿射变换参数
dataset.SetProjection(im_proj) # 写入投影
# 写入RPC参数
for k in rpc_dict.keys():
dataset.SetMetadataItem(k, rpc_dict[k], 'RPC')
if im_bands == 1:
dataset.GetRasterBand(1).WriteArray(im_data) # 写入数组数据
else:
for i in range(im_bands):
dataset.GetRasterBand(i + 1).WriteArray(im_data[i])
del dataset
def transtif2mask(self, out_tif_path, in_tif_path, threshold):
"""
:param out_tif_path: output path
:param in_tif_path: input path
:param threshold: threshold value
"""
im_proj, im_geotrans, im_arr = self.read_img(in_tif_path)  # read_img returns exactly three values
im_arr_mask = (im_arr < threshold).astype(int)
self.write_img(out_tif_path, im_proj, im_geotrans, im_arr_mask)
@staticmethod
def get_polarization(IW1):
"""
读取极化方式
:param IW1:
:return:
"""
tree = ElementTree()
tree.parse(IW1)
root = tree.getroot()
RadarCenterFrequency = root.find('component').find('component').find('component')
element_trees = list(RadarCenterFrequency)
value = 0
for element in element_trees:
if len(element.attrib) == 1:
if element.attrib["name"] == "polarization":
value = element.find('value').text
return value
def write_quick_view(self, tif_path, color_img=False, quick_view_path=None):
"""
生成快视图,默认快视图和影像同路径且同名
:param tif_path:影像路径
:param color_img:是否生成随机伪彩色图
:param quick_view_path:快视图路径
"""
if quick_view_path is None:
quick_view_path = os.path.splitext(tif_path)[0]+'.jpg'
n = self.get_bands(tif_path)
if n == 1: # 单波段
t_data = self.get_data(tif_path)
else: # 多波段,转为强度数据
t_data = self.get_data(tif_path)
t_data = t_data.astype(float)
t_data = np.sqrt(t_data[0] ** 2 + t_data[1] ** 2)
t_r = self.get_img_height(tif_path)
t_c = self.get_img_width(tif_path)
if t_r > 10000 or t_c > 10000:
q_r = int(t_r / 10)
q_c = int(t_c / 10)
elif 1024 < t_r < 10000 or 1024 < t_c < 10000:
if t_r > t_c:
q_r = 1024
q_c = int(t_c/t_r * 1024)
else:
q_c = 1024
q_r = int(t_r/t_c * 1024)
else:
q_r = t_r
q_c = t_c
if color_img is True:
# 生成伪彩色图
img = np.zeros((t_r, t_c, 3), dtype=np.uint8) # (高,宽,维度)
u = np.unique(t_data)
for i in u:
if i != 0:
w = np.where(t_data == i)
img[w[0], w[1], 0] = np.random.randint(0, 255) # 随机生成一个0到255之间的整数 可以通过挑参数设定不同的颜色范围
img[w[0], w[1], 1] = np.random.randint(0, 255)
img[w[0], w[1], 2] = np.random.randint(0, 255)
img = cv2.resize(img, (q_c, q_r)) # (宽,高)
cv2.imwrite(quick_view_path, img)
# cv2.imshow("result4", img)
# cv2.waitKey(0)
else:
# 灰度图
min = np.nanmin(t_data)
max = np.nanmax(t_data)
t_data[np.isnan(t_data)] = max
# if (max - min) < 256:
t_data = (t_data - min) / (max - min) * 255
out_img = Image.fromarray(t_data)
out_img = out_img.resize((q_c, q_r)) # 重采样
out_img = out_img.convert("L") # 转换成灰度图
out_img.save(quick_view_path)
# if __name__ == '__main__':
# ih = ImageHandler()
# path = 'D:\Dual1_1_feature1.tif'
# # ih.write_quick_view(path, color_img=False)
# print('done')
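# A minimal usage sketch (not part of the original file): reading a GeoTIFF, writing it back,
# and producing a quick-view JPEG; the paths below are illustrative only.
# handler = DemImageHandler()
# proj, geotrans, arr = handler.read_img(r'D:\data\dem_example.tif')
# handler.write_img(r'D:\data\dem_copy.tif', proj, geotrans, arr)
# handler.write_quick_view(r'D:\data\dem_copy.tif', color_img=False)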

586
dem-S-SAR/DemMain.py Normal file

@@ -0,0 +1,586 @@
"""
@Project microproduct
@File DemMain.py
@Function DEM (elevation) product generation
@Author SHJ
@Contact
@Date 2021/9/1
@Version 1.0.0
"""
import datetime
import logging
import os
import shutil
import sys
import tarfile
# import matplotlib as mpl #解决打包文件
from tool.algorithm.algtools.MetaDataHandler import MetaDataHandler
from tool.algorithm.block.blockprocess import BlockProcess
from tool.algorithm.xml.AlgXmlHandle import CheckSource, ManageAlgXML
from DemXmlInfo import CreateDict, CreateStadardXmlFile
from tool.algorithm.xml.CreatMetafile import CreateMetafile
from tool.algorithm.xml.CreateMetaDict import CreateMetaDict, CreateProductXml
from tool.file.fileHandle import fileHandle
from tool.algorithm.image.ImageHandle import ImageHandler
from DemImageHandle import DemImageHandler
from OrthoAlg import IndirectOrthorectification
from autorun import auto_run_main
import geocoding as dem
from logHandler import LogHandler
from ConfigeHandle import Config as cf
import sklearn.neighbors._typedefs
import sklearn.neighbors._partition_nodes
EXE_NAME = cf.get('exe_name')
if cf.get('debug') == 'True':
DEBUG = True
else:
DEBUG = False
file = fileHandle(DEBUG)
tar = r'-' + cf.get('tar')
alks = cf.get('alks')
rlks = cf.get('rlks')
productLevel = cf.get('productLevel')
LogHandler.init_log_handler('run_log\\' + EXE_NAME)
logger = logging.getLogger("mylog")
# env_str = os.path.split(os.path.realpath(__file__))[0]
env_str = os.path.dirname(os.path.abspath(sys.argv[0]))
os.environ['PROJ_LIB'] = env_str
class DemMain:
"""
Main workflow of the DEM (elevation) product generation
"""
def __init__(self, alg_xml_path):
self.alg_xml_path = os.path.join(os.getcwd(), alg_xml_path)
self.__alg_xml_handler = ManageAlgXML(alg_xml_path)
self.__check_handler = CheckSource(self.__alg_xml_handler)
self.imageHandler = DemImageHandler()
self.__workspace_path = None
self.__input_paras = {}
self.__output_paras = {}
self.__in_processing_paras = {}
self.__out_para = None
self.__tif_names_list = []
def check_source(self):
"""
Check that the algorithm configuration file, imagery and auxiliary files are complete
"""
self.env_str = os.getcwd()
logger.info("sysdir: %s", self.env_str)
if self.__check_handler.check_alg_xml() is False:
raise Exception('check_alg_xml() failed!')
if self.__check_handler.check_run_env() is False:
raise Exception('check_run_env() failed!')
input_para_names = ["SARS", "DEM"]
if self.__check_handler.check_input_paras(input_para_names) is False:
raise Exception('check_input_paras() failed!')
# 创建工作区
self.__workspace_path = self.__alg_xml_handler.get_workspace_path()
self.__create_work_space()
# 配置输入参数
self.__input_paras = self.__alg_xml_handler.get_input_paras()
self.__in_processing_paras = self.__init_processing_paras(self.__input_paras, self.__workspace_preprocessed_path)
aux_path = os.path.join(self.__workspace_origin_path, "AuxDir")
if os.path.exists(aux_path) is False:
os.mkdir(aux_path)
self.__in_processing_paras.update({'AuxDir': aux_path})
# 写入输出参数
SrcImagePath = self.__input_paras["SARS"]['ParaValue']
paths = SrcImagePath.split(';')
SrcImageName = os.path.split(paths[0])[1].split('.tar.gz')[0]
result_name = SrcImageName + tar + ".tar.gz"
self.__out_para = os.path.join(self.__workspace_path, EXE_NAME, 'Output', result_name)
self.__alg_xml_handler.write_out_para("DemProduct", self.__out_para)
logger.info('check_source success!')
logger.info('progress bar: 5%')
return True
def __init_processing_paras(self, names, out_path):
"""
:param names: input parameter configurations, one entry per input product
"""
processing_paras = {}
for name in names:
para = self.__input_paras[name]
if para is None:
logger.error(name + " is None!")
return False
if name == 'SARS':
name_list = []
if para['DataType'] == 'File':
processing_paras.update({'slc': para['ParaValue']})
else:
para_path_list = para['ParaValue'].split(";")
dem_path = os.path.join(self.__workspace_origin_path, para['ParaName'])
if os.path.exists(dem_path) is False:
os.mkdir(dem_path)
for file_path in para_path_list:
tif_name = os.path.basename(file_path)
shutil.copy(file_path, os.path.join(dem_path, tif_name))
para_path = os.path.join(self.__workspace_origin_path, para['ParaName'])
processing_paras.update({'slc': para_path})
# if len(para_path_list) != 0:
# dem_path = os.path.join(self.__workspace_origin_path, para['ParaName'])
# if os.path.exists(dem_path) is False:
# os.mkdir(dem_path)
# for file_path in para_path_list:
# name = os.path.split(file_path)[1].rstrip('.tar.gz')
# file_dir = os.path.join(dem_path + '\\')
# file.de_targz(file_path, file_dir)
# # para_path = os.path.join(self.__workspace_origin_path, para['ParaName'])
# name_list.append(name)
# processing_paras.update({name: os.path.join(file_dir, name)})
# processing_paras.update({'name_list': name_list})
if name == 'DEM':
if para['DataType'] == 'File':
processing_paras.update({'dem': para['ParaValue']})
elif para['DataType'] == 'zip':
para_path_list = para['ParaValue'].split(";")
if len(para_path_list) != 0:
dem_path = os.path.join(self.__workspace_origin_path, para['ParaName'])
if os.path.exists(dem_path) is False:
os.mkdir(dem_path)
for file_path in para_path_list:
BlockProcess.unzip_dem(file_path, dem_path)
# tif_name = os.path.basename(file_path)
# shutil.copy(file_path, os.path.join(dem_path, tif_name))
para_path = os.path.join(self.__workspace_origin_path, para['ParaName'])
processing_paras.update({'dem': para_path})
else:
para_path_list = para['ParaValue'].split(";")
if len(para_path_list) != 0:
dem_path = os.path.join(self.__workspace_origin_path, para['ParaName'])
if os.path.exists(dem_path) is False:
os.mkdir(dem_path)
for file_path in para_path_list:
tif_name = os.path.basename(file_path)
shutil.copy(file_path, os.path.join(dem_path, tif_name))
para_path = os.path.join(self.__workspace_origin_path, para['ParaName'])
processing_paras.update({'dem': para_path})
if name == 'Orbits':
if para['DataType'] == 'File':
processing_paras.update({'orbits': para['ParaValue']})
else:
para_path_list = para['ParaValue'].split(";")
if len(para_path_list) != 0:
dem_path = os.path.join(self.__workspace_origin_path, para['ParaName'])
if os.path.exists(dem_path) is False:
os.mkdir(dem_path)
for file_path in para_path_list:
tif_name = os.path.basename(file_path)
shutil.copy(file_path, os.path.join(dem_path, tif_name))
para_path = os.path.join(self.__workspace_origin_path, para['ParaName'])
processing_paras.update({'orbits': para_path})
if name == 'MainImg':
processing_paras.update({'mainimg': para['ParaValue']})
if name == 'box':
if para['ParaValue'] == 'empty':
processing_paras.update({'box': 'empty'})
else:
datas = para['ParaValue'].split(';')
if len(datas) != 4:
msg = 'para: box is error!box:' + para['ParaValue']
raise Exception(msg)
box = datas[0] + ' ' + datas[1] + ' ' + datas[2] + ' ' + datas[3]
processing_paras.update({'box': box})
if name == 'AuxDir':
if para['DataType'] == 'File':
processing_paras.update({'AuxDir': para['ParaValue']})
else:
para_path_list = para['ParaValue'].split(";")
if len(para_path_list) != 0:
para_path = os.path.dirname(para_path_list[0])
processing_paras.update({'AuxDir': para_path})
if name == 'NumConnections':
processing_paras.update({'NumConnections': para['ParaValue']})
if name == 'EsdCoherenceThreshold':
processing_paras.update({'EsdCoherenceThreshold': para['ParaValue']})
return processing_paras
def verifyAndModifyWgsXml(self, xmlPath, demPath):
import xml.dom.minidom as xmldom
domobj = xmldom.parse(xmlPath)
rootNode = domobj.documentElement
# 获得子标签
propertyElementObj = rootNode.getElementsByTagName("property")
for property in propertyElementObj:
if property.hasAttribute("name"):
if property.getAttribute("name") == "file_name":
pathNode = property.getElementsByTagName("value")[0]
pathInxml = pathNode.childNodes[0].data
print('pathInxml1:', pathInxml)
pathNode.childNodes[0].data = r"/".join(demPath.split("\\"))
pathInxml = pathNode.childNodes[0].data
print('pathInxml2:', pathInxml)
with open(xmlPath, 'w') as f:
# 缩进换行编码
domobj.writexml(f, addindent=' ', encoding='utf-8')
def __create_work_space(self):
"""
删除原有工作区文件夹,创建新工作区文件夹
"""
self.__workspace_preprocessed_path = self.__workspace_path + EXE_NAME + r"\Temporary\preprocessed""\\"
self.__workspace_processing_path = self.__workspace_path + EXE_NAME + r"\Temporary\processing""\\"
self.__workspace_isce_path = os.path.join(self.__workspace_processing_path, 'isce_workspace')
self.__workspace_dem_path = os.path.join(self.__workspace_preprocessed_path, 'dem')
self.__product_dic = self.__workspace_processing_path + 'product\\'
self.__workspace_origin_path = os.path.join(self.__workspace_path, EXE_NAME, "Temporary", "origin")
path_list = [self.__workspace_preprocessed_path, self.__workspace_processing_path, self.__workspace_isce_path,
self.__workspace_dem_path, self.__product_dic, self.__workspace_origin_path]
for path in path_list:
if os.path.exists(path):
# if DEBUG is True:
# continue
self.del_floder(path)
os.makedirs(path)
else:
os.makedirs(path)
logger.info('create new workspace success!')
def del_file(self, path_data):
"""
只删除文件不删除文件夹
"""
if DEBUG is True:
return
for i in os.listdir(path_data): # os.listdir(path_data)#返回一个列表,里面是当前目录下面的所有东西的相对路径
file_data = path_data + "\\" + i # 当前文件夹的下面的所有东西的绝对路径
if os.path.isfile(file_data) is True: # os.path.isfile判断是否为文件,如果是文件,就删除.如果是文件夹.递归给del_file.
os.remove(file_data)
else:
self.del_file(file_data)
@staticmethod
def del_floder(dic):
"""
删除整个文件夹
"""
# if DEBUG is True:
# return
if os.path.isdir(dic):
shutil.rmtree(dic)
@staticmethod
def make_targz(output_filename, source_dir):
"""
一次性打包整个根目录空子目录会被打包
如果只打包不压缩"w:gz"参数改为"w:""w"即可
:param output_filename:输出压缩包的完整路径eg:'E:\test.tar.gz'
:param source_dir:需要打包的跟目录eg: 'E:\testFfile\'打包文件夹里面的所有文件,'E:\testFfile'打包文件夹
"""
dir = os.path.split(output_filename)[0]
if os.path.exists(dir) is False:
os.makedirs(dir)
with tarfile.open(output_filename, "w:gz") as tar:
tar.add(source_dir, arcname=os.path.basename(source_dir))
def del_temp_workspace(self):
"""
临时工作区
"""
if DEBUG is True:
return
path = self.__workspace_path + EXE_NAME + r"\Temporary"
if os.path.exists(path):
self.del_floder(path)
def updateFile(self, out_file, in_file, old_str, new_str):
"""
替换文件中的字符串
:param out_file:输出文件名
:param in_file:输入文件名
:param old_str:就字符串
:param new_str:新字符串
:return:
"""
file_data = ""
with open(in_file, "r", encoding="utf-8") as f:
for line in f:
if old_str in line:
line = line.replace(old_str, new_str)
file_data += line
with open(out_file, "w", encoding="utf-8") as f:
f.write(file_data)
def isce_stackSentinel(self, slc_dir, dem_path, aux_dir, orbits_dir, isce_work_space, key_word, isce_exe_dir, box,EsdCoherenceThreshold):
"""执行 stackSentinel.exe"""
os.chdir(isce_exe_dir)
# cmd = "stackSentinel.exe -s {} -d {} -a {} -o {} -w {} -n {} -m {} -c {} --exeabsolute_dir {} -b {}".format(slc_dir, dem_path, aux_dir, orbits_dir, isce_work_space,"'1 2 3'", key_word, '1', isce_exe_dir,box)
# cmd = "stackSentinel.exe -s {} -d {} -a {} -o {} -w {} -n {} -m {} -W {} -c {} --exeabsolute_dir {} -b {}".format(slc_dir, dem_path, aux_dir, orbits_dir, isce_work_space,"'1 2 3'", key_word,'interferogram','6', isce_exe_dir,box)
cmd = "stackSentinel.exe -s {} -d {} -a {} -o {} -w {} -n '1 2 3' -m {} -W 'interferogram' -c '1' --exeabsolute_dir {} -e {} -b {}".format(
slc_dir, dem_path, aux_dir, orbits_dir, isce_work_space, key_word, isce_exe_dir,EsdCoherenceThreshold , box)
# cmd = "stackSentinel.exe -s {} -d {} -a {} -o {} -w {} -c {} -b {} -m {} -n '1 2 3' -p 'vv' -W offset -e {} --exeabsolute_dir {}".format(slc_dir, dem_path, aux_dir, orbits_dir, isce_work_space, cum_connections, box, main_img, EsdCoherenceThreshold, isce_exe_dir)
# stackSentinel.exe -s I:/MicroProduct/daqiceshishuju_20221109/slc -d I:/MicroProduct/daqiceshishuju_20221109/DEM/demLat_N37_N40_Lon_E107_E112.dem.wgs84 -a I:/MicroProduct/daqiceshishuju_20221109/AuxDir -o I:/MicroProduct/daqiceshishuju_20221109/orbits -w I:/MicroProduct/daqiceshishuju_20221109/daqi_workspace/isce_workspace -n '1 2 3' -m 20190206 -W interferogram -c 6 --exeabsolute_dir I:/MicroProduct/microproduct/atmosphericDelay/ISCEApp -b '37.3 39.4 108.10 111.82'
logger.info('stackSentinel_cmd:{}'.format(cmd))
result = os.system(cmd)
logger.info('cmd_result:{}'.format(result))
logger.info('stackSentinel finish!')
return result
def unPackGF3(self, run_unPackGF3, isce_exe_dir):
# os.chdir(isce_exe_dir)
print(run_unPackGF3)
if not os.path.exists(run_unPackGF3):
raise Exception("run_unPackHJ2.txt not found!")
with open(run_unPackGF3, 'r', encoding='utf-8') as fp:
cmd_lines = fp.readlines()
for cmd_line in cmd_lines:
cmdStr = cmd_line.replace("\n", "")
pyFileName = cmdStr.split(' ')[0]
exeName = "{0}.exe".format(pyFileName.split('.')[0])
newCmdLine = cmdStr.replace(pyFileName, exeName)
print("cmd_txt:{0}".format(newCmdLine))
if len(newCmdLine) == 0:
print("cmd_line{0} cmd_txt is null".format(cmd_line))
continue
result = os.system(newCmdLine)
return result
def create_sim_ori(self):
dem_path = self.__in_processing_paras['dem']
dem_out_dir = os.path.join(self.__workspace_processing_path, 'merged_dem')
ortho_out_dir = os.path.join(self.__workspace_processing_path, 'ortho')
ortho_temp_dir = os.path.join(self.__workspace_processing_path, 'ortho_temp')
path_list = [dem_out_dir, ortho_out_dir, ortho_temp_dir]
for path in path_list:
if not os.path.exists(path):
os.mkdir(path)
path2 = env_str
key_word = self.__in_processing_paras['mainimg']
slc_paths = os.path.join(self.__workspace_origin_path, 'SARS', key_word)
dem_merged_path = ImageHandler.dem_merged(dem_path, dem_out_dir)
Orthorectification = IndirectOrthorectification(os.path.join(path2, "config.yaml"))
Orthorectification.IndirectOrthorectification(slc_paths, ortho_out_dir) # 改动1
in_slc_path = None
for slc_path in os.listdir(slc_paths):
if slc_path.find(".tiff") > 0 and (
slc_path.find("_HH_") > 0 or slc_path.find("_VV_") > 0 or slc_path.find("_DH_") > 0):
in_slc_path = os.path.join(slc_paths, slc_path)
break
# 获取校正模型后
Orthorectification.preCaldem_sar_rc(dem_merged_path, in_slc_path, ortho_temp_dir, ortho_out_dir.replace("\\", "\\\\"))
#构建轨道
sim_ori_path = ortho_out_dir + "\\" + "RD_sim_ori.tif"
return sim_ori_path
def creat_xml(self, dem_proPath):
"""
生成元文件案例
product_path: 大气延迟校正产品输出的影像文件路径
"""
# os.chdir(env_str)
model_path = os.path.join(env_str, "product.xml")
tem_folder = self.__workspace_path + EXE_NAME + r"\Temporary""\\"
image_path = dem_proPath
out_path1 = os.path.join(tem_folder, "trans_geo_projcs.tif")
out_path2 = os.path.join(tem_folder, "trans_projcs_geo.tif")
SrcImagePath = self.__input_paras["SARS"]['ParaValue']
paths = SrcImagePath.split(';')
SrcImageName = os.path.split(paths[0])[1].split('.tar.gz')[0]
meta_xml_path = os.path.join(self.__product_dic, SrcImageName + tar + ".meta.xml")
para_dict = CreateMetaDict(image_path, self.ori_xml, self.__product_dic,
out_path1, out_path2).calu_nature()
para_dict.update({"imageinfo_ProductName": "高程产品"})
para_dict.update({"imageinfo_ProductIdentifier": "DEM"})
para_dict.update({"imageinfo_ProductLevel": productLevel})
para_dict.update({"ProductProductionInfo_BandSelection": "1,2"})
para_dict.update({"ProductProductionInfo_AuxiliaryDataDescription": "DEM"})
CreateProductXml(para_dict, model_path, meta_xml_path).create_standard_xml()
return meta_xml_path
def isce_run_steps(self, run_steps, target):
for i in range(0, len(run_steps)):
uwm_file = os.path.join(self.__workspace_isce_path, "run_files", run_steps[i])
shutil.move(uwm_file, target)
def process_handle(self,start):
# 执行isce2.5生成干涉图
# 生成工作流配置文件
dem_dir = r"/".join(self.__in_processing_paras['dem'].split("\\"))
isce_work_space = r"/".join(self.__workspace_isce_path.split("\\"))
isce_work_space = '/cygdrive/' + isce_work_space.replace(":/", "/")
box = "'" + self.__in_processing_paras['box'] + "'"
main_img = self.__in_processing_paras['mainimg']
isce_exe_dir = r"/".join(os.path.join(self.env_str, "ISCEApp").split("\\"))
EsdCoherenceThreshold = self.__in_processing_paras['EsdCoherenceThreshold']
os.chdir(isce_exe_dir)
### 转换tif影像为 wgs84格式
dem_dir = '/cygdrive/' + dem_dir.replace(":/", "/")
out_dem_dir = self.__workspace_dem_path
out_dem_dir = '/cygdrive/' + out_dem_dir.replace(":\\", "/")
# cmd = "demhgt2wgs.exe --tif_path {} --hgt_path {} --ASTGTM2".format(dem_dir, out_dem_dir)
cmd = "DEM2ISCE.exe -s {} -o {}".format(dem_dir, out_dem_dir)
logger.info('DEM2ISCE_cmd:{}'.format(cmd))
result = os.system(cmd)
logger.info('cmd_result:{}'.format(result))
import glob
in_tif_paths = list(glob.glob(os.path.join(self.__workspace_dem_path, '*.wgs84')))
if in_tif_paths == []:
raise Exception('DEM2ISCE.exe run failed!')
dem_path = r"/".join(in_tif_paths[0].split("\\"))
dem_path = '/cygdrive/' + dem_path.replace(":/", "/")
logger.info('DEM2ISCE finish!')
logger.info('progress bar: 5%')
# cum_connections = self.__in_processing_paras['NumConnections']
# cmd = "stackSentinel.exe -s {} -d {} -a {} -o {} -w {} -c {} -b {} -m {} -n '1 2 3' -p 'vv' -W offset -e {} --exeabsolute_dir {}".format(slc_dir, dem_path, aux_dir, orbits_dir, isce_work_space, cum_connections, box, main_img, EsdCoherenceThreshold, isce_exe_dir)
# result = self.isce_stackSentinel(slc_dir, dem_path, aux_dir, orbits_dir, isce_work_space, main_img,
# isce_exe_dir, box, EsdCoherenceThreshold)
# os.chdir(isce_exe_dir)
# slc数据转isce格式
slc_dir = r"/".join(self.__in_processing_paras['slc'].split("\\")) + "/"
slc_dir = '/cygdrive/' + slc_dir.replace(":/", "/")
out_slc_dir = r"/".join(os.path.join(self.__workspace_preprocessed_path, 'slc').split("\\")) + "/"
if not os.path.exists(out_slc_dir):
os.mkdir(out_slc_dir)
out_slc_dir = '/cygdrive/' + out_slc_dir.replace(":/", "/")
cmd = "prepSlcHJ2.exe -i {} -o {}".format(slc_dir, out_slc_dir)
logger.info('prepSlcHJ2_cmd:{}'.format(cmd))
result = os.system(cmd)
logger.info('cmd_result:{}'.format(result))
run_unPackHJ2 = os.path.join(self.__workspace_origin_path, 'SARS', 'run_unPackHJ2.txt')
result = self.unPackGF3(run_unPackHJ2, isce_exe_dir)
logger.info('unpackFrame_HJ2_cmd:{}'.format(cmd))
logger.info('slc to isce_data finish!')
logger.info('progress bar: 10%')
if self.__in_processing_paras['box'] == 'empty':
box = ''
cmd = "stackStripMap.exe -s {} -w {} -d {} -m {} -a {} -r {} -u 'snaphu' -b '50000' --nofocus".format(out_slc_dir,
isce_work_space,
dem_path,
main_img, alks,
rlks)
logger.info('stackStripMap_cmd:{}'.format(cmd))
result = os.system(cmd)
logger.info('cmd_result:{}'.format(result))
logger.info('stackStripMap finish!')
run_files = os.path.join(self.__workspace_isce_path, 'run_files')
for file in list(glob.glob(os.path.join(run_files, '*.job'))):
os.remove(file)
run_steps = ["run_07_grid_baseline"]
self.isce_run_steps(run_steps, self.__workspace_isce_path)
else:
box = "'" + self.__in_processing_paras['box'] + "'"
cmd = "stackStripMap.exe -s {} -w {} -d {} -m {} -a {} -r {} -x {} -u 'snaphu' -b '50000' --nofocus".format(
out_slc_dir, isce_work_space, dem_path, main_img, alks, rlks, box)
logger.info('stackStripMap_cmd:{}'.format(cmd))
result = os.system(cmd)
logger.info('cmd_result:{}'.format(result))
logger.info('stackStripMap finish!')
run_files = os.path.join(self.__workspace_isce_path, 'run_files')
for file in list(glob.glob(os.path.join(run_files, '*.job'))):
os.remove(file)
run_steps = ["run_08_grid_baseline"]
self.isce_run_steps(run_steps, self.__workspace_isce_path)
cmd = ['-e', isce_exe_dir, '-o', self.__workspace_isce_path]
logger.info('autorun_cmd:{}'.format(cmd))
auto_run_main(cmd)
logger.info('cmd_result:{}'.format(result))
if result != 0:
raise Exception('autorun.py run failed!')
# 直接调用
# sys.argv.extend(['-e' + isce_exe_dir, '-o' + isce_work_space])
# logger.info('autorun_cmd:{}'.format(sys.argv))
# auto_run_main(sys.argv[1:])
logger.info('autorun_cmd success!')
logger.info('progress bar: 90%')
# sim_ori_path = self.create_sim_ori() #todo 正射模块
temp_path = self.__workspace_isce_path + "\\dem_temp"
if not os.path.exists(temp_path):
os.mkdir(temp_path)
out_dir = os.path.join(self.__workspace_path, EXE_NAME, 'Output')
# dem_product = os.path.join(self.__workspace_path, EXE_NAME, 'Output','dem_def.tiff')
SrcImagePath = self.__input_paras["SARS"]['ParaValue']
paths = SrcImagePath.split(';')
SrcImageName = os.path.split(paths[0])[1].split('.tar.gz')[0]
result_name = SrcImageName + tar + ".tiff"
dem_product = os.path.join(self.__product_dic, result_name)
slc_paths = os.path.join(self.__workspace_origin_path, 'SARS', main_img)
for file in os.listdir(slc_paths):
if file.endswith('.xml'):
self.ori_xml = os.path.join(slc_paths, file)
lamda = MetaDataHandler.get_lamda(self.ori_xml)
dem_proPath = dem.get_Dem(self.__workspace_isce_path, temp_path,out_dir, dem_product, lamda) # 生成tif
# dem_proPath = dem.get_Dem(self.__workspace_isce_path, temp_path,out_dir, dem_product) # 生成tif
self.imageHandler.write_quick_view(dem_product) # 生成快视图
meta_xml_path = self.creat_xml(dem_proPath)
temp_folder = os.path.join(self.__workspace_path, EXE_NAME, 'Output')
out_xml = os.path.join(temp_folder, os.path.basename(meta_xml_path))
if os.path.exists(temp_folder) is False:
os.mkdir(temp_folder)
shutil.copy(meta_xml_path, out_xml)
self.make_targz(self.__out_para, self.__product_dic)
logger.info("write quick view and .tar.gz finish")
logger.info('DEM production successful !')
logger.info('progress bar: 100%')
return True
if __name__ == '__main__':
start = datetime.datetime.now()
try:
if len(sys.argv) < 2:
xml_path = r'Dem_S_SAR_V3.xml'
else:
xml_path = sys.argv[1]
Main = DemMain(xml_path)
if not Main.check_source():
raise Exception('check_source() failed!')
if not Main.process_handle(start):
raise Exception('process_handle() failed!')
logger.info('successful production of DEM products!')
except Exception:
logger.exception("run-time error!")
finally:
Main.del_temp_workspace()
# pass
end = datetime.datetime.now()
msg = 'running use time: %s ' % (end - start)
logger.info(msg)
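# Typical invocation (sketch, based on the __main__ block above):
#   python DemMain.py Dem_S_SAR_V3.xml
# When no argument is given, the script falls back to 'Dem_S_SAR_V3.xml' in the working directory;
# the XML must follow the structure shown in Dem.xml (TaskID, WorkSpace, AlgCompt/Inputs/Outputs).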

64
dem-S-SAR/DemMain.spec Normal file

@@ -0,0 +1,64 @@
# -*- mode: python ; coding: utf-8 -*-
import sys
from shutil import copy, rmtree
import os
cwdpath = os.getcwd()
toolDir = os.path.join(cwdpath, 'tool')
if os.path.exists(toolDir):
rmtree(toolDir)  # os.remove cannot delete a directory; use shutil.rmtree instead
os.mkdir(toolDir)
source_folder = '../tool'
def copy_file(path_read, path_write):
names = os.listdir(path_read)
for name in names:
path_read_new = os.path.join(path_read, name)
path_write_new = os.path.join(path_write, name)
if os.path.isdir(path_read_new):
if not os.path.exists(path_write_new):
os.mkdir(path_write_new)
copy_file(path_read_new, path_write_new)
else:
copy(path_read_new, path_write_new)
copy_file(source_folder, toolDir)
block_cipher = None
a = Analysis(['DemMain.py'],
pathex=[],
binaries=[],
datas=[('D:/ANACONDA/envs/micro/Lib/site-packages/dask/dask.yaml', './dask'), ('D:/ANACONDA/envs/micro/Lib/site-packages/distributed/distributed.yaml', './distributed')],
hiddenimports=['pyproj._compat'],
hookspath=[],
hooksconfig={},
runtime_hooks=[],
excludes=[],
win_no_prefer_redirects=False,
win_private_assemblies=False,
cipher=block_cipher,
noarchive=False)
pyz = PYZ(a.pure, a.zipped_data,
cipher=block_cipher)
exe = EXE(pyz,
a.scripts,
a.binaries,
a.zipfiles,
a.datas,
[],
name='DemMain',
debug=False,
bootloader_ignore_signals=False,
strip=False,
upx=True,
upx_exclude=[],
runtime_tmpdir=None,
console=True,
disable_windowed_traceback=False,
target_arch=None,
codesign_identity=None,
entitlements_file=None )

217
dem-S-SAR/DemXmlInfo.py Normal file

@@ -0,0 +1,217 @@
"""
@Project microproduct
@File DemXmlInfo.py
@Function Build the product attribute dictionary and write the standard metadata XML
@Author LMM
@Date 2021/10/19 14:39
@Version 1.0.0
"""
from xml.etree.ElementTree import ElementTree
import shutil
from tool.algorithm.image.ImageHandle import ImageHandler
from tool.algorithm.algtools.PreProcess import PreProcess as pp
from osgeo import gdal
import numpy as np
import datetime
class CreateDict:
"""根据影像信息输出属性字典"""
def __init__(self, image_path, image_pair, out_path1, out_path2):
self.ImageHandler = ImageHandler()
self.para_dict = {}
self.image_path = image_path
self.out_path = out_path1
self.out_path2 = out_path2
self.image_pair = image_pair
pass
def calu_nature(self, start):
"""存入字典"""
imageinfo_width = self.ImageHandler.get_img_width(self.image_path)
self.para_dict.update({"imageinfo_width": imageinfo_width})
imageinfo_height = self.ImageHandler.get_img_height(self.image_path)
self.para_dict.update({"imageinfo_height": imageinfo_height})
# imageinfo_EarthModel=self.ImageHandler.get_projection(self.image_path).split("SPHEROID[", 2)[1].split(",", 2)[0]
# self.para_dict.update({"imageinfo_EarthModel":imageinfo_EarthModel.replace('"','')})
self.para_dict.update({"imageinfo_EarthModel": "WGS84"})
# imageinfo_ProjectModel = self.ImageHandler.get_projection(self.image_path).split("DATUM[", 2)[1].split(",", 2)[0]
# self.para_dict.update({"imageinfo_ProjectModel":imageinfo_ProjectModel.replace('"','')})
self.para_dict.update({"imageinfo_ProjectModel": "UTM"})
proj = self.ImageHandler.get_projection(self.image_path) # 输出的影像若是投影坐标系则先转成地理坐标系
keyword = proj.split("[", 2)[0] # 若是地理坐标系则pass
if keyword == "GEOGCS":
pass
elif keyword == "PROJCS":
pp.trans_projcs2geogcs(self.out_path2, self.image_path)
self.image_path = self.out_path2
elif len(keyword) == 0 or keyword.strip() == "" or keyword.isspace() is True:
raise Exception('image projection is missing!')
pp.trans_geogcs2projcs(self.out_path, self.image_path) # 坐标投影, 地理转平面投影坐标
imageinfo_widthspace = self.ImageHandler.get_geotransform(self.out_path)[1] # 投影后的分辨率
imageinfo_heightspace = -self.ImageHandler.get_geotransform(self.out_path)[5] # 投影后的分辨率
self.para_dict.update({"imageinfo_widthspace": imageinfo_widthspace})
self.para_dict.update({"imageinfo_heightspace": imageinfo_heightspace})
self.para_dict.update({"NominalResolution": imageinfo_widthspace})
WidthInMeters = imageinfo_width * imageinfo_widthspace # 投影后的分辨率×宽度
self.para_dict.update({"WidthInMeters": WidthInMeters})
# image_array = self.ImageHandler.get_band_array(self.image_path)
# a2 = np.where(np.isnan(image_array), 999999, image_array)
# MinValue = np.min(a2)
# a3 = np.where(np.isnan(image_array), -999999, image_array)
# MaxValue = np.max(a3)
#
# self.para_dict.update({"MaxValue":MaxValue})
# self.para_dict.update({"MinValue":MinValue})
get_scope = self.ImageHandler.get_scope(self.image_path)
point_upleft, point_upright, point_downleft, point_downright = get_scope[0], get_scope[1], get_scope[2], \
get_scope[3]
self.para_dict.update({"imageinfo_corner_topLeft_latitude": point_upleft[1]})
self.para_dict.update({"imageinfo_corner_topLeft_longitude": point_upleft[0]})
self.para_dict.update({"imageinfo_corner_topRight_latitude": point_upright[1]})
self.para_dict.update({"imageinfo_corner_topRight_longitude": point_upright[0]})
self.para_dict.update({"imageinfo_corner_bottomLeft_latitude": point_downleft[1]})
self.para_dict.update({"imageinfo_corner_bottomLeft_longitude": point_downleft[0]})
self.para_dict.update({"imageinfo_corner_bottomRight_latitude": point_downright[1]})
self.para_dict.update({"imageinfo_corner_bottomRight_longitude": point_downright[0]})
longitude_max = np.array([point_upleft[0], point_upright[0], point_downleft[0], point_downright[0]]).max()
longitude_min = np.array([point_upleft[0], point_upright[0], point_downleft[0], point_downright[0]]).min()
latitude_max = np.array([point_upleft[1], point_upright[1], point_downleft[1], point_downright[1]]).max()
latitude_min = np.array([point_upleft[1], point_upright[1], point_downleft[1], point_downright[1]]).min()
imageinfo_center_latitude = (latitude_max + latitude_min) / 2
imageinfo_center_longitude = (longitude_max + longitude_min) / 2
self.para_dict.update({"imageinfo_center_latitude": imageinfo_center_latitude})
self.para_dict.update({"imageinfo_center_longitude": imageinfo_center_longitude})
# self.para_dict.update({"productType": "GTC"}) # 设置产品类型
self.para_dict.update({"productFormat": "TIF"})
productGentime = datetime.datetime.now()
self.para_dict.update({"productGentime": productGentime})
msg = productGentime - start
self.para_dict.update({"productConsumeTime": msg})
self.para_dict.update({"unit": "none"}) # 设置单位
self.para_dict.update({"NoDataValue": "nan"})
self.para_dict.update({"productLevel": "5"}) # 设置图像位深度
image_array = self.ImageHandler.get_band_array(self.image_path)
        try:
            # map numpy dtype names to GDAL types (membership check) and to the bit depth written into the metadata
            gdal_dtypes = {
                'int8': gdal.GDT_Byte,
                'uint16': gdal.GDT_UInt16,
                'int16': gdal.GDT_Int16,
                'uint32': gdal.GDT_UInt32,
                'int32': gdal.GDT_Int32,
                'float32': gdal.GDT_Float32,
                'float64': gdal.GDT_Float64,
            }
            bit_dtypes = {
                'int8': 8,
                'uint16': 16,
                'int16': 16,
                'uint32': 32,
                'int32': 32,
                'float32': 32,
                'float64': 64,
            }
            if gdal_dtypes.get(image_array.dtype.name) is not None:
                bit_num = str(bit_dtypes[image_array.dtype.name])
                datatype = bit_num + "bit"
            else:
                datatype = str(32) + "bit"
            # datatype = str(gdal.GDT_Float32)+"bit"
            self.para_dict.update({"imagebit": datatype})
        except Exception:
            self.para_dict.update({"imagebit": "None"})
HH, HV, VH, VV = self.image_pair[0], self.image_pair[1], self.image_pair[2], self.image_pair[3]
if HH == 0:
HH = "delete"
else:
HH = "NULL"
self.para_dict.update({"imageinfo_QualifyValue_HH": HH})
if HV == 0:
HV = "delete"
else:
HV = "NULL"
self.para_dict.update({"imageinfo_QualifyValue_HV": HV})
if VH == 0:
VH = "delete"
else:
VH = "NULL"
self.para_dict.update({"imageinfo_QualifyValue_VH": VH})
if VV == 0:
VV = "delete"
else:
VV = "NULL"
self.para_dict.update({"imageinfo_QualifyValue_VV": VV})
return self.para_dict
class CreateStadardXmlFile:
    """Generate a standard product XML file from the attribute values in a dict."""
    def __init__(self, xml_path, para_xml_path, par_dict, path):
        """
        xml_path: path of the XML template to copy
        para_xml_path: path of the algorithm task (parameter) XML
        par_dict: dict of metadata attribute values
        path: output path for the copied XML template
        """
        self.par_dict = par_dict
        self.path = path
        self.para_xml_path = para_xml_path
        shutil.copy(xml_path, path)
    def create_standard_xml(self):
        """Write the values from the dict into the copied XML template."""
        tree = ElementTree()
        tree.parse(self.path)  # copied product XML template
        root = tree.getroot()
        para_tree = ElementTree()
        para_tree.parse(self.para_xml_path)  # algorithm task (parameter) XML
        para_root = para_tree.getroot()
productinfo = root.find("productinfo")
for key, value in self.par_dict.items():
if key.split("_")[0] != "imageinfo":
productinfo.find(key).text = str(value)
elif key.split("_")[0] == "imageinfo":
imageinfo = productinfo.find("imageinfo")
if key.split("_")[1] in ["EarthModel", "ProjectModel", "width", "height", "widthspace", "heightspace"]:
imageinfo.find(key.split("_")[1]).text = str(value)
elif key.split("_")[1] == "center":
center = imageinfo.find("center")
center.find(key.split("_")[2]).text = str(value)
elif key.split("_")[1] == "corner":
corner = imageinfo.find("corner")
corner.find(key.split("_")[2]).find(key.split("_")[3]).text = str(value)
elif key.split("_")[1] == "QualifyValue":
QualifyValue = imageinfo.find("QualifyValue")
if value == "delete":
element_QualifyValue = list(QualifyValue)
for i in element_QualifyValue:
if i.tag == key.split("_")[2]:
QualifyValue.remove(i)
else:
QualifyValue.find(key.split("_")[2]).text = str(value)
pass
processinfo = root.find("processinfo")
Parameter = root.find("processinfo").find("Parameter")
para_list = para_root.find("AlgCompt").find("Inputs").findall("Parameter")
for para in para_list:
if para.find("ParaName").text == "NumConnections":
para_value = para.find("ParaValue").text
Parameter.find("NumConnections").text = str(para_value)
elif para.find("ParaName").text == "EsdCoherenceThreshold":
para_value = para.find("ParaValue").text
Parameter.find("EsdCoherenceThreshold").text = str(para_value)
tree.write(self.path, encoding="utf-8", xml_declaration=True)
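# Usage sketch (illustrative only; apart from CreateStadardXmlFile every name below is
# a hypothetical placeholder): the constructor copies the template, create_standard_xml()
# then fills productinfo/imageinfo and the NumConnections/EsdCoherenceThreshold parameters.
# para_dict = ...  # the dict returned by the metadata routine above (return self.para_dict)
# writer = CreateStadardXmlFile(template_xml_path, task_para_xml_path, para_dict, out_meta_xml_path)
# writer.create_standard_xml()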

144
dem-S-SAR/Dem_S_SAR_V3.xml Normal file
View File

@ -0,0 +1,144 @@
<?xml version='1.0' encoding='utf-8'?>
<Root>
<TaskID>CSAR_202107275419_0001-0</TaskID>
<WorkSpace>D:\micro\SWork\</WorkSpace>
<AlgCompt>
<DataTransModel>File</DataTransModel>
<Artificial>ElementAlg</Artificial>
<AlgorithmName>Dem_C_SAR_V3</AlgorithmName>
<DllName>Dem_C_SAR_V3.exe</DllName>
<ChsName>高程产品</ChsName>
<AlgorithmDesc>微波卫星3-5级产品生产模型</AlgorithmDesc>
<AlgorithmAlias>Dem_C_SAR_V3</AlgorithmAlias>
<Version>3</Version>
<AlgorithmClass>几何类产品_高程产品</AlgorithmClass>
<AlgorithmLevel>5</AlgorithmLevel>
<AlgoirthmID>DEM_CSAR_中科卫星应用德清研究院_3</AlgoirthmID>
<Author>中科卫星应用德清研究院</Author>
<Type>算法</Type>
<jdkVersion>1.8</jdkVersion>
<algDevlanguage>python</algDevlanguage>
<Environment>
<IsCluster>0</IsCluster>
<ClusterNum>0</ClusterNum>
<OperatingSystem>Windows10</OperatingSystem>
<CPU>单核</CPU>
<Memory>32GB</Memory>
<Storage>64GB</Storage>
<NetworkCard>无需求</NetworkCard>
<Bandwidth>无需求</Bandwidth>
<GPU>无需求</GPU>
</Environment>
<Utility Resolution="1" Satellite="GF3BC" Sensor="MSS" />
<Inputs ParameterNum="5">
<Parameter>
<ParaName>MainImg</ParaName>
<ParaChsName>主影像时间</ParaChsName>
<Description>CSAR 主影像成像时间</Description>
<ParaType>Value</ParaType>
<DataType>float</DataType>
<OptionValue>DEFAULT</OptionValue>
<MinValue>DEFAULT</MinValue>
<MaxValue>DEFAULT</MaxValue>
<ParaSource>Man</ParaSource>
<ParaValue>20230522</ParaValue>
<EnModification>True</EnModification>
<EnMultipleChoice>False</EnMultipleChoice>
<Control>UploadInput</Control>
<InputType>Aux</InputType>
<InputNum>0</InputNum>
<DateFrom>Aux</DateFrom>
</Parameter>
<Parameter>
<ParaName>SARS</ParaName>
<ParaChsName>SAR影像文件夹路径</ParaChsName>
<Description>CSAR slc 影像数据</Description>
<ParaType>File</ParaType>
<DataType>zip</DataType>
<OptionValue>DEFAULT</OptionValue>
<MinValue>DEFAULT</MinValue>
<MaxValue>DEFAULT</MaxValue>
<ParaSource>Cal</ParaSource>
<ParaValue>
F:\HJ2EFInSar\slc\HJ2E_KSC_STRIP_003375_E100.3_N26.8_20230522_SLC_HHHV_L10000057058.tar.gz;
F:\HJ2EFInSar\slc\HJ2E_MYC_STRIP_003526_E100.3_N26.8_20230601_SLC_HHHV_L10000061276.tar.gz</ParaValue>
<EnModification>True</EnModification>
<EnMultipleChoice>False</EnMultipleChoice>
<Control>File</Control>
<InputType>Satellite</InputType>
<InputNum>2</InputNum>
<DateFrom>GF3BC</DateFrom>
</Parameter>
<Parameter>
<ParaName>box</ParaName>
<ParaChsName>经纬度包围盒</ParaChsName>
<Description>经纬度包围盒SNWE。例子37;38.2;108.87;109.1</Description>
<ParaType>value</ParaType>
<DataType>string</DataType>
<OptionValue>DEFAULT</OptionValue>
<MinValue>DEFAULT</MinValue>
<MaxValue>DEFAULT</MaxValue>
<ParaSource>Man</ParaSource>
<ParaValue>empty</ParaValue>
<EnModification>True</EnModification>
<EnMultipleChoice>True</EnMultipleChoice>
<Control>UploadInput</Control>
<InputType>Aux</InputType>
<InputNum>0</InputNum>
<DateFrom>Aux</DateFrom>
</Parameter>
<Parameter>
<ParaName>DEM</ParaName>
<ParaChsName>DEM高程数据路径</ParaChsName>
<Description>DEM高程数据数据。数据来源:30米 ASTGTM2, 数据格式tif。备注数据的经纬度范围必须是整数</Description>
<ParaType>File</ParaType>
<DataType>File</DataType>
<OptionValue>DEFAULT</OptionValue>
<MinValue>DEFAULT</MinValue>
<MaxValue>DEFAULT</MaxValue>
<ParaSource>Cal</ParaSource>
<ParaValue>F:\HJ2EFInSar\dem</ParaValue>
<EnModification>True</EnModification>
<EnMultipleChoice>False</EnMultipleChoice>
<Control>File</Control>
<InputType>Aux</InputType>
<InputNum>0</InputNum>
<DateFrom>DEM</DateFrom>
</Parameter>
<Parameter>
<ParaName>EsdCoherenceThreshold</ParaName>
<ParaChsName>相干性阈值</ParaChsName>
<Description>Coherence threshold for estimating azimuth misregistration using enhanced spectral diversity</Description>
<ParaType>Value</ParaType>
<DataType>float</DataType>
<OptionValue>DEFAULT</OptionValue>
<MinValue>DEFAULT</MinValue>
<MaxValue>DEFAULT</MaxValue>
<ParaSource>Man</ParaSource>
<ParaValue>0.75</ParaValue>
<EnModification>True</EnModification>
<EnMultipleChoice>True</EnMultipleChoice>
<Control>UploadInput</Control>
<InputType>Aux</InputType>
<InputNum>0</InputNum>
<DateFrom>Aux</DateFrom>
</Parameter>
</Inputs>
<Outputs ParameterNum="1">
<Parameter>
<ParaName>DemProduct</ParaName>
<ParaChsName>高程产品</ParaChsName>
<Description>高程产品</Description>
<ParaType>File</ParaType>
<DataType>tar.gz</DataType>
<ParaSource>Cal</ParaSource>
<OptionValue>DEFAULT</OptionValue>
<MinValue>DEFAULT</MinValue>
<MaxValue>DEFAULT</MaxValue>
<NoDataValue>-9999</NoDataValue>
<ParaValue>D:\micro\SWork\Dem\Output\HJ2E_KSC_STRIP_003375_E100.3_N26.8_20230522_SLC_HHHV_L10000057058-DEM.tar.gz</ParaValue>
</Parameter>
</Outputs>
</AlgCompt>
</Root>
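As a hedged illustration (not part of this commit), the Inputs block above can be read into a ParaName→ParaValue dict with xml.etree.ElementTree; the helper name read_input_paras and the example path are assumptions, and only the element layout is taken from the XML shown.
from xml.etree.ElementTree import ElementTree

def read_input_paras(task_xml_path):
    """Collect ParaName -> ParaValue from the AlgCompt/Inputs block of a task XML."""
    tree = ElementTree()
    tree.parse(task_xml_path)
    paras = {}
    for para in tree.getroot().find("AlgCompt").find("Inputs").findall("Parameter"):
        value = para.find("ParaValue").text
        paras[para.find("ParaName").text] = value.strip() if value else value
    return paras

# e.g. read_input_paras(r"D:\micro\SWork\Dem_S_SAR_V3.xml")["EsdCoherenceThreshold"] -> "0.75"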

Binary file not shown.

View File

@ -0,0 +1,396 @@
"""Shared support for scanning document type declarations in HTML and XHTML.
This module is used as a foundation for the html.parser module. It has no
documented public API and should not be used directly.
"""
import re
_declname_match = re.compile(r'[a-zA-Z][-_.a-zA-Z0-9]*\s*').match
_declstringlit_match = re.compile(r'(\'[^\']*\'|"[^"]*")\s*').match
_commentclose = re.compile(r'--\s*>')
_markedsectionclose = re.compile(r']\s*]\s*>')
# An analysis of the MS-Word extensions is available at
# http://www.planetpublish.com/xmlarena/xap/Thursday/WordtoXML.pdf
_msmarkedsectionclose = re.compile(r']\s*>')
del re
class ParserBase:
"""Parser base class which provides some common support methods used
by the SGML/HTML and XHTML parsers."""
def __init__(self):
if self.__class__ is ParserBase:
raise RuntimeError(
"_markupbase.ParserBase must be subclassed")
def error(self, message):
raise NotImplementedError(
"subclasses of ParserBase must override error()")
def reset(self):
self.lineno = 1
self.offset = 0
def getpos(self):
"""Return current line number and offset."""
return self.lineno, self.offset
# Internal -- update line number and offset. This should be
# called for each piece of data exactly once, in order -- in other
# words the concatenation of all the input strings to this
# function should be exactly the entire input.
def updatepos(self, i, j):
if i >= j:
return j
rawdata = self.rawdata
nlines = rawdata.count("\n", i, j)
if nlines:
self.lineno = self.lineno + nlines
pos = rawdata.rindex("\n", i, j) # Should not fail
self.offset = j-(pos+1)
else:
self.offset = self.offset + j-i
return j
_decl_otherchars = ''
# Internal -- parse declaration (for use by subclasses).
def parse_declaration(self, i):
# This is some sort of declaration; in "HTML as
# deployed," this should only be the document type
# declaration ("<!DOCTYPE html...>").
# ISO 8879:1986, however, has more complex
# declaration syntax for elements in <!...>, including:
# --comment--
# [marked section]
# name in the following list: ENTITY, DOCTYPE, ELEMENT,
# ATTLIST, NOTATION, SHORTREF, USEMAP,
# LINKTYPE, LINK, IDLINK, USELINK, SYSTEM
rawdata = self.rawdata
j = i + 2
assert rawdata[i:j] == "<!", "unexpected call to parse_declaration"
if rawdata[j:j+1] == ">":
# the empty comment <!>
return j + 1
if rawdata[j:j+1] in ("-", ""):
# Start of comment followed by buffer boundary,
# or just a buffer boundary.
return -1
# A simple, practical version could look like: ((name|stringlit) S*) + '>'
n = len(rawdata)
if rawdata[j:j+2] == '--': #comment
# Locate --.*-- as the body of the comment
return self.parse_comment(i)
elif rawdata[j] == '[': #marked section
# Locate [statusWord [...arbitrary SGML...]] as the body of the marked section
# Where statusWord is one of TEMP, CDATA, IGNORE, INCLUDE, RCDATA
# Note that this is extended by Microsoft Office "Save as Web" function
# to include [if...] and [endif].
return self.parse_marked_section(i)
else: #all other declaration elements
decltype, j = self._scan_name(j, i)
if j < 0:
return j
if decltype == "doctype":
self._decl_otherchars = ''
while j < n:
c = rawdata[j]
if c == ">":
# end of declaration syntax
data = rawdata[i+2:j]
if decltype == "doctype":
self.handle_decl(data)
else:
# According to the HTML5 specs sections "8.2.4.44 Bogus
# comment state" and "8.2.4.45 Markup declaration open
# state", a comment token should be emitted.
# Calling unknown_decl provides more flexibility though.
self.unknown_decl(data)
return j + 1
if c in "\"'":
m = _declstringlit_match(rawdata, j)
if not m:
return -1 # incomplete
j = m.end()
elif c in "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ":
name, j = self._scan_name(j, i)
elif c in self._decl_otherchars:
j = j + 1
elif c == "[":
# this could be handled in a separate doctype parser
if decltype == "doctype":
j = self._parse_doctype_subset(j + 1, i)
elif decltype in {"attlist", "linktype", "link", "element"}:
# must tolerate []'d groups in a content model in an element declaration
# also in data attribute specifications of attlist declaration
# also link type declaration subsets in linktype declarations
# also link attribute specification lists in link declarations
self.error("unsupported '[' char in %s declaration" % decltype)
else:
self.error("unexpected '[' char in declaration")
else:
self.error(
"unexpected %r char in declaration" % rawdata[j])
if j < 0:
return j
return -1 # incomplete
# Internal -- parse a marked section
# Override this to handle MS-word extension syntax <![if word]>content<![endif]>
def parse_marked_section(self, i, report=1):
rawdata= self.rawdata
assert rawdata[i:i+3] == '<![', "unexpected call to parse_marked_section()"
sectName, j = self._scan_name( i+3, i )
if j < 0:
return j
if sectName in {"temp", "cdata", "ignore", "include", "rcdata"}:
# look for standard ]]> ending
match= _markedsectionclose.search(rawdata, i+3)
elif sectName in {"if", "else", "endif"}:
# look for MS Office ]> ending
match= _msmarkedsectionclose.search(rawdata, i+3)
else:
self.error('unknown status keyword %r in marked section' % rawdata[i+3:j])
match = None
if not match:
return -1
if report:
j = match.start(0)
self.unknown_decl(rawdata[i+3: j])
return match.end(0)
# Internal -- parse comment, return length or -1 if not terminated
def parse_comment(self, i, report=1):
rawdata = self.rawdata
if rawdata[i:i+4] != '<!--':
self.error('unexpected call to parse_comment()')
match = _commentclose.search(rawdata, i+4)
if not match:
return -1
if report:
j = match.start(0)
self.handle_comment(rawdata[i+4: j])
return match.end(0)
# Internal -- scan past the internal subset in a <!DOCTYPE declaration,
# returning the index just past any whitespace following the trailing ']'.
def _parse_doctype_subset(self, i, declstartpos):
rawdata = self.rawdata
n = len(rawdata)
j = i
while j < n:
c = rawdata[j]
if c == "<":
s = rawdata[j:j+2]
if s == "<":
# end of buffer; incomplete
return -1
if s != "<!":
self.updatepos(declstartpos, j + 1)
self.error("unexpected char in internal subset (in %r)" % s)
if (j + 2) == n:
# end of buffer; incomplete
return -1
if (j + 4) > n:
# end of buffer; incomplete
return -1
if rawdata[j:j+4] == "<!--":
j = self.parse_comment(j, report=0)
if j < 0:
return j
continue
name, j = self._scan_name(j + 2, declstartpos)
if j == -1:
return -1
if name not in {"attlist", "element", "entity", "notation"}:
self.updatepos(declstartpos, j + 2)
self.error(
"unknown declaration %r in internal subset" % name)
# handle the individual names
meth = getattr(self, "_parse_doctype_" + name)
j = meth(j, declstartpos)
if j < 0:
return j
elif c == "%":
# parameter entity reference
if (j + 1) == n:
# end of buffer; incomplete
return -1
s, j = self._scan_name(j + 1, declstartpos)
if j < 0:
return j
if rawdata[j] == ";":
j = j + 1
elif c == "]":
j = j + 1
while j < n and rawdata[j].isspace():
j = j + 1
if j < n:
if rawdata[j] == ">":
return j
self.updatepos(declstartpos, j)
self.error("unexpected char after internal subset")
else:
return -1
elif c.isspace():
j = j + 1
else:
self.updatepos(declstartpos, j)
self.error("unexpected char %r in internal subset" % c)
# end of buffer reached
return -1
# Internal -- scan past <!ELEMENT declarations
def _parse_doctype_element(self, i, declstartpos):
name, j = self._scan_name(i, declstartpos)
if j == -1:
return -1
# style content model; just skip until '>'
rawdata = self.rawdata
if '>' in rawdata[j:]:
return rawdata.find(">", j) + 1
return -1
# Internal -- scan past <!ATTLIST declarations
def _parse_doctype_attlist(self, i, declstartpos):
rawdata = self.rawdata
name, j = self._scan_name(i, declstartpos)
c = rawdata[j:j+1]
if c == "":
return -1
if c == ">":
return j + 1
while 1:
# scan a series of attribute descriptions; simplified:
# name type [value] [#constraint]
name, j = self._scan_name(j, declstartpos)
if j < 0:
return j
c = rawdata[j:j+1]
if c == "":
return -1
if c == "(":
# an enumerated type; look for ')'
if ")" in rawdata[j:]:
j = rawdata.find(")", j) + 1
else:
return -1
while rawdata[j:j+1].isspace():
j = j + 1
if not rawdata[j:]:
# end of buffer, incomplete
return -1
else:
name, j = self._scan_name(j, declstartpos)
c = rawdata[j:j+1]
if not c:
return -1
if c in "'\"":
m = _declstringlit_match(rawdata, j)
if m:
j = m.end()
else:
return -1
c = rawdata[j:j+1]
if not c:
return -1
if c == "#":
if rawdata[j:] == "#":
# end of buffer
return -1
name, j = self._scan_name(j + 1, declstartpos)
if j < 0:
return j
c = rawdata[j:j+1]
if not c:
return -1
if c == '>':
# all done
return j + 1
# Internal -- scan past <!NOTATION declarations
def _parse_doctype_notation(self, i, declstartpos):
name, j = self._scan_name(i, declstartpos)
if j < 0:
return j
rawdata = self.rawdata
while 1:
c = rawdata[j:j+1]
if not c:
# end of buffer; incomplete
return -1
if c == '>':
return j + 1
if c in "'\"":
m = _declstringlit_match(rawdata, j)
if not m:
return -1
j = m.end()
else:
name, j = self._scan_name(j, declstartpos)
if j < 0:
return j
# Internal -- scan past <!ENTITY declarations
def _parse_doctype_entity(self, i, declstartpos):
rawdata = self.rawdata
if rawdata[i:i+1] == "%":
j = i + 1
while 1:
c = rawdata[j:j+1]
if not c:
return -1
if c.isspace():
j = j + 1
else:
break
else:
j = i
name, j = self._scan_name(j, declstartpos)
if j < 0:
return j
while 1:
c = self.rawdata[j:j+1]
if not c:
return -1
if c in "'\"":
m = _declstringlit_match(rawdata, j)
if m:
j = m.end()
else:
return -1 # incomplete
elif c == ">":
return j + 1
else:
name, j = self._scan_name(j, declstartpos)
if j < 0:
return j
# Internal -- scan a name token and the new position and the token, or
# return -1 if we've reached the end of the buffer.
def _scan_name(self, i, declstartpos):
rawdata = self.rawdata
n = len(rawdata)
if i == n:
return None, -1
m = _declname_match(rawdata, i)
if m:
s = m.group()
name = s.strip()
if (i + len(s)) == n:
return None, -1 # end of buffer
return name.lower(), m.end()
else:
self.updatepos(declstartpos, i)
self.error("expected name token at %r"
% rawdata[declstartpos:declstartpos+20])
# To be overridden -- handlers for unknown objects
def unknown_decl(self, data):
pass
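# Illustrative note (not part of this vendored module): ParserBase underpins
# html.parser.HTMLParser; for the input "<!DOCTYPE html>", parse_declaration()
# ends up calling the subclass's handle_decl("DOCTYPE html").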

Binary file not shown.

Binary file not shown.

View File

@ -0,0 +1,44 @@
AUTHORS
=======
PGP key fingerprints are enclosed in parentheses.
* Alex Gaynor <alex.gaynor@gmail.com> (E27D 4AA0 1651 72CB C5D2 AF2B 125F 5C67 DFE9 4084)
* Hynek Schlawack <hs@ox.cx> (C2A0 4F86 ACE2 8ADC F817 DBB7 AE25 3622 7F69 F181)
* Donald Stufft <donald@stufft.io>
* Laurens Van Houtven <_@lvh.io> (D9DC 4315 772F 8E91 DD22 B153 DFD1 3DF7 A8DD 569B)
* Christian Heimes <christian@python.org>
* Paul Kehrer <paul.l.kehrer@gmail.com> (05FD 9FA1 6CF7 5735 0D91 A560 235A E5F1 29F9 ED98)
* Jarret Raim <jarito@gmail.com>
* Alex Stapleton <alexs@prol.etari.at> (A1C7 E50B 66DE 39ED C847 9665 8E3C 20D1 9BD9 5C4C)
* David Reid <dreid@dreid.org> (0F83 CC87 B32F 482B C726 B58A 9FBF D8F4 DA89 6D74)
* Matthew Lefkowitz <glyph@twistedmatrix.com> (06AB F638 E878 CD29 1264 18AB 7EC2 8125 0FBC 4A07)
* Konstantinos Koukopoulos <koukopoulos@gmail.com> (D6BD 52B6 8C99 A91C E2C8 934D 3300 566B 3A46 726E)
* Stephen Holsapple <sholsapp@gmail.com>
* Terry Chia <terrycwk1994@gmail.com>
* Matthew Iversen <matt@notevencode.com> (2F04 3DCC D6E6 D5AC D262 2E0B C046 E8A8 7452 2973)
* Mohammed Attia <skeuomorf@gmail.com>
* Michael Hart <michael.hart1994@gmail.com>
* Mark Adams <mark@markadams.me> (A18A 7DD3 283C CF2A B0CE FE0E C7A0 5E3F C972 098C)
* Gregory Haynes <greg@greghaynes.net> (6FB6 44BF 9FD0 EBA2 1CE9 471F B08F 42F9 0DC6 599F)
* Chelsea Winfree <chelsea.winfree@gmail.com>
* Steven Buss <steven.buss@gmail.com> (1FB9 2EC1 CF93 DFD6 B47F F583 B1A5 6C22 290D A4C3)
* Andre Caron <andre.l.caron@gmail.com>
* Jiangge Zhang <tonyseek@gmail.com> (BBEC 782B 015F 71B1 5FF7 EACA 1A8C AA98 255F 5000)
* Major Hayden <major@mhtx.net> (1BF9 9264 9596 0033 698C 252B 7370 51E0 C101 1FB1)
* Phoebe Queen <foibey@gmail.com> (10D4 7741 AB65 50F4 B264 3888 DA40 201A 072B C1FA)
* Google Inc.
* Amaury Forgeot d'Arc <amauryfa@google.com>
* Dirkjan Ochtman <dirkjan@ochtman.nl> (25BB BAC1 13C1 BFD5 AA59 4A4C 9F96 B929 3038 0381)
* Maximilian Hils <max@maximilianhils.com>
* Simo Sorce <simo@redhat.com>
* Thomas Sileo <t@a4.io>
* Fraser Tweedale <ftweedal@redhat.com>
* Ofek Lev <ofekmeister@gmail.com> (FFB6 B92B 30B1 7848 546E 9912 972F E913 DAD5 A46E)
* Erik Daguerre <fallenwolf@wolfthefallen.com>
* Aviv Palivoda <palaviv@gmail.com>
* Chris Wolfe <chriswwolfe@gmail.com>
* Jeremy Lainé <jeremy.laine@m4x.org>
* Denis Gladkikh <denis@gladkikh.email>
* John Pacific <me@johnpacific.com> (2CF6 0381 B5EF 29B7 D48C 2020 7BB9 71A0 E891 44D9)
* Marti Raudsepp <marti@juffo.org>

View File

@ -0,0 +1 @@
pip

View File

@ -0,0 +1,6 @@
This software is made available under the terms of *either* of the licenses
found in LICENSE.APACHE or LICENSE.BSD. Contributions to cryptography are made
under the terms of *both* these licenses.
The code used in the OS random engine is derived from CPython, and is licensed
under the terms of the PSF License Agreement.

View File

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
https://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -0,0 +1,27 @@
Copyright (c) Individual contributors.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of PyCA Cryptography nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -0,0 +1,41 @@
1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and
the Individual or Organization ("Licensee") accessing and otherwise using Python
2.7.12 software in source or binary form and its associated documentation.
2. Subject to the terms and conditions of this License Agreement, PSF hereby
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
analyze, test, perform and/or display publicly, prepare derivative works,
distribute, and otherwise use Python 2.7.12 alone or in any derivative
version, provided, however, that PSF's License Agreement and PSF's notice of
copyright, i.e., "Copyright © 2001-2016 Python Software Foundation; All Rights
Reserved" are retained in Python 2.7.12 alone or in any derivative version
prepared by Licensee.
3. In the event Licensee prepares a derivative work that is based on or
incorporates Python 2.7.12 or any part thereof, and wants to make the
derivative work available to others as provided herein, then Licensee hereby
agrees to include in any such work a brief summary of the changes made to Python
2.7.12.
4. PSF is making Python 2.7.12 available to Licensee on an "AS IS" basis.
PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF
EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR
WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE
USE OF PYTHON 2.7.12 WILL NOT INFRINGE ANY THIRD PARTY RIGHTS.
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON 2.7.12
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF
MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 2.7.12, OR ANY DERIVATIVE
THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material breach of
its terms and conditions.
7. Nothing in this License Agreement shall be deemed to create any relationship
of agency, partnership, or joint venture between PSF and Licensee. This License
Agreement does not grant permission to use PSF trademarks or trade name in a
trademark sense to endorse or promote products or services of Licensee, or any
third party.
8. By copying, installing or otherwise using Python 2.7.12, Licensee agrees
to be bound by the terms and conditions of this License Agreement.

View File

@ -0,0 +1,136 @@
Metadata-Version: 2.1
Name: cryptography
Version: 3.3.2
Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers.
Home-page: https://github.com/pyca/cryptography
Author: The cryptography developers
Author-email: cryptography-dev@python.org
License: BSD or Apache License, Version 2.0
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: Apache Software License
Classifier: License :: OSI Approved :: BSD License
Classifier: Natural Language :: English
Classifier: Operating System :: MacOS :: MacOS X
Classifier: Operating System :: POSIX
Classifier: Operating System :: POSIX :: BSD
Classifier: Operating System :: POSIX :: Linux
Classifier: Operating System :: Microsoft :: Windows
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Security :: Cryptography
Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*
Description-Content-Type: text/x-rst
License-File: LICENSE
License-File: LICENSE.APACHE
License-File: LICENSE.BSD
License-File: LICENSE.PSF
License-File: AUTHORS.rst
Requires-Dist: six (>=1.4.1)
Requires-Dist: cffi (>=1.12)
Requires-Dist: enum34 ; python_version < '3'
Requires-Dist: ipaddress ; python_version < '3'
Provides-Extra: docs
Requires-Dist: sphinx (!=1.8.0,!=3.1.0,!=3.1.1,>=1.6.5) ; extra == 'docs'
Requires-Dist: sphinx-rtd-theme ; extra == 'docs'
Provides-Extra: docstest
Requires-Dist: doc8 ; extra == 'docstest'
Requires-Dist: pyenchant (>=1.6.11) ; extra == 'docstest'
Requires-Dist: twine (>=1.12.0) ; extra == 'docstest'
Requires-Dist: sphinxcontrib-spelling (>=4.0.1) ; extra == 'docstest'
Provides-Extra: pep8test
Requires-Dist: black ; extra == 'pep8test'
Requires-Dist: flake8 ; extra == 'pep8test'
Requires-Dist: flake8-import-order ; extra == 'pep8test'
Requires-Dist: pep8-naming ; extra == 'pep8test'
Provides-Extra: ssh
Requires-Dist: bcrypt (>=3.1.5) ; extra == 'ssh'
Provides-Extra: test
Requires-Dist: pytest (!=3.9.0,!=3.9.1,!=3.9.2,>=3.6.0) ; extra == 'test'
Requires-Dist: pretend ; extra == 'test'
Requires-Dist: iso8601 ; extra == 'test'
Requires-Dist: pytz ; extra == 'test'
Requires-Dist: hypothesis (!=3.79.2,>=1.11.4) ; extra == 'test'
pyca/cryptography
=================
.. image:: https://img.shields.io/pypi/v/cryptography.svg
:target: https://pypi.org/project/cryptography/
:alt: Latest Version
.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest
:target: https://cryptography.io
:alt: Latest Docs
.. image:: https://github.com/pyca/cryptography/workflows/CI/badge.svg?branch=master
:target: https://github.com/pyca/cryptography/actions?query=workflow%3ACI+branch%3Amaster
.. image:: https://codecov.io/github/pyca/cryptography/coverage.svg?branch=master
:target: https://codecov.io/github/pyca/cryptography?branch=master
``cryptography`` is a package which provides cryptographic recipes and
primitives to Python developers. Our goal is for it to be your "cryptographic
standard library". It supports Python 2.7, Python 3.6+, and PyPy 5.4+.
``cryptography`` includes both high level recipes and low level interfaces to
common cryptographic algorithms such as symmetric ciphers, message digests, and
key derivation functions. For example, to encrypt something with
``cryptography``'s high level symmetric encryption recipe:
.. code-block:: pycon
>>> from cryptography.fernet import Fernet
>>> # Put this somewhere safe!
>>> key = Fernet.generate_key()
>>> f = Fernet(key)
>>> token = f.encrypt(b"A really secret message. Not for prying eyes.")
>>> token
'...'
>>> f.decrypt(token)
'A really secret message. Not for prying eyes.'
You can find more information in the `documentation`_.
You can install ``cryptography`` with:
.. code-block:: console
$ pip install cryptography
For full details see `the installation documentation`_.
Discussion
~~~~~~~~~~
If you run into bugs, you can file them in our `issue tracker`_.
We maintain a `cryptography-dev`_ mailing list for development discussion.
You can also join ``#cryptography-dev`` on Freenode to ask questions or get
involved.
Security
~~~~~~~~
Need to report a security issue? Please consult our `security reporting`_
documentation.
.. _`documentation`: https://cryptography.io/
.. _`the installation documentation`: https://cryptography.io/en/latest/installation.html
.. _`issue tracker`: https://github.com/pyca/cryptography/issues
.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev
.. _`security reporting`: https://cryptography.io/en/latest/security.html

View File

@ -0,0 +1,97 @@
cryptography-3.3.2.dist-info/AUTHORS.rst,sha256=MoKTlP6yOmnLC_KXarHVQP0sItBk11dtZ7LzV0VhNB0,2475
cryptography-3.3.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
cryptography-3.3.2.dist-info/LICENSE,sha256=Q9rSzHUqtyHNmp827OcPtTq3cTVR8tPYaU2OjFoG1uI,323
cryptography-3.3.2.dist-info/LICENSE.APACHE,sha256=qsc7MUj20dcRHbyjIJn2jSbGRMaBOuHk8F9leaomY_4,11360
cryptography-3.3.2.dist-info/LICENSE.BSD,sha256=YCxMdILeZHndLpeTzaJ15eY9dz2s0eymiSMqtwCPtPs,1532
cryptography-3.3.2.dist-info/LICENSE.PSF,sha256=aT7ApmKzn5laTyUrA6YiKUVHDBtvEsoCkY5O_g32S58,2415
cryptography-3.3.2.dist-info/METADATA,sha256=633UPUD3ojepCqe83P99A1w51VeAscaDTg3CS1ov7bw,5190
cryptography-3.3.2.dist-info/RECORD,,
cryptography-3.3.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
cryptography-3.3.2.dist-info/WHEEL,sha256=qN4i5kDbMd9ITQn9KMACG44Nnrayu7b_5zsXEXpun_M,110
cryptography-3.3.2.dist-info/direct_url.json,sha256=rJ9-4Dc7CpzxT6pTLmKzAHjQup8UFVRSwnlP_eoguio,174
cryptography-3.3.2.dist-info/top_level.txt,sha256=rR2wh6A6juD02TBZNJqqonh8x9UP9Sa5Z9Hl1pCPCiM,31
cryptography/__about__.py,sha256=x2f7Chx8oX2tr7vddOVqh8E-cMot6emZefrup1uQY6Y,835
cryptography/__init__.py,sha256=lJ5HUOGCKi9r-XG4Y3qXq9dhCFv8RqwZKZgkQjQLboA,964
cryptography/exceptions.py,sha256=NPtDqIq1lsQ1Gb1BXkjsGIvbMrWMaKCaT8epiSgi010,1259
cryptography/fernet.py,sha256=sg5RNOCKx9BrPV6wIfyXB9sDWJcw9-GPcPgN4lVmr8w,5980
cryptography/hazmat/__init__.py,sha256=hEPNQw8dgjIPIn42qaLwXNRLCyTGNZeSvkQb57DPhbs,483
cryptography/hazmat/_der.py,sha256=NkwxQBcrR_KMAZCM3WKidXgx8CHFVU5iBnoFIrhQMQs,5205
cryptography/hazmat/_oid.py,sha256=3L1KLxAsQJJoy15ZCl0T4I-PU-DVvzGS-ZTdS-PNy14,2432
cryptography/hazmat/backends/__init__.py,sha256=EEhjIZgqApO7coGuybLXyaEaWIHcdg8oC0i2vxQ4RSI,616
cryptography/hazmat/backends/interfaces.py,sha256=GXySHrpGLgeTrjUgxOYtK6viaphO1dDKAOA95JFj_pM,10770
cryptography/hazmat/backends/openssl/__init__.py,sha256=k4DMe228_hTuB2kY3Lwk62JdI3EmCd7VkV01zJm57ps,336
cryptography/hazmat/backends/openssl/aead.py,sha256=ljOSkI7NXgXi9OyfHjm9J07m3EVHFNm9kfHAIogSWtc,5765
cryptography/hazmat/backends/openssl/backend.py,sha256=CwITPFn7F3Bjxr_W6xFJcIIv-MLlQLJFdcnso8SS_U0,106372
cryptography/hazmat/backends/openssl/ciphers.py,sha256=aDTU8pMDjl2N3AKcYZO2jXpeqW9mV5rIOp0guPpKlp4,8608
cryptography/hazmat/backends/openssl/cmac.py,sha256=n34WXNXt-r0trp207u0cSKwGMth8qEiEs2jjgmHNtWE,2855
cryptography/hazmat/backends/openssl/decode_asn1.py,sha256=BS2Y-4ZudWl-CB_fZ0YqVYIOQrnv7ziOhjpo-QIq8_o,32332
cryptography/hazmat/backends/openssl/dh.py,sha256=1fZn8one2aSla85LIe6vXbf0qoLTDS-B7tYMcrJshnY,10239
cryptography/hazmat/backends/openssl/dsa.py,sha256=Cp1w1Z6J_PEW-Qd2RAzfC04MU9YxqYOaef57f_QVpYI,10036
cryptography/hazmat/backends/openssl/ec.py,sha256=c3DUb_AZ215f9AaAHyOKqBoNUSd6sFbUIDMbLrbLuLA,12071
cryptography/hazmat/backends/openssl/ed25519.py,sha256=fInLppwHZnYgwkQQ5MdsOCux_y3kfW-290EbGn-0bKE,5618
cryptography/hazmat/backends/openssl/ed448.py,sha256=Wp7dkPjb2Tyjzguh1bHwzXItMPqJq_A9-D7zCwHqnc8,5574
cryptography/hazmat/backends/openssl/encode_asn1.py,sha256=5tQmLfLEyKTm3Eg_GfhGJcPtuQ0Ef2OdbsgxVC2fcMc,24075
cryptography/hazmat/backends/openssl/hashes.py,sha256=n6XJwCI-2OU6FndiatFbE_Pgb3f1NoVuHwpCMW0z340,3117
cryptography/hazmat/backends/openssl/hmac.py,sha256=D_YcF2OiLSfrWtA7fksLiKWcaVh-G1igpqNHuM5l62c,2933
cryptography/hazmat/backends/openssl/ocsp.py,sha256=NEGrc30GfPBLbjnt-K3K48-dZK2dEyQa2oCyv7-laMs,14028
cryptography/hazmat/backends/openssl/poly1305.py,sha256=LiovW4SvSUhWA109IkLlw4nnokmF9m24V4pGxqoPmMI,2393
cryptography/hazmat/backends/openssl/rsa.py,sha256=hcBFzZ51LA2UJfVGf00xul5nWLeT-9Sz7ufCKls195w,19577
cryptography/hazmat/backends/openssl/utils.py,sha256=-JMyOgOlplSWL5zTu_3-vl5_gE1FBK3ew6n0Zs35QYo,2348
cryptography/hazmat/backends/openssl/x25519.py,sha256=-MNAPGS_DZ37-skSn17-gIakFLoJmuNx8PlC8s2-00g,4488
cryptography/hazmat/backends/openssl/x448.py,sha256=5WH3Rw7kZGLS3EDDVzjrYriAG-tzUnyWetyqMYTiEhA,4011
cryptography/hazmat/backends/openssl/x509.py,sha256=EMN9qSPW1BVZ1VAOHzgi8oO8idI8iOb0wrWjdrr5FpI,21620
cryptography/hazmat/bindings/__init__.py,sha256=0wGw2OF9R7fHX7NWENCmrsYigbXHU2ojgn-N4Rkjs9U,246
cryptography/hazmat/bindings/_openssl.abi3.dll,sha256=iUsiW2tZHOzE7jp78OHA2-g1hAV7oYvFWiRkXhEuEww,2366796
cryptography/hazmat/bindings/_padding.abi3.dll,sha256=arMD6x2xM0At_NxI28-TvHmvSmTPmGYhE_eEaiG3xw8,77631
cryptography/hazmat/bindings/openssl/__init__.py,sha256=0wGw2OF9R7fHX7NWENCmrsYigbXHU2ojgn-N4Rkjs9U,246
cryptography/hazmat/bindings/openssl/_conditional.py,sha256=6-EwpZeSqbLNRPhzsXFPTO498wLGaDXW-LvkqiJm4vQ,8291
cryptography/hazmat/bindings/openssl/binding.py,sha256=yT5e2JrzANd6FG__us6aj9ocb48EnNJK61cKwrpeM08,5816
cryptography/hazmat/primitives/__init__.py,sha256=0wGw2OF9R7fHX7NWENCmrsYigbXHU2ojgn-N4Rkjs9U,246
cryptography/hazmat/primitives/asymmetric/__init__.py,sha256=WhUn3tGxoLAxGAsZHElJ2aOILXSh55AZi04MBudYmQA,1020
cryptography/hazmat/primitives/asymmetric/dh.py,sha256=kuyPcccLeOYy4OuGkegEyqMSzRo-QyjlUw463jzfrGs,5859
cryptography/hazmat/primitives/asymmetric/dsa.py,sha256=XuE2mUXl-fXi2q7w22qKyiCTFUz-852cFTwV4WOUQgw,7181
cryptography/hazmat/primitives/asymmetric/ec.py,sha256=2rorlIEXHGkLnI8bbeFKMRr-gJfEipuJigQDQh4xk7w,14006
cryptography/hazmat/primitives/asymmetric/ed25519.py,sha256=rfImUQH-PcTliuxiF864aSww7dQCWVwZgjPPbDXiGlI,2401
cryptography/hazmat/primitives/asymmetric/ed448.py,sha256=JyrEHwYF_Ftj_E60t-Gmvm3CGnQSxVbasptZBW84eBk,2328
cryptography/hazmat/primitives/asymmetric/padding.py,sha256=2pPqBu4dGERtFPHnPRTZ0iRO_XY9hr9RTwlTcr_J5bw,2250
cryptography/hazmat/primitives/asymmetric/rsa.py,sha256=MgxdkA8PWlXGt2lMPpnV9QYYvQnYTFjb0RtJRDjnlfU,10672
cryptography/hazmat/primitives/asymmetric/utils.py,sha256=w2lQIcKrFvS9D_Ekt7qWed39TXM6hueg72FFrfwIo58,1201
cryptography/hazmat/primitives/asymmetric/x25519.py,sha256=vrN1jcO6sjbQrc7auIlf2aEvcH3P17cKUuaVXxaTvxI,2277
cryptography/hazmat/primitives/asymmetric/x448.py,sha256=u3v-L1IJIG2RyLVTh7FMkXh_Y-oVb3HdEj5b1c-JlKk,2255
cryptography/hazmat/primitives/ciphers/__init__.py,sha256=mi4yR3Fxc4-Au3yX4PyhFNaiFn0yywZKiTzecdI77EI,647
cryptography/hazmat/primitives/ciphers/aead.py,sha256=lXgZOxlbxtBp1k7KmlqgiN_Xu6yPsJE_DNJLwsgm0o0,6134
cryptography/hazmat/primitives/ciphers/algorithms.py,sha256=GKFIhvOoqsYscjjP7onl8XnAmOa-kSQ6jiMMS2zeGBM,4225
cryptography/hazmat/primitives/ciphers/base.py,sha256=vceN5l7yxLWmNTptlzC3gmfFY-K_ANKk4HdNl2Ptz2k,7253
cryptography/hazmat/primitives/ciphers/modes.py,sha256=-0VTtHN3kKO_Jyc_iLAgp8bqtsXJY5V2F__Bkr6nvtM,6805
cryptography/hazmat/primitives/cmac.py,sha256=eJpysDFbc7W6OiplzWKWrL4owy30Cq6Nsao8mzapqbE,2130
cryptography/hazmat/primitives/constant_time.py,sha256=_x4mrHW-9ihfgY89BwhATFiIuG2_1l-HMkCxmOUkydM,430
cryptography/hazmat/primitives/hashes.py,sha256=dzL1QcEFj4eElzczo8QmuOeooZ96EFwBy3c-6cpew0w,6315
cryptography/hazmat/primitives/hmac.py,sha256=AYzTQMDiruKmZKKLR6ceVjX5yQ3mpciWIx__tpNLyr4,2306
cryptography/hazmat/primitives/kdf/__init__.py,sha256=nod5HjPswjZr8wFp6Tsu6en9blHYF3khgXI5R0zIcnM,771
cryptography/hazmat/primitives/kdf/concatkdf.py,sha256=gW-xAU6sPE6aZhg_G9ucZ5b_uctSbPcfSpHyyt7Q8MA,4095
cryptography/hazmat/primitives/kdf/hkdf.py,sha256=SJJQzeQ9OH0t3tUdUq2GT6IQXv9oPLDjulT7wnLTkMg,3598
cryptography/hazmat/primitives/kdf/kbkdf.py,sha256=awf7zessT-amokp2VBdyW8TWrDnmTXGzHHX4scBO9Uc,5100
cryptography/hazmat/primitives/kdf/pbkdf2.py,sha256=RYexIlGomzUEU-_QQXTW81rdY5YVZB30XrfnJq8NsIU,2220
cryptography/hazmat/primitives/kdf/scrypt.py,sha256=C0C3m-gEnlLlAVxzRFdzx1mfDuWs_BkZDoSV2hfahfk,2268
cryptography/hazmat/primitives/kdf/x963kdf.py,sha256=26-b_ckyUYiqbWM9mZ7FEWbuvR7eTLksIeWQeW1TJ04,2407
cryptography/hazmat/primitives/keywrap.py,sha256=fF-HA5ETz9RH8s8LB94uDoWRLPvwPkYAC5_Kylej6sA,5730
cryptography/hazmat/primitives/padding.py,sha256=zeJmjPfX8Cx_gqO45FDBNe8iN2trPr0ULyBsz1Kmyu4,6173
cryptography/hazmat/primitives/poly1305.py,sha256=NNC1WYiYQGNJ8mblkaHRxBm1PLdaKRzkILocsYH5zgY,1679
cryptography/hazmat/primitives/serialization/__init__.py,sha256=eLzmqoHgVlPK1aTGiEfpaIrUf9mX5PRrM7IHEc8FeQU,1132
cryptography/hazmat/primitives/serialization/base.py,sha256=ZSzV-5zl2Bt_mmihcPqieBC6UjMSryUaehgExvjZksg,2249
cryptography/hazmat/primitives/serialization/pkcs12.py,sha256=oJxangAtSSsniXfguLaoPgejVchs-VpCTBdWSW4rF54,1853
cryptography/hazmat/primitives/serialization/pkcs7.py,sha256=vGlw_2R4VeLWtoRxkfz8fMLE5i_CCdaY9bEtYMV62rk,4625
cryptography/hazmat/primitives/serialization/ssh.py,sha256=a_FKWuqpHO-RzUBEoBWS5q7WyMZwS56MD92Wr6j3KBA,21682
cryptography/hazmat/primitives/twofactor/__init__.py,sha256=BWrm3DKDoAa281E7U_nzz8v44OmAiXmlIycFcsehwfE,288
cryptography/hazmat/primitives/twofactor/hotp.py,sha256=2uCTCTHMFmWL9kOjA890F0CVrljsvOjJYISKBup7GyI,2679
cryptography/hazmat/primitives/twofactor/totp.py,sha256=iJRTxPNWPdsTQHePgSE6KGdRNURTv188VNqpyvBwvBY,1780
cryptography/hazmat/primitives/twofactor/utils.py,sha256=ZKZSOL2cLsGCsSNfx3kYlYt91A4bcU1w9up2EL1hwaA,982
cryptography/utils.py,sha256=QpZgLOABfeaDciPlrF-W8giJiOL2AzU6Ajjq6h6WkzY,4745
cryptography/x509/__init__.py,sha256=1juFH-nvLS7kU0x52VMN7pN6s7H55Y86NqUszaBhhi4,7699
cryptography/x509/base.py,sha256=burWvWUouPiPzmPUzNZUzEe64gR-WMkNyiDpjYCvEc8,26409
cryptography/x509/certificate_transparency.py,sha256=eJ9lrITdyMn4XsrcVdrTaFVI_RR7mX_VzMZyiaEpbps,1000
cryptography/x509/extensions.py,sha256=HOwYCKAy-4qK5eWWYB4UnJejC9Ru3FBQMsLXodasR9Y,52924
cryptography/x509/general_name.py,sha256=nNIG--rJ-TzREkhEq727Fe3tjvxVflW7iPIMjJs6LrI,7942
cryptography/x509/name.py,sha256=j2khdee8jQBkbZd4RV60ji8V0ZngbsB07i5cnflDBPk,8291
cryptography/x509/ocsp.py,sha256=nr5Bk3B_b9LaG-1njEmo0f_smAg2B6CU5Wr6wMr81MI,13245
cryptography/x509/oid.py,sha256=Wp6Y4WMrFa7vsUmV4tbMvPPAl0Iiu4QxQ7on2np94QU,12594

View File

@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.37.0)
Root-Is-Purelib: false
Tag: cp39-cp39-cygwin_3_3_3_x86_64

View File

@ -0,0 +1 @@
{"archive_info": {}, "url": "file:///pub/devel/python/python-cryptography/python-cryptography-3.3.2-1.x86_64/build/dist/cryptography-3.3.2-cp39-cp39-cygwin_3_3_3_x86_64.whl"}

View File

@ -0,0 +1,3 @@
_openssl
_padding
cryptography

View File

@ -0,0 +1,180 @@
'''
OpenCV Python binary extension loader
'''
import os
import importlib
import sys
__all__ = []
try:
import numpy
import numpy.core.multiarray
except ImportError:
print('OpenCV bindings requires "numpy" package.')
print('Install it via command:')
print(' pip install numpy')
raise
# TODO
# is_x64 = sys.maxsize > 2**32
def __load_extra_py_code_for_module(base, name, enable_debug_print=False):
module_name = "{}.{}".format(__name__, name)
export_module_name = "{}.{}".format(base, name)
native_module = sys.modules.pop(module_name, None)
try:
py_module = importlib.import_module(module_name)
except ImportError as err:
if enable_debug_print:
print("Can't load Python code for module:", module_name,
". Reason:", err)
# Extension doesn't contain extra py code
return False
if not hasattr(base, name):
setattr(sys.modules[base], name, py_module)
sys.modules[export_module_name] = py_module
# If it is C extension module it is already loaded by cv2 package
if native_module:
setattr(py_module, "_native", native_module)
for k, v in filter(lambda kv: not hasattr(py_module, kv[0]),
native_module.__dict__.items()):
if enable_debug_print: print(' symbol: {} = {}'.format(k, v))
setattr(py_module, k, v)
return True
def __collect_extra_submodules(enable_debug_print=False):
def modules_filter(module):
return all((
# module is not internal
not module.startswith("_"),
# it is not a file
os.path.isdir(os.path.join(_extra_submodules_init_path, module))
))
if sys.version_info[0] < 3:
if enable_debug_print:
print("Extra submodules is loaded only for Python 3")
return []
__INIT_FILE_PATH = os.path.abspath(__file__)
_extra_submodules_init_path = os.path.dirname(__INIT_FILE_PATH)
return filter(modules_filter, os.listdir(_extra_submodules_init_path))
def bootstrap():
import sys
import copy
save_sys_path = copy.copy(sys.path)
if hasattr(sys, 'OpenCV_LOADER'):
print(sys.path)
raise ImportError('ERROR: recursion is detected during loading of "cv2" binary extensions. Check OpenCV installation.')
sys.OpenCV_LOADER = True
DEBUG = False
if hasattr(sys, 'OpenCV_LOADER_DEBUG'):
DEBUG = True
import platform
if DEBUG: print('OpenCV loader: os.name="{}" platform.system()="{}"'.format(os.name, str(platform.system())))
LOADER_DIR = os.path.dirname(os.path.abspath(os.path.realpath(__file__)))
PYTHON_EXTENSIONS_PATHS = []
BINARIES_PATHS = []
g_vars = globals()
l_vars = locals()
if sys.version_info[:2] < (3, 0):
from . load_config_py2 import exec_file_wrapper
else:
from . load_config_py3 import exec_file_wrapper
def load_first_config(fnames, required=True):
for fname in fnames:
fpath = os.path.join(LOADER_DIR, fname)
if not os.path.exists(fpath):
if DEBUG: print('OpenCV loader: config not found, skip: {}'.format(fpath))
continue
if DEBUG: print('OpenCV loader: loading config: {}'.format(fpath))
exec_file_wrapper(fpath, g_vars, l_vars)
return True
if required:
raise ImportError('OpenCV loader: missing configuration file: {}. Check OpenCV installation.'.format(fnames))
load_first_config(['config.py'], True)
load_first_config([
'config-{}.{}.py'.format(sys.version_info[0], sys.version_info[1]),
'config-{}.py'.format(sys.version_info[0])
], True)
if DEBUG: print('OpenCV loader: PYTHON_EXTENSIONS_PATHS={}'.format(str(l_vars['PYTHON_EXTENSIONS_PATHS'])))
if DEBUG: print('OpenCV loader: BINARIES_PATHS={}'.format(str(l_vars['BINARIES_PATHS'])))
applySysPathWorkaround = False
if hasattr(sys, 'OpenCV_REPLACE_SYS_PATH_0'):
applySysPathWorkaround = True
else:
try:
BASE_DIR = os.path.dirname(LOADER_DIR)
if sys.path[0] == BASE_DIR or os.path.realpath(sys.path[0]) == BASE_DIR:
applySysPathWorkaround = True
except:
if DEBUG: print('OpenCV loader: exception during checking workaround for sys.path[0]')
pass # applySysPathWorkaround is False
for p in reversed(l_vars['PYTHON_EXTENSIONS_PATHS']):
sys.path.insert(1 if not applySysPathWorkaround else 0, p)
if os.name == 'nt':
if sys.version_info[:2] >= (3, 8): # https://github.com/python/cpython/pull/12302
for p in l_vars['BINARIES_PATHS']:
try:
os.add_dll_directory(p)
except Exception as e:
if DEBUG: print('Failed os.add_dll_directory(): '+ str(e))
pass
os.environ['PATH'] = ';'.join(l_vars['BINARIES_PATHS']) + ';' + os.environ.get('PATH', '')
if DEBUG: print('OpenCV loader: PATH={}'.format(str(os.environ['PATH'])))
else:
# amending of LD_LIBRARY_PATH works for sub-processes only
os.environ['LD_LIBRARY_PATH'] = ':'.join(l_vars['BINARIES_PATHS']) + ':' + os.environ.get('LD_LIBRARY_PATH', '')
if DEBUG: print("Relink everything from native cv2 module to cv2 package")
py_module = sys.modules.pop("cv2")
native_module = importlib.import_module("cv2")
sys.modules["cv2"] = py_module
setattr(py_module, "_native", native_module)
for item_name, item in filter(lambda kv: kv[0] not in ("__file__", "__loader__", "__spec__",
"__name__", "__package__"),
native_module.__dict__.items()):
if item_name not in g_vars:
g_vars[item_name] = item
sys.path = save_sys_path # multiprocessing should start from bootstrap code (https://github.com/opencv/opencv/issues/18502)
try:
del sys.OpenCV_LOADER
except Exception as e:
if DEBUG:
print("Exception during delete OpenCV_LOADER:", e)
if DEBUG: print('OpenCV loader: binary extension... OK')
for submodule in __collect_extra_submodules(DEBUG):
if __load_extra_py_code_for_module("cv2", submodule, DEBUG):
if DEBUG: print("Extra Python code for", submodule, "is loaded")
if DEBUG: print('OpenCV loader: DONE')
bootstrap()

View File

@ -0,0 +1,3 @@
PYTHON_EXTENSIONS_PATHS = [
os.path.join(LOADER_DIR, 'python-3.9')
] + PYTHON_EXTENSIONS_PATHS

View File

@ -0,0 +1,5 @@
import os
BINARIES_PATHS = [
os.path.join(os.path.join(LOADER_DIR, '../../../../'), 'lib')
] + BINARIES_PATHS

View File

@ -0,0 +1,6 @@
# flake8: noqa
import sys
if sys.version_info[:2] < (3, 0):
def exec_file_wrapper(fpath, g_vars, l_vars):
execfile(fpath, g_vars, l_vars)

View File

@ -0,0 +1,9 @@
# flake8: noqa
import os
import sys
if sys.version_info[:2] >= (3, 0):
def exec_file_wrapper(fpath, g_vars, l_vars):
with open(fpath) as f:
code = compile(f.read(), os.path.basename(fpath), 'exec')
exec(code, g_vars, l_vars)

View File

@ -0,0 +1,33 @@
__all__ = []
import sys
import numpy as np
import cv2 as cv
# NumPy documentation: https://numpy.org/doc/stable/user/basics.subclassing.html
class Mat(np.ndarray):
'''
cv.Mat wrapper for numpy array.
Stores extra metadata information how to interpret and process of numpy array for underlying C++ code.
'''
def __new__(cls, arr, **kwargs):
obj = arr.view(Mat)
return obj
def __init__(self, arr, **kwargs):
self.wrap_channels = kwargs.pop('wrap_channels', getattr(arr, 'wrap_channels', False))
if len(kwargs) > 0:
raise TypeError('Unknown parameters: {}'.format(repr(kwargs)))
def __array_finalize__(self, obj):
if obj is None:
return
self.wrap_channels = getattr(obj, 'wrap_channels', None)
Mat.__module__ = cv.__name__
cv.Mat = Mat
cv._registerMatType(Mat)
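# Usage sketch (illustrative only): wrap an ndarray and carry the extra flag,
# which survives views and slices via __array_finalize__:
#   m = Mat(np.zeros((4, 4, 3), np.uint8), wrap_channels=True)
#   m[..., 0].wrap_channels  # -> True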

View File

@ -0,0 +1 @@
from .version import get_ocv_version

View File

@ -0,0 +1,5 @@
import cv2
def get_ocv_version():
return getattr(cv2, "__version__", "unavailable")

View File

@ -0,0 +1,14 @@
from collections import namedtuple
import cv2
NativeMethodPatchedResult = namedtuple("NativeMethodPatchedResult",
("py", "native"))
def testOverwriteNativeMethod(arg):
return NativeMethodPatchedResult(
arg + 1,
cv2.utils._native.testOverwriteNativeMethod(arg)
)

Binary files not shown.
Some files were not shown because too many files have changed in this diff.