# -*- coding: UTF-8 -*-
"""
@Project: __init__.py
@File: create_csv_data.py
@Function:
@Contact:
@Author: SHJ
@Date: 2021/9/27 10:17
@Version: 1.0.0
"""
from osgeo import gdal
from osgeo import osr
import numpy as np
import math
import datetime
import csv
import random
from tool.algorithm.image.ImageHandle import ImageHandler


def get_data(filename):
    """
    :param filename: path to the tif file
    :return: array containing the data of all bands, or None if the file cannot be opened
    """
    gdal.AllRegister()
    dataset = gdal.Open(filename)
    if dataset is None:
        return None
    im_width = dataset.RasterXSize
    im_height = dataset.RasterYSize
    im_data = dataset.ReadAsArray(0, 0, im_width, im_height)
    del dataset
    return im_data
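

# Note: create_data() below indexes HH[0] and HH[1], i.e. it assumes each
# polarization tif holds at least two bands. For a multi-band raster,
# ReadAsArray() returns a 3-D (band, row, col) array; for a single-band raster
# it returns a 2-D array. Usage sketch (the file name is hypothetical):
#   bands = get_data('GF3_example_HH.tif')
#   if bands is not None:
#       print(bands.shape)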


def create_data(dir, tif_name_list):
    HH = get_data(dir + tif_name_list[0])
    HV = get_data(dir + tif_name_list[1])
    VH = get_data(dir + tif_name_list[2])
    VV = get_data(dir + tif_name_list[3])

    # Sum the first two bands of every polarization, then take the magnitude.
    data = HH[0] + HH[1] + HV[0] + HV[1] + VH[0] + VH[1] + VV[0] + VV[1]
    # data = HH + HV + VH + VV
    data = np.abs(data)

    # Min-max normalization to roughly [0, 1].
    data_max = int(np.max(data))
    data_min = int(np.min(data))
    data_range = data_max - data_min
    normalization_data = (data - data_min) / data_range
    return normalization_data
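

# A hedged variant of the scaling in create_data(): the same min-max idea
# (without the int() truncation used above), plus a guard for a constant image,
# where data_range would be zero and the division would fail. The helper name
# normalize01 is not part of the original script.
def normalize01(data):
    data_min = float(np.min(data))
    data_range = float(np.max(data)) - data_min
    if data_range == 0:
        # Constant input: return an all-zero array instead of dividing by zero.
        return np.zeros_like(data, dtype=float)
    return (data - data_min) / data_range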


def create_data1(dir, tif_name_list):
    HH = get_data(dir + tif_name_list[0])
    HV = get_data(dir + tif_name_list[1])
    VH = get_data(dir + tif_name_list[2])
    VV = get_data(dir + tif_name_list[3])

    # data = HH[0] + HH[1] + HV[0] + HV[1] + VH[0] + VH[1] + VV[0] + VV[1]
    data = HH + HV + VH + VV
    data = np.abs(data)
    return data


def img2lonlat(dir, tif_name_list, csv_path, lon_num, lat_num):
    gdal.AllRegister()

    dataset = gdal.Open(dir + tif_name_list[0])
    rows = dataset.RasterYSize
    cols = dataset.RasterXSize
    data = create_data(dir, tif_name_list)
    # data = create_data1(dir, tif_name_list)
    trans = dataset.GetGeoTransform()

    # Sample a lon_num x lat_num grid of pixels and write one CSV row per sample.
    with open(csv_path, 'w', newline='') as f:
        csv_writer = csv.writer(f)
        csv_writer.writerow(['data_time', 'lon', 'lat', 'value'])

        row_step = math.floor(rows / lat_num)
        col_step = math.floor(cols / lon_num)
        for r in range(lat_num):
            for c in range(lon_num):
                row = row_step * r
                col = col_step * c

                # Pixel -> map coordinates via the GDAL geotransform.
                lon = trans[0] + col * trans[1] + row * trans[2]
                lat = trans[3] + col * trans[4] + row * trans[5]

                localtime = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')

                # value = 5  # data[row, col]
                value = random.uniform(6.5, 7.5)  # soil pH in the Suzhou area is 6.5-7.5
                data_list = [localtime, lon, lat, value]
                csv_writer.writerow(data_list)
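

# Sketch of the pixel-to-map conversion used inside img2lonlat above, pulled out
# as a standalone helper for readability. The name pixel2geo is hypothetical and
# not part of the original script; trans is the 6-element tuple returned by
# dataset.GetGeoTransform().
def pixel2geo(trans, row, col):
    """Convert a (row, col) pixel index to (x, y) map coordinates."""
    x = trans[0] + col * trans[1] + row * trans[2]
    y = trans[3] + col * trans[4] + row * trans[5]
    return x, y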


if __name__ == '__main__':
    dir = r'E:/Micro/datas/soil_geo/input_back/GF3_KAS_QPSI_009890_E102.3_N33.6_20180626_L4_AHV_L10003284994/'
    tif_name_list = ['GF3_KAS_QPSI_009890_E102.3_N33.6_20180626_L4_HH_L10003284994.tif',
                     'GF3_KAS_QPSI_009890_E102.3_N33.6_20180626_L4_HV_L10003284994.tif',
                     'GF3_KAS_QPSI_009890_E102.3_N33.6_20180626_L4_VH_L10003284994.tif',
                     'GF3_KAS_QPSI_009890_E102.3_N33.6_20180626_L4_VV_L10003284994.tif']

    csv_path = dir + 'SoilSalinityMeasureData_GF3_KAS_QPSI_009890_E102.3_N33.6_20180626_L1A_AHV_L10003284994_RPC.csv'
    lon_num = 20
    lat_num = 20
    img2lonlat(dir, tif_name_list, csv_path, lon_num, lat_num)