-
Jeremy Auclair authored:
Added more input preparation functions for parcel mode, started writing a global input preparation script. Reused script from modspa for config file.
main_prepare_inputs.py 5.07 KiB
# -*- coding: UTF-8 -*-
# Python
"""
11-09-2023
@author: jeremy auclair

Main script to run the input-preparation pipeline. Based on the parameters
in ``config/config_modspa.json`` it: downloads and extracts Sentinel-2
images, computes and interpolates NDVI (in ``pixel`` or parcel mode), and
requests/format ERA5 weather data for the run period.
"""

if __name__ == '__main__':

    import sys, os  # system management
    currentdir = os.path.dirname(os.path.realpath(__file__))
    # Make the parent package (modspa_pixel) importable when run as a script
    sys.path.insert(0, os.path.dirname(currentdir))

    from dask.distributed import Client  # to parallelise calculations
    import webbrowser  # to open dask dashboard
    from time import time  # to time code execution
    from modspa_pixel.config.config import config  # to import config file
    from modspa_pixel.source.code_toolbox import format_duration  # to print formatted runtime
    from modspa_pixel.preprocessing.input_toolbox import prepare_directories, set_eodag_config_file
    from modspa_pixel.preprocessing.download_S2 import download_S2_data, extract_zip_archives
    from modspa_pixel.preprocessing.download_ERA5_weather import request_ER5_weather

    # Get start time of script
    t0 = time()

    # Declare paths
    config_file = currentdir + os.sep + 'config' + os.sep + 'config_modspa.json'

    # Open config file and load parameters
    config_params = config(config_file)

    # Import functions depending on run mode ('pixel' uses the dask-based
    # raster pipeline, anything else runs the per-parcel pipeline)
    mode = config_params.mode
    if mode == 'pixel':
        from modspa_pixel.preprocessing.calculate_ndvi import calculate_ndvi, interpolate_ndvi
    else:
        from modspa_pixel.preprocessing.calculate_ndvi import calculate_ndvi_parcel
        from modspa_pixel.preprocessing.extract_ndvi import extract_ndvi_stats, filter_raw_ndvi, interpolate_ndvi_parcel
        from modspa_pixel.preprocessing.parcel_to_pixel import convert_dataframe_to_xarray

    # Run parameters
    max_cpu = config_params.max_cpu
    run_name = config_params.run_name
    preferred_provider = config_params.preferred_provider
    start_date = config_params.start_date
    end_date = config_params.end_date
    ndvi_overwrite = config_params.ndvi_overwrite

    # Path parameters
    download_path = config_params.download_path
    shapefile_path = config_params.shapefile_path

    # Prepare directories
    prepare_directories(config_file)

    # Updating the eodag config file if needed
    set_eodag_config_file(config_params.path_to_config_file, download_path, preferred_provider)

    # Generate inputs
    #===== NDVI =====#
    # Output directory depends on the image provider
    if preferred_provider == 'copernicus':
        ndvi_path = download_path + os.sep + 'SCIHUB' + os.sep + 'NDVI' + os.sep + run_name
    else:
        ndvi_path = download_path + os.sep + 'THEIA' + os.sep + 'NDVI' + os.sep + run_name

    # Download optical images
    csv_download_file = ndvi_path + os.sep + 'download.csv'
    raw_images = download_S2_data(start_date, end_date, preferred_provider, csv_download_file, shapefile_path, mode = mode, cloud_cover_limit = config_params.cloud_cover_limit)

    # Extract Zip archives
    csv_extract_file = ndvi_path + os.sep + 'extract.csv'
    extracted_images = extract_zip_archives(download_path, raw_images, preferred_provider, csv_extract_file)

    # Calculate and interpolate NDVI
    if mode == 'pixel':

        # Start a local dask cluster; its dashboard can be opened in a browser
        client = Client()
        if config_params.open_browser:
            webbrowser.open('http://127.0.0.1:8787/status', new=2, autoraise=True)

        # Calculate NDVI
        ndvi_precube = calculate_ndvi(extracted_images, config_file)

        # Interpolate NDVI
        ndvi_cube = interpolate_ndvi(ndvi_precube, config_file)

        # Release dask workers once the NDVI cube is written
        client.close()

    else:

        # Calculate NDVI (returns the directory containing the NDVI rasters)
        csv_ndvi_path_file = ndvi_path + os.sep + 'ndvi.csv'
        ndvi_path = calculate_ndvi_parcel(extracted_images, ndvi_path, csv_ndvi_path_file, overwrite = ndvi_overwrite, max_cpu = max_cpu)

        # Extract NDVI values on shapefile features
        # NOTE(review): max_cpu is hard-coded to 4 here while every other step
        # uses the config value — presumably a memory safeguard; confirm.
        csv_ndvi_extract_file = ndvi_path + os.sep + 'ndvi_extract.csv'
        raw_ndvi = extract_ndvi_stats(ndvi_path, shapefile_path, csv_ndvi_extract_file, max_cpu = 4)

        # Filter NDVI values for each feature
        csv_ndvi_filter_file = ndvi_path + os.sep + 'ndvi_filter.csv'
        filtered_ndvi = filter_raw_ndvi(raw_ndvi, csv_ndvi_filter_file, max_cpu = max_cpu)

        # Interpolate NDVI values to a daily time step
        csv_ndvi_interp_file = ndvi_path + os.sep + 'ndvi_interp.csv'
        interpolated_ndvi = interpolate_ndvi_parcel(filtered_ndvi, csv_ndvi_interp_file, start_date, end_date, max_cpu = max_cpu)

        # Convert DataFrame to netCDF4 file
        ndvi_cube = ndvi_path + os.sep + run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc'
        convert_dataframe_to_xarray(interpolated_ndvi, ndvi_cube, variables = ['NDVI'], data_types = ['u1'])

    #===== Weather =====#
    # Run weather download and formatting script
    # NOTE(review): the raster reference is hard-coded to the 'Aurade_test'
    # site — parameterize before running on other sites.
    weather = request_ER5_weather(config_file, ndvi_cube, shapefile = shapefile_path, mode = mode, raw_S2_image_ref = ndvi_path + os.sep + 'Aurade_test_grid_reference.tif')

    # Print formatted runtime
    print('\nExecution time: ', end = '')
    format_duration(time() - t0)