Skip to content
Snippets Groups Projects
Commit 9d854a0b authored by Jérémy AUCLAIR's avatar Jérémy AUCLAIR
Browse files

Modified Landsat download to include Landsat 9 products.

parent 9dc948b3
No related branches found
No related tags found
No related merge requests found
......@@ -24,7 +24,7 @@ from rasterio.enums import Resampling # reprojection algorithms
from datetime import datetime # manage dates
from dateutil.relativedelta import relativedelta # date math
from p_tqdm import p_map # for multiprocessing with progress bars
from dask.diagnostics import ProgressBar # for simple progress bar with dask
from dask.distributed import progress # for simple progress bar with dask
from psutil import cpu_count # to get number of physical cores available
from modspa_pixel.config.config import config # to import config file
from modspa_pixel.preprocessing.input_toolbox import product_str_to_datetime, read_product_info, get_band_paths
......@@ -70,6 +70,7 @@ def download_ndvi_imagery(config_file: str, scaling: int = 255, acorvi_corr: int
# Set api parameters
url = 'https://earth-search.aws.element84.com/v1'
collection = 'sentinel-2-l2a'
query = {'eo:cloud_cover' : {'lt' : cloud_cover_limit}}
modifier = None
# Set data parameters
......@@ -82,11 +83,12 @@ def download_ndvi_imagery(config_file: str, scaling: int = 255, acorvi_corr: int
elif preferred_provider == 'usgs':
# Set api parameters
url = 'https://planetarycomputer.microsoft.com/api/stac/v1'
collection = 'landsat-8-c2-l2'
collection = 'landsat-c2-l2'
query = {'eo:cloud_cover' : {'lt' : cloud_cover_limit}, 'platform': {'in': ['landsat-8', 'landsat-9']}}
modifier = sign_inplace
# Set data parameters
red, nir, mask_name = 'SR_B4', 'SR_B5', 'QA_PIXEL'
red, nir, mask_name = 'red', 'nir08', 'qa_pixel'
val1, val2 = 21824, 21824
# Set paths
......@@ -117,10 +119,10 @@ def download_ndvi_imagery(config_file: str, scaling: int = 255, acorvi_corr: int
# Create request
client = Client.open(url, modifier = modifier)
search = client.search(collections = [collection], bbox = bbox, datetime = new_start_date + '/' + new_end_date, query = {"eo:cloud_cover" : {"lt" : cloud_cover_limit}}, max_items = 200)
search = client.search(collections = [collection], bbox = bbox, datetime = new_start_date + '/' + new_end_date, query = query, max_items = 200)
# Get data with required bands
data = load(search.items(), bbox = bbox, groupby = "solar_day", bands = bands, chunks = {}, resolution = resolution, resampling = resampling_dict)
data = load(search.items(), bbox = bbox, groupby = 'solar_day', bands = bands, chunks = {}, resolution = resolution, resampling = resampling_dict)
if preferred_provider == 'usgs':
# Scale optical bands
......@@ -157,14 +159,18 @@ def download_ndvi_imagery(config_file: str, scaling: int = 255, acorvi_corr: int
# Save NDVI cube to netcdf
if mode == 'pixel':
# Set file chunk size
if ndvi.dims['y'] > 500 and ndvi.dims['x'] > 500:
file_chunksize = (ndvi.dims['time'], 200, 200)
else:
file_chunksize = (ndvi.dims['time'], ndvi.dims['y'], ndvi.dims['x'])
# TODO: add dask progress bar
# TODO: add functional dask progress bar
# Save NDVI precube
ndvi.to_netcdf(ndvi_precube_path, encoding = {"NDVI": {"dtype": "u1", "_FillValue": 0, "chunksizes": file_chunksize}})
write_job = ndvi.to_netcdf(ndvi_precube_path, encoding = {"NDVI": {"dtype": "u1", "_FillValue": 0, "chunksizes": file_chunksize}}, compute = False)
write_job = write_job.persist()
progress(write_job)
return ndvi_precube_path
......@@ -390,12 +396,16 @@ def calculate_ndvi(extracted_paths: Union[List[str], str], config_file: str, cal
ndvi['NDVI'].attrs['scale factor'] = str(scaling)
# Save NDVI cube to netcdf
if ndvi.dims['y'] > 1500 and ndvi.dims['x'] > 1500:
if ndvi.dims['y'] > 1000 and ndvi.dims['x'] > 1000:
file_chunksize = (1, interp_chunk['y'], interp_chunk['x'])
else:
file_chunksize = (1, ndvi.dims['y'], ndvi.dims['x'])
# TODO: add dask progress bar
ndvi.to_netcdf(ndvi_cube_path, encoding = {"NDVI": {"dtype": "u1", "_FillValue": 0, "chunksizes": file_chunksize}})
write_job = ndvi.to_netcdf(ndvi_cube_path, encoding = {"NDVI": {"dtype": "u1", "_FillValue": 0, "chunksizes": file_chunksize}}, compute = False)
write_job = write_job.persist()
progress(write_job)
ndvi.close()
return ndvi_cube_path
......@@ -511,12 +521,16 @@ def interpolate_ndvi(ndvi_path: str, config_file: str, chunk_size: dict = {'x':
# Rewrite spatial reference
ndvi['spatial_ref'] = spatial_ref
# Save NDVI cube to netcdf
if ndvi.dims['y'] > 1500 and ndvi.dims['x'] > 1500:
# Set file chunk size
if ndvi.dims['y'] > 1000 and ndvi.dims['x'] > 1000:
file_chunksize = (1, chunk_size['y'], chunk_size['x'])
else:
file_chunksize = (1, ndvi.dims['y'], ndvi.dims['x'])
# TODO: add functional dask progress bar
# Save NDVI cube to netcdf
ndvi.to_netcdf(ndvi_cube_path, encoding = {"NDVI": {"dtype": "u1", "_FillValue": 0, "chunksizes": file_chunksize}})
ndvi.close()
return ndvi_cube_path
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment