Commit 7182664d authored by Jérémy AUCLAIR

Added Landsat STAC download. Not fully functional yet.

parent e3f13522
@@ -34,7 +34,7 @@ if __name__ == '__main__':
# Import functions depending on run mode
mode = config_params.mode
if mode == 'pixel':
from modspa_pixel.preprocessing.calculate_ndvi import calculate_ndvi, interpolate_ndvi, download_S2_copernicus
from modspa_pixel.preprocessing.calculate_ndvi import calculate_ndvi, interpolate_ndvi, download_ndvi_imagery
else:
from modspa_pixel.preprocessing.calculate_ndvi import calculate_ndvi_parcel
from modspa_pixel.preprocessing.extract_ndvi import extract_ndvi_stats, filter_raw_ndvi, interpolate_ndvi_parcel
@@ -81,13 +81,7 @@ if __name__ == '__main__':
image_path = data_path + os.sep + 'IMAGERY' + os.sep + 'USGS'
ndvi_path = image_path + os.sep + 'NDVI'
if preferred_provider == 'usgs':
# Download and extract LandSat data
csv_extract_file = ndvi_path + os.sep + run_name + os.sep + 'extract.csv'
extracted_images = download_landsat_data(config_params.path_to_eodag_config_file, start_date, end_date, shapefile_path, image_path, csv_extract_file, mode, collection = 'landsat_ot_c2_l2', cloud_cover_limit = config_params.cloud_cover_limit, max_cpu = max_cpu)
elif preferred_provider == 'theia':
if preferred_provider == 'theia':
# Download optical images
csv_download_file = ndvi_path + os.sep + run_name + os.sep + 'download.csv'
raw_images = download_S2_data(config_file, csv_download_file)
@@ -103,8 +97,8 @@ if __name__ == '__main__':
if config_params.open_browser:
webbrowser.open('http://127.0.0.1:8787/status', new=2, autoraise=True)
if preferred_provider == 'copernicus':
ndvi_precube = download_S2_copernicus(config_file)
if preferred_provider == 'copernicus' or preferred_provider == 'usgs':
ndvi_precube = download_ndvi_imagery(config_file)
# Interpolate downloaded NDVI cube
ndvi_cube = interpolate_ndvi(ndvi_precube, config_file)
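The dashboard URL opened above and the client.close() call in the following hunk imply that the elided lines create a dask.distributed client. A minimal sketch of that setup, with purely illustrative worker settings:
from dask.distributed import Client  # distributed scheduler used for the NDVI computations
# Illustrative local cluster; the actual worker count and memory limits are configured elsewhere
client = Client(n_workers = 4, threads_per_worker = 2, memory_limit = '4GB')
print(client.dashboard_link)  # normally http://127.0.0.1:8787/status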
@@ -116,7 +110,7 @@ if __name__ == '__main__':
client.close()
else:
# TODO: adapt S2 Copernicus download script for parcel mode
# TODO: adapt S2 Copernicus and Landsat download script for parcel mode
# Check if NDVI cube already exists
if os.path.exists(ndvi_path + os.sep + run_name + os.sep + run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc') and not ndvi_overwrite: pass
@@ -6,78 +6,117 @@ channels:
- conda-forge
- defaults
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- _libgcc_mutex=0.1=conda_forge
- _openmp_mutex=4.5=2_gnu
- archspec=0.2.3=pyhd8ed1ab_0
- asttokens=2.0.5=pyhd3eb1b0_0
- backcall=0.2.0=pyhd3eb1b0_0
- boltons=23.1.1=pyhd8ed1ab_0
- brotli-python=1.1.0=py310hc6cd4ac_1
- bzip2=1.0.8=h7b6447c_0
- c-ares=1.18.1=h7f98852_0
- ca-certificates=2023.7.22=hbcca054_0
- certifi=2023.7.22=pyhd8ed1ab_0
- c-ares=1.27.0=hd590300_0
- ca-certificates=2024.2.2=hbcca054_0
- certifi=2024.2.2=pyhd8ed1ab_0
- cffi=1.16.0=py310h2fee648_0
- chardet=5.2.0=py310hff52083_1
- colorama=0.4.6=pyhd8ed1ab_0
- comm=0.1.2=py310h06a4308_0
- conda-package-handling=2.2.0=pyh38be061_0
- conda-package-streaming=0.9.0=pyhd8ed1ab_0
- debugpy=1.5.1=py310h295c915_0
- decorator=5.1.1=pyhd3eb1b0_0
- distro=1.9.0=pyhd8ed1ab_0
- entrypoints=0.4=py310h06a4308_0
- executing=0.8.3=pyhd3eb1b0_0
- fmt=10.2.1=h00ab1b0_0
- icu=73.2=h59595ed_0
- idna=3.6=pyhd8ed1ab_0
- ipykernel=6.19.2=py310h2f386ee_0
- ipython=8.8.0=py310h06a4308_0
- jbig=2.1=h7f98852_2003
- jedi=0.18.1=py310h06a4308_1
- jpeg=9e=h166bdaf_1
- jsonpatch=1.33=pyhd8ed1ab_0
- jsonpointer=2.4=py310hff52083_3
- jupyter_client=7.4.8=py310h06a4308_0
- jupyter_core=5.1.1=py310h06a4308_0
- keyutils=1.6.1=h166bdaf_0
- krb5=1.19.3=h3790be6_0
- krb5=1.21.2=h659d440_0
- ld_impl_linux-64=2.38=h1181459_1
- lerc=2.2.1=h9c3ff4c_0
- libcurl=7.87.0=h91b91d3_0
- libdeflate=1.7=h7f98852_5
- lerc=4.0.0=h27087fc_0
- libarchive=3.7.2=h2aa1ff5_1
- libcurl=8.5.0=hca28451_0
- libdeflate=1.19=hd590300_0
- libedit=3.1.20191231=he28a2e2_2
- libev=4.33=h516909a_1
- libev=4.33=hd590300_2
- libffi=3.4.2=h6a678d5_6
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libnghttp2=1.46.0=hce63b2e_0
- libgcc-ng=13.2.0=h807b86a_5
- libgomp=13.2.0=h807b86a_5
- libiconv=1.17=hd590300_2
- libjpeg-turbo=3.0.0=hd590300_1
- libmamba=1.5.7=had39da4_0
- libmambapy=1.5.7=py310h39ff949_0
- libnghttp2=1.58.0=h47da74e_1
- libnsl=2.0.1=hd590300_0
- libsodium=1.0.18=h7b6447c_0
- libssh2=1.10.0=ha56f1ee_2
- libstdcxx-ng=11.2.0=h1234567_1
- libtiff=4.3.0=hf544144_1
- libuuid=1.41.5=h5eee18b_0
- libwebp-base=1.2.2=h7f98852_1
- lz4-c=1.9.3=h9c3ff4c_1
- libsolv=0.7.28=hfc55251_0
- libsqlite=3.45.2=h2797004_0
- libssh2=1.11.0=h0841786_0
- libstdcxx-ng=13.2.0=h7e041cc_5
- libtiff=4.6.0=ha9c0a0a_2
- libuuid=2.38.1=h0b41bf4_0
- libwebp-base=1.3.2=hd590300_0
- libxcrypt=4.4.36=hd590300_1
- libxml2=2.12.5=h232c23b_0
- libzlib=1.2.13=hd590300_5
- lz4-c=1.9.4=hcb278e6_0
- lzo=2.10=h516909a_1000
- matplotlib-inline=0.1.6=py310h06a4308_0
- menuinst=2.0.2=py310hff52083_0
- ncurses=6.4=h6a678d5_0
- nest-asyncio=1.5.6=py310h06a4308_0
- openssl=1.1.1v=h7f8727e_0
- packaging=22.0=py310h06a4308_0
- openssl=3.2.1=hd590300_0
- packaging=24.0=pyhd8ed1ab_0
- parso=0.8.3=pyhd3eb1b0_0
- pexpect=4.8.0=pyhd3eb1b0_3
- pickleshare=0.7.5=pyhd3eb1b0_1003
- pip=22.3.1=py310h06a4308_0
- platformdirs=2.5.2=py310h06a4308_0
- proj=8.2.1=h277dcde_0
- platformdirs=4.2.0=pyhd8ed1ab_0
- pluggy=1.4.0=pyhd8ed1ab_0
- proj=9.3.1=h1d62c97_0
- prompt-toolkit=3.0.36=py310h06a4308_0
- psutil=5.9.0=py310h5eee18b_0
- ptyprocess=0.7.0=pyhd3eb1b0_2
- pure_eval=0.2.2=pyhd3eb1b0_0
- python=3.10.9=h7a1cb2a_0
- pybind11-abi=4=hd8ed1ab_3
- pycosat=0.6.6=py310h2372a71_0
- pycparser=2.21=pyhd8ed1ab_0
- pysocks=1.7.1=pyha2e5f31_6
- python=3.10.13=hd12c33a_1_cpython
- python-dateutil=2.8.2=pyhd3eb1b0_0
- python_abi=3.10=4_cp310
- pyzmq=23.2.0=py310h6a678d5_0
- readline=8.2=h5eee18b_0
- reproc=14.2.4.post0=hd590300_1
- reproc-cpp=14.2.4.post0=h59595ed_1
- requests=2.31.0=pyhd8ed1ab_0
- setuptools=65.6.3=py310h06a4308_0
- six=1.16.0=pyhd3eb1b0_1
- sqlite=3.40.1=h5082296_0
- stack_data=0.2.0=pyhd3eb1b0_0
- tk=8.6.12=h1ccaba5_0
- tk=8.6.13=noxft_h4845f30_101
- tornado=6.2=py310h5eee18b_0
- tqdm=4.66.2=pyhd8ed1ab_0
- traitlets=5.7.1=py310h06a4308_0
- truststore=0.8.0=pyhd8ed1ab_0
- wcwidth=0.2.5=pyhd3eb1b0_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.2.10=h5eee18b_1
- yaml-cpp=0.8.0=h59595ed_0
- zeromq=4.3.4=h2531618_0
- zlib=1.2.13=h5eee18b_0
- zstd=1.5.0=ha95c52a_0
- zlib=1.2.13=hd590300_5
- zstandard=0.22.0=py310h1275a96_0
- zstd=1.5.5=hfc55251_0
- pip:
- aenum==3.1.15
- affine==2.4.0
- alabaster==0.7.13
- appdirs==1.4.4
@@ -89,6 +128,7 @@ dependencies:
- boto3==1.27.0
- botocore==1.30.0
- bottleneck==1.3.7
- cachetools==5.3.3
- cartopy==0.22.0
- cattrs==23.1.2
- cdsapi==0.6.1
@@ -102,6 +142,8 @@ dependencies:
- contourpy==1.1.0
- cycler==0.11.0
- dask==2023.6.1
- dask-geopandas==0.3.1
- dataclasses-json==0.6.3
- dill==0.3.6
- distributed==2023.6.1
- docutils==0.18.1
@@ -109,6 +151,7 @@ dependencies:
- eodag==2.10.0
- eodag-sentinelsat==0.4.1
- esbonio==0.16.1
- et-xmlfile==1.1.0
- eto==1.1.0
- exceptiongroup==1.1.3
- fasteners==0.18
@@ -127,7 +170,6 @@ dependencies:
- h5netcdf==1.2.0
- h5py==3.9.0
- html2text==2020.1.16
- idna==3.4
- imagesize==1.4.1
- importlib-metadata==6.7.0
- itsdangerous==2.1.2
@@ -135,7 +177,8 @@ dependencies:
- jmespath==1.0.1
- joblib==1.3.2
- jsonpath-ng==1.5.3
- jsonschema==4.17.3
- jsonschema==4.21.1
- jsonschema-specifications==2023.12.1
- kiwisolver==1.4.4
- line-profiler==4.0.3
- llvmlite==0.40.1
@@ -145,13 +188,16 @@ dependencies:
- lz4==4.3.2
- markdown-it-py==3.0.0
- markupsafe==2.1.3
- marshmallow==3.20.2
- matplotlib==3.7.1
- mdurl==0.1.2
- memory-profiler==0.61.0
- mercantile==1.2.1
- metpy==1.5.1
- mistune==3.0.1
- msgpack==1.0.5
- multiprocess==0.70.14
- mypy-extensions==1.0.0
- nc-time-axis==1.4.1
- netcdf==66.0.2
- netcdf4==1.6.4
@@ -160,6 +206,10 @@ dependencies:
- numcodecs==0.11.0
- numpy==1.24.4
- numpy-groupies==0.9.22
- oauthlib==3.2.2
- odc-geo==0.4.3
- odc-stac==0.3.9
- openpyxl==3.1.2
- orjson==3.9.1
- owslib==0.25.0
- p-tqdm==1.4.0
@@ -183,19 +233,23 @@ dependencies:
- pyrsistent==0.19.3
- pyshp==2.3.1
- pyspellchecker==0.7.2
- pystac==1.8.1
- pystac==1.9.0
- pystac-client==0.7.6
- pytz==2023.3
- pyyaml==6.0
- rasterio==1.3.8
- requests==2.31.0
- referencing==0.33.0
- requests-futures==1.0.1
- requests-oauthlib==1.3.1
- rich==13.5.2
- rioxarray==0.14.1
- rpds-py==0.18.0
- ruamel-yaml==0.17.32
- ruamel-yaml-clib==0.2.7
- s3transfer==0.6.1
- scipy==1.11.1
- seaborn==0.12.2
- sentinelhub==3.10.1
- sentinelsat==1.2.1
- shapely==2.0.1
- simplejson==3.19.1
@@ -212,13 +266,17 @@ dependencies:
- sphinxcontrib-qthelp==1.0.6
- sphinxcontrib-serializinghtml==1.1.9
- tblib==2.0.0
- tifffile==2024.1.30
- tomli==2.0.1
- tomli-w==1.0.0
- toolz==0.12.0
- tqdm==4.65.0
- typeguard==3.0.2
- typing-extensions==4.7.1
- typing-inspect==0.9.0
- tzdata==2023.3
- urllib3==1.26.16
- usgs==0.3.5
- utm==0.7.0
- werkzeug==2.3.6
- whoosh==2.7.4
- xarray==2023.8.0
@@ -10,6 +10,7 @@ Calculate NDVI images with xarray
import os # for path management
import csv # open csv files
from pystac_client import Client # to send requests to copernicus server
from planetary_computer import sign_inplace # to get access to landsat data
from odc.stac import load # to download copernicus data
from fnmatch import fnmatch # for character string comparison
from typing import List, Union # to declare variables
@@ -28,11 +29,11 @@ from modspa_pixel.config.config import config # to import config file
from modspa_pixel.preprocessing.input_toolbox import product_str_to_datetime, read_product_info, get_band_paths
# TODO: adapt S2 Copernicus download script for parcel mode
def download_S2_copernicus(config_file: str, scaling: int = 255, acorvi_corr: int = 0) -> str:
# TODO: adapt download script for parcel mode
def download_ndvi_imagery(config_file: str, scaling: int = 255, acorvi_corr: int = 0) -> str:
"""
Use the new Copernicus data ecosystem to search and download clipped and interpolated
S2 data, calculate NDVI and save it into a precube.
Use the new Copernicus data ecosystem or the Planetary Computer ecosystem to search and download
clipped and interpolated S2 or Landsat data, calculate NDVI and save it into a precube.
Arguments
=========
@@ -53,12 +54,13 @@ def download_S2_copernicus(config_file: str, scaling: int = 255, acorvi_corr: in
"""
# Ignore RuntimeWarnings
warnings.simplefilter("ignore", category=RuntimeWarning)
warnings.simplefilter("ignore", category = RuntimeWarning)
# Open config_file
config_params = config(config_file)
# Load parameters from config file
preferred_provider = config_params.preferred_provider
run_name = config_params.run_name
start_date = config_params.start_date
end_date = config_params.end_date
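For reference, the attributes read here (and resolution and cloud_cover_limit just below) come from the run configuration file parsed by modspa_pixel.config.config. A hypothetical minimal set of values, purely to illustrate what this function expects; the real file layout and key names are defined by the config class:
# Hypothetical configuration values (illustrative only, not the actual file format)
example_config = {
    'run_name': 'my_run',
    'preferred_provider': 'usgs',     # 'copernicus', 'usgs' or 'theia'
    'start_date': '2020-01-01',
    'end_date': '2020-12-31',
    'resolution': 30,                 # output pixel size in CRS units (metres)
    'cloud_cover_limit': 80,          # maximum cloud cover (%) for the STAC query
    'data_path': '/path/to/DATA',
}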
@@ -66,8 +68,39 @@ def download_S2_copernicus(config_file: str, scaling: int = 255, acorvi_corr: in
resolution = config_params.resolution
cloud_cover_limit = config_params.cloud_cover_limit
# Set paths
save_dir = config_params.data_path + os.sep + 'IMAGERY' + os.sep + 'SCIHUB' + os.sep + 'NDVI'
if preferred_provider == 'copernicus':
# Set api parameters
url = 'https://earth-search.aws.element84.com/v1'
collection = 'sentinel-2-l2a'
modifier = None
# Set data parameters
red = 'red'
nir = 'nir'
mask_name = 'scl'
val1, val2 = 4, 5
# Set paths
save_dir = config_params.data_path + os.sep + 'IMAGERY' + os.sep + 'SCIHUB' + os.sep + 'NDVI'
elif preferred_provider == 'usgs':
# Set api parameters
url = 'https://planetarycomputer.microsoft.com/api/stac/v1'
collection = 'landsat-8-c2-l2'
modifier = sign_inplace
# Set data parameters
red = 'SR_B4'
nir = 'SR_B5'
mask_name = 'QA_PIXEL'
val1, val2 = 1, 21824
# Set paths
save_dir = config_params.data_path + os.sep + 'IMAGERY' + os.sep + 'USGS' + os.sep + 'NDVI'
# Search parameters
bands = [red, nir, mask_name]
resampling_dict = {red: 'bilinear', nir: 'bilinear', mask_name: 'nearest'}
# Create save path
ndvi_precube_path = save_dir + os.sep + run_name + os.sep + run_name + '_NDVI_precube_' + start_date + '_' + end_date + '.nc'
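A standalone way to sanity-check the endpoint/collection pairs configured above before running the full precube build. This is only a sketch, not part of the commit, and the bbox and dates are placeholders; sign_inplace signs Planetary Computer asset links so odc.stac can read them, while Earth Search needs no modifier. Note that the 'landsat-8-c2-l2' collection may since have been superseded by 'landsat-c2-l2' on Planetary Computer, which could be related to the "not fully functional yet" note.
import planetary_computer               # signs Planetary Computer asset hrefs
from pystac_client import Client
# Placeholder area of interest and period, only for the check
test_bbox = [1.0, 43.4, 1.3, 43.6]
test_dates = '2021-06-01/2021-07-01'
client = Client.open('https://planetarycomputer.microsoft.com/api/stac/v1',
                     modifier = planetary_computer.sign_inplace)
search = client.search(collections = ['landsat-8-c2-l2'], bbox = test_bbox,
                       datetime = test_dates, max_items = 10)
print(len(list(search.items())), 'Landsat items returned')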
@@ -86,21 +119,22 @@ def download_S2_copernicus(config_file: str, scaling: int = 255, acorvi_corr: in
new_end_date = (datetime.strptime(end_date, '%Y-%m-%d') + relativedelta(months = 1)).strftime('%Y-%m-%d')
# Create request
client = Client.open("https://earth-search.aws.element84.com/v1")
collection = "sentinel-2-l2a"
search = client.search(collections = [collection], bbox = bbox, datetime = new_start_date + '/' + new_end_date, query = {"eo:cloud_cover" : {"lt" : cloud_cover_limit}}, max_items = 200,)
client = Client.open(url, modifier = modifier)
search = client.search(collections = [collection], bbox = bbox, datetime = new_start_date + '/' + new_end_date, query = {"eo:cloud_cover" : {"lt" : cloud_cover_limit}}, max_items = 200)
# Get data with required bands
data = load(search.items(), bbox = bbox, groupby = "solar_day", bands = ['red', 'nir'], chunks = {}, resolution = resolution)
mask = load(search.items(), bbox = bbox, groupby = "solar_day", bands = ['scl'], chunks = {}, resolution = 20).interp(x = data.coords['x'], y = data.coords['y'], method = 'nearest')
mask = xr.where((mask.scl == 4) | (mask.scl == 5), 1, np.NaN)
data = load(search.items(), bbox = bbox, groupby = "solar_day", bands = bands, chunks = {}, resolution = resolution, resampling = resampling_dict)
# mask = load(search.items(), bbox = bbox, groupby = "solar_day", bands = [mask_name], chunks = {}, resolution = resolution, resampling = 'nearest') #.interp(x = data.coords['x'], y = data.coords['y'], method = 'nearest')
# Create validity mask
mask = xr.where((data[mask_name] == val1) | (data[mask_name] == val2), 1, np.NaN)
# Save single red band as reference tif for later use in reprojection algorithms
ref = data.red.isel(time = 0).rio.write_crs(data.spatial_ref.values)
ref = data[red].isel(time = 0).rio.write_crs(data.spatial_ref.values)
ref.rio.to_raster(save_dir + os.sep + run_name + os.sep + run_name + '_grid_reference.tif')
# Calculate NDVI
ndvi = ((data.nir - data.red - acorvi_corr) / (data.nir + data.red + acorvi_corr) * mask).to_dataset(name = 'NDVI')
ndvi = ((data[nir] - data[red] - acorvi_corr) / (data[nir] + data[red] + acorvi_corr) * mask).to_dataset(name = 'NDVI')
# Convert timestamp to dates
ndvi['time'] = pd.to_datetime(pd.to_datetime(ndvi.time.values, format = '%Y-%m-%d').date)
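For context on the Landsat values kept by the validity mask above: in Collection 2 the QA_PIXEL value 21824 is the standard "clear" bit pattern (bit 6 set, low-confidence flags elsewhere), whereas value 1 has only the fill bit set, so keeping it may be one of the points that is not fully functional yet. A small sketch, using numpy, of decoding the bits instead of matching exact values:
import numpy as np
qa = np.array([1, 21824, 21952], dtype = np.uint16)  # illustrative QA_PIXEL values
clear = (qa & (1 << 6)) != 0    # bit 6: clear of clouds
fill = (qa & 1) != 0            # bit 0: fill / no data
print(clear & ~fill)            # [False  True  True] -> 21952 is the 'clear water' pattern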
@@ -110,7 +144,7 @@ def download_S2_copernicus(config_file: str, scaling: int = 255, acorvi_corr: in
ndvi['NDVI'] = xr.where(ndvi.NDVI < 0, 0, ndvi.NDVI)
# Scale
ndvi['NDVI'] = (ndvi.NDVI * scaling).round().astype(np.uint8)
ndvi['NDVI'] = ((ndvi.NDVI + 1/scaling) * (scaling - 1)).round().astype(np.uint8)
# Write attributes
ndvi['NDVI'].attrs['units'] = 'None'
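The revised scaling above appears to map NDVI from [0, 1] onto the uint8 range [1, 255], presumably so that 0 stays free as a nodata value; a quick numeric check with the default scaling = 255:
scaling = 255
for value in (0.0, 0.5, 1.0):
    print(value, '->', round((value + 1 / scaling) * (scaling - 1)))
# 0.0 -> 1, 0.5 -> 128, 1.0 -> 255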
@@ -207,6 +241,7 @@ def calculate_ndvi(extracted_paths: Union[List[str], str], config_file: str, chu
nir = xr.open_mfdataset(nir_paths, combine = 'nested', concat_dim = 'time', chunks = chunk_size, parallel = True).squeeze(dim = ['band'], drop = True).rename({'band_data': 'nir'}).sortby('y', ascending = False)
mask = xr.open_mfdataset(mask_paths, combine = 'nested', concat_dim = 'time', chunks = chunk_size, parallel = True).squeeze(dim = ['band'], drop = True).rename({'band_data': 'mask'}).sortby('y', ascending = False)
# TODO: add on the fly reprojection
# Get masking condition and resolution management based on provider, select only data inside the bounds of the input shapefile
if preferred_provider == 'copernicus':
red = red.sel(x = slice(bounds[0], bounds[2]), y = slice(bounds[3], bounds[1]))