diff --git a/config/select_config_modspa.py b/config/select_config_modspa.py
index 6326924dbc7ff6e175e3b62ed2a3fc665d337681..0e024237f0d66d06b9f2baa8ca4f2fd1e496daae 100755
--- a/config/select_config_modspa.py
+++ b/config/select_config_modspa.py
@@ -22,10 +22,10 @@ if __name__ == '__main__':
 
     else:
         currentdir = os.path.dirname(os.path.realpath(__file__))
-        source_file = currentdir + os.sep + 'config_modspa.' + sys.argv[1]+'.json'
+        source_file = os.path.join(currentdir, 'config_modspa.' + sys.argv[1]+'.json')
         if not os.path.exists(source_file):
-            source_file = currentdir + os.sep + sys.argv[1]
-        dest_file = currentdir + os.sep + 'config_modspa.json'
+            source_file = os.path.join(currentdir, sys.argv[1])
+        dest_file = os.path.join(currentdir, 'config_modspa.json')
         print('source file:', source_file)
         print('destination file:', dest_file)
         shutil.copyfile(source_file, dest_file)
diff --git a/main_prepare_inputs.py b/main_prepare_inputs.py
index d53677dfb664edbf60836d4a7feb6f4cd3328518..29db89da9e30db66577ea2f96717c58629d40871 100644
--- a/main_prepare_inputs.py
+++ b/main_prepare_inputs.py
@@ -26,7 +26,7 @@ if __name__ == '__main__':
     t0 = time()
     
     # Declare paths
-    config_file = currentdir + os.sep + 'config' + os.sep + 'config_modspa.json'
+    config_file = os.path.join(currentdir, 'config', 'config_modspa.json')
     
     # Open config file and load parameters
     config_params = config(config_file)
@@ -67,24 +67,24 @@ if __name__ == '__main__':
     #===== NDVI =====#
     
     if preferred_provider == 'copernicus':
-        image_path = data_path + os.sep + 'IMAGERY' + os.sep + 'SCIHUB'
-        ndvi_path = image_path + os.sep + 'NDVI'
+        image_path = os.path.join(data_path, 'IMAGERY', 'SCIHUB')
+        ndvi_path = os.path.join(image_path, 'NDVI')
         
     elif preferred_provider == 'theia':
-        image_path = data_path + os.sep + 'IMAGERY' + os.sep + 'THEIA'
-        ndvi_path = image_path + os.sep + 'NDVI'
+        image_path = os.path.join(data_path, 'IMAGERY', 'THEIA')
+        ndvi_path = os.path.join(image_path, 'NDVI')
         
     elif preferred_provider == 'usgs':
-        image_path = data_path + os.sep + 'IMAGERY' + os.sep + 'USGS'
-        ndvi_path = image_path + os.sep + 'NDVI'
+        image_path = os.path.join(data_path, 'IMAGERY', 'USGS')
+        ndvi_path = os.path.join(image_path, 'NDVI')
     
     if preferred_provider == 'theia':
         # Download optical theia images
-        csv_download_file = ndvi_path + os.sep + run_name + os.sep + 'download.csv'
+        csv_download_file = os.path.join(ndvi_path, run_name, 'download.csv')
         raw_images = download_S2_data(config_file, csv_download_file)
         
         # Extract Zip archives
-        csv_extract_file = ndvi_path + os.sep + run_name + os.sep + 'extract.csv'
+        csv_extract_file = os.path.join(ndvi_path, run_name, 'extract.csv')
         extracted_images = extract_zip_archives(image_path, csv_download_file, preferred_provider, resolution, csv_extract_file)
     
     client = Client(silence_logs = 50)
@@ -104,7 +104,7 @@ if __name__ == '__main__':
         
     else:
         # Check if NDVI cube already exists
-        if os.path.exists(ndvi_path + os.sep + run_name + os.sep + run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc') and not ndvi_overwrite: pass
+        if os.path.exists(os.path.join(ndvi_path, run_name, run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc')) and not ndvi_overwrite: pass
         
         if preferred_provider == 'theia':
             # Calculate NDVI
@@ -115,15 +115,15 @@ if __name__ == '__main__':
             ndvi_precube, ndvi_dates = download_ndvi_imagery(config_file)
         
         # Extract NDVI values on shapefile features
-        csv_ndvi_extract_file = ndvi_path + os.sep + run_name + os.sep + 'ndvi_extract.csv'
+        csv_ndvi_extract_file = os.path.join(ndvi_path, run_name, 'ndvi_extract.csv')
         raw_ndvi = extract_ndvi_stats(ndvi_precube, shapefile_path, ndvi_dates, csv_ndvi_extract_file)
         
         # Process and interpolate NDVI values to a daily time step
-        csv_ndvi_interp_file = ndvi_path + os.sep + run_name + os.sep + 'ndvi_interp.csv'
+        csv_ndvi_interp_file = os.path.join(ndvi_path, run_name, 'ndvi_interp.csv')
         interpolated_ndvi = process_raw_ndvi(raw_ndvi, csv_ndvi_interp_file, start_date, end_date)
         
         # Convert DataFrame to netCDF4 file
-        ndvi_cube = ndvi_path + os.sep + run_name + os.sep + run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc'
+        ndvi_cube = os.path.join(ndvi_path, run_name, run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc')
         convert_dataframe_to_xarray(interpolated_ndvi, ndvi_cube, variables = ['NDVI'], data_types = ['u1'])
     
     # Calculate Kcb max from NDVI cube
@@ -132,7 +132,7 @@ if __name__ == '__main__':
     #===== Weather =====#
     
     # Run weather download and formatting script
-    weather = request_ER5_weather(config_file, ndvi_cube, shapefile = shapefile_path, mode = mode, raw_S2_image_ref = ndvi_path + os.sep + run_name + os.sep + run_name + '_grid_reference.tif')
+    weather = request_ER5_weather(config_file, ndvi_cube, shapefile = shapefile_path, mode = mode, raw_S2_image_ref = os.path.join(ndvi_path, run_name, run_name + '_grid_reference.tif'))
     
     client.close()
     
diff --git a/main_run_samir.py b/main_run_samir.py
index 177ad05d4751930834fb6070ed966c572eecdc2a..aeed8abe21fdd2ac662abf2b2182903307b3d514 100644
--- a/main_run_samir.py
+++ b/main_run_samir.py
@@ -24,7 +24,7 @@ if __name__ == '__main__':
     t0 = time()
     
     # Declare config file path
-    config_file = currentdir + os.sep + 'config' + os.sep + 'config_modspa.json'
+    config_file = os.path.join(currentdir, 'config', 'config_modspa.json')
     
     # Open config file and load parameters
     config_params = config(config_file)
@@ -49,20 +49,21 @@ if __name__ == '__main__':
     # Input paths
     ## NDVI
     if preferred_provider == 'copernicus':
-        ndvi_path = data_path + os.sep + 'IMAGERY' + os.sep + 'SCIHUB' + os.sep + 'NDVI' + os.sep + run_name + os.sep + run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc'
+        ndvi_path = os.path.join(data_path, 'IMAGERY', 'SCIHUB', 'NDVI', run_name, run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc')
     elif preferred_provider == 'theia':
-        ndvi_path = data_path + os.sep + 'IMAGERY' + os.sep + 'THEIA' + os.sep + 'NDVI' + os.sep + run_name + os.sep + run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc'
+        ndvi_path = os.path.join(data_path, 'IMAGERY', 'THEIA', 'NDVI', run_name, run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc')
     elif preferred_provider == 'usgs':
-        ndvi_path = data_path + os.sep + 'IMAGERY' + os.sep + + 'USGS' + os.sep + 'NDVI' + os.sep + run_name + os.sep + run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc'
+        ndvi_path = os.path.join(data_path, 'IMAGERY', 'USGS', 'NDVI', run_name, run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc')
     
+    # TODO: change weather path once weather update is done
     ## Weather
     if mode == 'pixel':
-        weather_path = data_path + os.sep + 'WEATHER' + os.sep + run_name + os.sep + 'ncdailyfiles' + os.sep + start_date + '_' + end_date + '_' + run_name + '_era5-land-daily-meteo.nc'
+        weather_path = os.path.join(data_path, 'WEATHER', run_name, 'ncdailyfiles', start_date + '_' + end_date + '_' + run_name + '_era5-land-daily-meteo.nc')
     else:
-        weather_path = data_path + os.sep + 'WEATHER' + os.sep + run_name + os.sep + 'ncdailyfiles' + os.sep + start_date + '_' + end_date + '_' + run_name + '_era5-land-daily-meteo_parcel.nc'
+        weather_path = os.path.join(data_path, 'WEATHER', run_name, 'ncdailyfiles', start_date + '_' + end_date + '_' + run_name + '_era5-land-daily-meteo_parcel.nc')
     
     # Kcb_max path
-    Kcb_max_obs_path = os.path.dirname(ndvi_path) + os.sep + 'Kcb_max_obs.nc'
+    Kcb_max_obs_path = os.path.join(os.path.dirname(ndvi_path), 'Kcb_max_obs.nc')
     
     # Soil path
     soil_path = config_params.soil_path
@@ -74,9 +75,9 @@ if __name__ == '__main__':
     irrigation_mask_path = config_params.irrigation_mask_path
     
     # Output path
-    output_save_path = output_path + os.sep + run_name + os.sep + config_params.run_name + '_outputs.nc'
-    output_dataframe_path = output_path + os.sep + run_name + os.sep + config_params.run_name + '_outputs.csv'
-    output_shapefile_dir = data_path + os.sep + 'SHP' + os.sep + run_name
+    output_save_path = os.path.join(output_path, run_name, config_params.run_name + '_outputs.nc')
+    output_dataframe_path = os.path.join(output_path, run_name, config_params.run_name + '_outputs.csv')
+    output_shapefile_dir = os.path.join(data_path, 'SHP', run_name)
     
     # Run SAMIR
     run_samir(param_csv_file, ndvi_path, weather_path, soil_path, land_cover_path, irrigation_mask_path, Kcb_max_obs_path, output_save_path, additional_outputs = config_params.additional_output, available_ram = max_ram, available_cpu = max_cpu)
diff --git a/parameters/parameter_estimation.ipynb b/parameters/parameter_estimation.ipynb
index 227aa44d4fb0203e368fd3bdca16f11452e9f102..dcf6d143011040858737a3db0e3b7bf128e4b106 100644
--- a/parameters/parameter_estimation.ipynb
+++ b/parameters/parameter_estimation.ipynb
@@ -40,7 +40,7 @@
     "rcParams.update({'font.size': 15})\n",
     "\n",
     "# Declare config file path\n",
-    "config_file = currentdir + os.sep + 'config' + os.sep + 'config_modspa.json'\n",
+    "config_file = os.path.join(currentdir, 'config', 'config_modspa.json')\n",
     "\n",
     "# Open config file and load parameters\n",
     "config_params = config(config_file)\n",
@@ -58,11 +58,11 @@
     "\n",
     "# Set input paths\n",
     "if preferred_provider == 'copernicus':\n",
-    "    ndvi_path = data_path + os.sep + 'IMAGERY' + os.sep + 'SCIHUB' + os.sep + 'NDVI' + os.sep + run_name + os.sep + run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc'\n",
+    "    ndvi_path = os.path.join(data_path, 'IMAGERY', 'SCIHUB', 'NDVI', run_name, run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc')\n",
     "elif preferred_provider == 'theia':\n",
-    "    ndvi_path = data_path + os.sep + 'IMAGERY' + os.sep + 'THEIA' + os.sep + 'NDVI' + os.sep + run_name + os.sep + run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc'\n",
+    "    ndvi_path = os.path.join(data_path, 'IMAGERY', 'THEIA', 'NDVI', run_name, run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc')\n",
     "else:\n",
-    "    ndvi_path = data_path + os.sep + 'IMAGERY' + os.sep + 'USGS' + os.sep + 'NDVI' + os.sep + run_name + os.sep + run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc'\n",
+    "    ndvi_path = os.path.join(data_path, 'IMAGERY', 'USGS', 'NDVI', run_name, run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc')\n",
     "    \n",
     "land_cover_path = config_params.land_cover_path"
    ]
diff --git a/postprocessing/explore_parcel_outputs.ipynb b/postprocessing/explore_parcel_outputs.ipynb
index 151ddf1a9a6e73ea214eda15587a2cddb5415a82..8cd649a9f50708136ee99e3213acd3ba3edcae3d 100644
--- a/postprocessing/explore_parcel_outputs.ipynb
+++ b/postprocessing/explore_parcel_outputs.ipynb
@@ -55,7 +55,7 @@
     "rcParams.update({'font.size': 15})\n",
     "\n",
     "# Load configuration file\n",
-    "config_file = parentdir + os.sep + 'config' + os.sep + 'config_modspa.json'\n",
+    "config_file = os.path.join(parentdir, 'config', 'config_modspa.json')\n",
     "config_params = config(config_file)\n",
     "\n",
     "# Get parameters\n",
@@ -68,19 +68,19 @@
     "# Load paths\n",
     "shapefile_path = config_params.shapefile_path\n",
     "\n",
-    "output_dataset_path = output_path + os.sep + run_name + os.sep + run_name + '_outputs.nc'\n",
-    "output_dataframe_path = output_path + os.sep + run_name + os.sep + run_name + '_outputs.csv'\n",
+    "output_dataset_path = os.path.join(output_path, run_name, run_name + '_outputs.nc')\n",
+    "output_dataframe_path = os.path.join(output_path, run_name, run_name + '_outputs.csv')\n",
     "\n",
     "land_cover_path = config_params.land_cover_path\n",
     "\n",
     "if config_params.preferred_provider == 'copernicus':\n",
-    "    ndvi_path = data_path + os.sep + 'IMAGERY' + os.sep + 'SCIHUB' + os.sep + 'NDVI' + os.sep + run_name + os.sep + run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc'\n",
+    "    ndvi_path = os.path.join(data_path, 'IMAGERY', 'SCIHUB', 'NDVI', run_name, run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc')\n",
     "elif config_params.preferred_provider == 'theia':\n",
-    "    ndvi_path = data_path + os.sep + 'IMAGERY' + os.sep + 'THEIA' + os.sep + 'NDVI' + os.sep + run_name + os.sep + run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc'\n",
+    "    ndvi_path = os.path.join(data_path, 'IMAGERY', 'THEIA', 'NDVI', run_name, run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc')\n",
     "elif config_params.preferred_provider == 'usgs':\n",
-    "    ndvi_path = data_path + os.sep + 'IMAGERY' + os.sep + 'USGS' + os.sep + 'NDVI' + os.sep + run_name + os.sep + run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc'\n",
+    "    ndvi_path = os.path.join(data_path, 'IMAGERY', 'USGS', 'NDVI', run_name, run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc')\n",
     "    \n",
-    "weather_path = data_path + os.sep + 'WEATHER' + os.sep + run_name + os.sep + 'ncdailyfiles' + os.sep + start_date + '_' + end_date + '_' + run_name + '_era5-land-daily-meteo_df.csv'\n",
+    "weather_path = os.path.join(data_path, 'WEATHER', run_name, 'ncdailyfiles', start_date + '_' + end_date + '_' + run_name + '_era5-land-daily-meteo_df.csv')\n",
     "\n",
     "# Open Land Cover raster\n",
     "land_cover = xr.open_dataarray(land_cover_path)\n",
@@ -218,7 +218,7 @@
     "fig.suptitle('SAMIR model outputs', fontsize = 50, y = 1, x = 0.5);\n",
     "fig.tight_layout()\n",
     "if save_figures:\n",
-    "    fig.savefig(fname = config_params.output_path + os.sep + config_params.run_name + os.sep + config_params.run_name + '_variable_plot.png', dpi = 600)"
+    "    fig.savefig(fname = os.path.join(config_params.output_path, config_params.run_name, config_params.run_name + '_variable_plot.png'), dpi = 600)"
    ]
   },
   {
@@ -534,7 +534,7 @@
     "statistics = statistics[['Unit', 'Operation'] + cols].transpose()\n",
     "\n",
     "display(statistics)\n",
-    "statistics.to_csv(path_or_buf = config_params.output_path + os.sep + config_params.run_name + os.sep + config_params.run_name + '_class_statistics.csv')"
+    "statistics.to_csv(path_or_buf = os.path.join(config_params.output_path, config_params.run_name, config_params.run_name + '_class_statistics.csv'))"
    ]
   },
   {
diff --git a/postprocessing/explore_pixel_outputs.ipynb b/postprocessing/explore_pixel_outputs.ipynb
index fe67dc501bb2c79cb3ad7e5489c80eab2241ba67..a5a08a5976b5394382321bf07fff0cbcea7d031a 100644
--- a/postprocessing/explore_pixel_outputs.ipynb
+++ b/postprocessing/explore_pixel_outputs.ipynb
@@ -49,7 +49,7 @@
     "rcParams.update({'font.size': 15})\n",
     "\n",
     "# Load configuration file\n",
-    "config_file = parentdir + os.sep + 'config' + os.sep + 'config_modspa.json'\n",
+    "config_file = os.path.join(parentdir, 'config', 'config_modspa.json')\n",
     "config_params = config(config_file)\n",
     "\n",
     "# Get parameters\n",
@@ -60,16 +60,16 @@
     "end_date = config_params.end_date\n",
     "\n",
     "# Load paths\n",
-    "output_save_path = output_path + os.sep + run_name + os.sep + run_name + '_outputs.nc'\n",
    "output_save_path = os.path.join(output_path, run_name, run_name + '_outputs.nc')\n",
     "land_cover_path = config_params.land_cover_path\n",
     "if config_params.preferred_provider == 'copernicus':\n",
-    "    ndvi_path = data_path + os.sep + 'IMAGERY' + os.sep + 'SCIHUB' + os.sep + 'NDVI' + os.sep + run_name + os.sep + run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc'\n",
+    "    ndvi_path = os.path.join(data_path, 'IMAGERY', 'SCIHUB', 'NDVI', run_name, run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc')\n",
     "elif config_params.preferred_provider == 'theia':\n",
-    "    ndvi_path = data_path + os.sep + 'IMAGERY' + os.sep + 'THEIA' + os.sep + 'NDVI' + os.sep + run_name + os.sep + run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc'\n",
+    "    ndvi_path = os.path.join(data_path, 'IMAGERY', 'THEIA', 'NDVI', run_name, run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc')\n",
     "elif config_params.preferred_provider == 'usgs':\n",
-    "    ndvi_path = data_path + os.sep + 'IMAGERY' + os.sep + 'USGS' + os.sep + 'NDVI' + os.sep + run_name + os.sep + run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc'\n",
+    "    ndvi_path = os.path.join(data_path, 'IMAGERY', 'USGS', 'NDVI', run_name, run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc')\n",
     "    \n",
-    "weather_path = data_path + os.sep + 'WEATHER' + os.sep + run_name + os.sep + 'ncdailyfiles' + os.sep + start_date + '_' + end_date + '_' + run_name + '_era5-land-daily-meteo.nc'\n",
+    "weather_path = os.path.join(data_path, 'WEATHER', run_name, 'ncdailyfiles', start_date + '_' + end_date + '_' + run_name + '_era5-land-daily-meteo.nc')\n",
     "\n",
     "# Open Land Cover raster\n",
     "land_cover = xr.open_dataarray(land_cover_path)\n",
@@ -144,7 +144,7 @@
     "fig.suptitle('SAMIR model outputs', fontsize = 50, y = 1, x = 0.5);\n",
     "fig.tight_layout()\n",
     "if save_figures:\n",
-    "    fig.savefig(fname = config_params.output_path + os.sep + config_params.run_name + os.sep + config_params.run_name + '_variable_plot.png', dpi = 800)"
+    "    fig.savefig(fname = os.path.join(config_params.output_path, config_params.run_name, config_params.run_name + '_variable_plot.png'), dpi = 800)"
    ]
   },
   {
@@ -460,7 +460,7 @@
     "statistics = statistics[['Unit', 'Operation'] + cols].transpose()\n",
     "\n",
     "display(statistics)\n",
-    "statistics.to_csv(path_or_buf = config_params.output_path + os.sep + config_params.run_name + os.sep + config_params.run_name + '_class_statistics.csv')"
+    "statistics.to_csv(path_or_buf = os.path.join(config_params.output_path, config_params.run_name, config_params.run_name + '_class_statistics.csv'))"
    ]
   },
   {
diff --git a/postprocessing/pixel_to_parcel.py b/postprocessing/pixel_to_parcel.py
index f3c41640cbda2da42edb1df99b361d18d5a60a15..7f14219a05d8aa49e71ad839827ee54cd4493482 100644
--- a/postprocessing/pixel_to_parcel.py
+++ b/postprocessing/pixel_to_parcel.py
@@ -88,7 +88,7 @@ def convert_xarray_to_dataframe(dataset_path: Union[str, xr.Dataset, xr.DataArra
             for var in shapefile_variables:
                 
                 # Create save path
-                shapefile_save_path = output_shapefile_dir + os.sep + var + '_shapefile'
+                shapefile_save_path = os.path.join(output_shapefile_dir, var + '_shapefile')
                 
                 print(f'Merging csv and shapefile: {shapefile_save_path}')
                 
@@ -139,7 +139,7 @@ def convert_xarray_to_dataframe(dataset_path: Union[str, xr.Dataset, xr.DataArra
             new_shapefile = new_shapefile.set_index('id')
             
             # Create save path
-            shapefile_save_path = output_shapefile_dir + os.sep + 'multivariable_shapefile'
+            shapefile_save_path = os.path.join(output_shapefile_dir, 'multivariable_shapefile')
             
             # Save shapefile
             new_shapefile.to_file(shapefile_save_path, index = True)
diff --git a/postprocessing/plot_parcel_GIFs.py b/postprocessing/plot_parcel_GIFs.py
index 6c23902228f4b6fb8dbd47c8ddcae51a421c4238..7233cd9f04b4df1a4765a27cf5b1a78690a645c7 100644
--- a/postprocessing/plot_parcel_GIFs.py
+++ b/postprocessing/plot_parcel_GIFs.py
@@ -68,7 +68,7 @@ if __name__ == '__main__':
     cmap_ndvi = get_continuous_cmap(hex_list)
     
     # Declare paths
-    config_file = parentdir + os.sep + 'config' + os.sep + 'config_modspa.json'
+    config_file = os.path.join(parentdir, 'config', 'config_modspa.json')
     
     # Open config file and load parameters
     config_params = config(config_file)
@@ -88,16 +88,16 @@ if __name__ == '__main__':
     shapefile_path = config_params.shapefile_path
     
     # Output path
-    output_save_path = output_path + os.sep + run_name + os.sep + run_name + '_outputs.nc'
-    output_dataframe_path = output_path + os.sep + run_name + os.sep + run_name + '_outputs.csv'
+    output_save_path = os.path.join(output_path, run_name, run_name + '_outputs.nc')
+    output_dataframe_path = os.path.join(output_path, run_name, run_name + '_outputs.csv')
     
     # NDVI input path
     if preferred_provider == 'copernicus':
-        ndvi_path = data_path + os.sep + 'IMAGERY' + os.sep + 'SCIHUB' + os.sep + 'NDVI' + os.sep + run_name + os.sep + 'ndvi_interp.csv'
+        ndvi_path = os.path.join(data_path, 'IMAGERY', 'SCIHUB', 'NDVI', run_name, 'ndvi_interp.csv')
     elif preferred_provider == 'theia':
-        ndvi_path = data_path + os.sep + 'IMAGERY' + os.sep + 'THEIA' + os.sep + 'NDVI' + os.sep + run_name + os.sep + 'ndvi_interp.csv'
+        ndvi_path = os.path.join(data_path, 'IMAGERY', 'THEIA', 'NDVI', run_name, 'ndvi_interp.csv')
     elif preferred_provider == 'usgs':
-        ndvi_path = data_path + os.sep + 'IMAGERY' + os.sep + 'USGS' + os.sep + 'NDVI' + os.sep + run_name + os.sep + 'ndvi_interp.csv'
+        ndvi_path = os.path.join(data_path, 'IMAGERY', 'USGS', 'NDVI', run_name, 'ndvi_interp.csv')
 
     # Convert output netCDF to pandas dataframe
     output_dataset = xr.open_dataset(output_save_path)
@@ -112,9 +112,9 @@ if __name__ == '__main__':
     shapefile = gpd.read_file(shapefile_path)
     
     # Save path
-    output_gif_path = output_path  + os.sep + run_name + os.sep + 'GIF'
+    output_gif_path = os.path.join(output_path, run_name, 'GIF')
     if not os.path.exists(output_gif_path): os.mkdir(output_gif_path)
-    gif_var_path = output_gif_path + os.sep + variable + '.gif'
+    gif_var_path = os.path.join(output_gif_path, variable + '.gif')
     
     # Define plot function
     def plot_image(args: tuple) -> str:
@@ -206,7 +206,7 @@ if __name__ == '__main__':
         cx.add_basemap(ax, crs = shapefile_plot.crs, source = xyz.OpenTopoMap, alpha = 0.6, reset_extent = True, attribution = False)
         
         # Create save_path and save
-        figure_path = output_gif_path + os.sep + f'{day+1:03d}_' + variable + '_.png'
+        figure_path = os.path.join(output_gif_path, f'{day+1:03d}_' + variable + '_.png')
         fig.savefig(figure_path, dpi = 100, bbox_inches='tight')
         
         # Release memory
diff --git a/postprocessing/plot_pixel_GIFs.py b/postprocessing/plot_pixel_GIFs.py
index 2dfff6ad1516a9a795e5791c72df60b94cfd493f..02ac0c656ce1d2de7675f89e470c03084d6105be 100644
--- a/postprocessing/plot_pixel_GIFs.py
+++ b/postprocessing/plot_pixel_GIFs.py
@@ -66,7 +66,7 @@ if __name__ == '__main__':
     cmap_ndvi = get_continuous_cmap(hex_list)
     
     # Declare paths
-    config_file = parentdir + os.sep + 'config' + os.sep + 'config_modspa.json'
+    config_file = os.path.join(parentdir, 'config', 'config_modspa.json')
     
     # Open config file and load parameters
     config_params = config(config_file)
@@ -86,15 +86,15 @@ if __name__ == '__main__':
     land_cover_path = config_params.land_cover_path
     
     # Output path
-    output_save_path = output_path + os.sep + run_name + os.sep + run_name + '_outputs.nc'
+    output_save_path = os.path.join(output_path, run_name, run_name + '_outputs.nc')
     
     # NDVI input path
     if preferred_provider == 'copernicus':
-        ndvi_path = data_path + os.sep + 'IMAGERY' + os.sep + 'SCIHUB' + os.sep + 'NDVI' + os.sep + run_name + os.sep + run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc'
+        ndvi_path = os.path.join(data_path, 'IMAGERY', 'SCIHUB', 'NDVI', run_name, run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc')
     elif preferred_provider == 'theia':
-        ndvi_path = data_path + os.sep + 'IMAGERY' + os.sep + 'THEIA' + os.sep + 'NDVI' + os.sep + run_name + os.sep + run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc'
+        ndvi_path = os.path.join(data_path, 'IMAGERY', 'THEIA', 'NDVI', run_name, run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc')
     elif preferred_provider == 'usgs':
-        ndvi_path = data_path + os.sep + 'IMAGERY' + os.sep + 'USGS' + os.sep + 'NDVI' + os.sep + run_name + os.sep + run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc'
+        ndvi_path = os.path.join(data_path, 'IMAGERY', 'USGS', 'NDVI', run_name, run_name + '_NDVI_cube_' + start_date + '_' + end_date + '.nc')
         
     footprint_path = '/mnt/e/KAIROUAN/XLAS_Footprint/Footprint_daily.nc'
 
@@ -108,9 +108,9 @@ if __name__ == '__main__':
         land_cover = xr.open_dataarray(land_cover_path)
     
     # Save path
-    output_gif_path = output_path  + os.sep + run_name + os.sep + 'GIF'
+    output_gif_path = os.path.join(output_path, run_name, 'GIF')
     if not os.path.exists(output_gif_path): os.mkdir(output_gif_path)
-    gif_var_path = output_gif_path + os.sep + variable + '.gif'
+    gif_var_path = os.path.join(output_gif_path, variable + '.gif')
     
     # Define plot function
     def plot_image(args: tuple) -> str:
@@ -166,7 +166,7 @@ if __name__ == '__main__':
         cbar.set_label(label = colorbar_label, labelpad = 10)
         
         # Save image
-        figure_path = output_gif_path + os.sep + f'{day+1:03d}_' + variable + '_.png'
+        figure_path = os.path.join(output_gif_path, f'{day+1:03d}_' + variable + '_.png')
         fig.savefig(figure_path, dpi = 100, bbox_inches='tight')
         
         # Release memory
diff --git a/preprocessing/calculate_ndvi.py b/preprocessing/calculate_ndvi.py
index f8daa5d54c185056bf072b3e9198f2199379e521..11e3c25c48f77203e6c2e4931ad04c386902f455 100644
--- a/preprocessing/calculate_ndvi.py
+++ b/preprocessing/calculate_ndvi.py
@@ -90,7 +90,7 @@ def download_ndvi_imagery(config_file: str, interp_chunk: dict = {'x': 400, 'y':
         val1, val2 = 4, 5
         
         # Set paths
-        save_dir = config_params.data_path + os.sep + 'IMAGERY' + os.sep + 'SCIHUB' + os.sep + 'NDVI'
+        save_dir = os.path.join(config_params.data_path, 'IMAGERY', 'SCIHUB', 'NDVI')
         
     elif preferred_provider == 'usgs':
         # Set api parameters
@@ -104,15 +104,15 @@ def download_ndvi_imagery(config_file: str, interp_chunk: dict = {'x': 400, 'y':
         val1, val2 = 21824, 21824
         
         # Set paths
-        save_dir = config_params.data_path + os.sep + 'IMAGERY' + os.sep + 'USGS' + os.sep + 'NDVI'
+        save_dir = os.path.join(config_params.data_path, 'IMAGERY', 'USGS', 'NDVI')
 
     # Search parameters
     bands = [red, nir, mask_name]
     resampling_dict = {red: 'bilinear', nir: 'bilinear', mask_name: 'nearest'}
     
     # Create save paths
-    ndvi_cube_path = save_dir + os.sep + run_name + os.sep + run_name + '_NDVI_' + 'pre' * (mode == 'parcel') + 'cube_' + start_date + '_' + end_date + '.nc' * (mode == 'pixel') + '.tif' * (mode == 'parcel')
-    dates_file = save_dir + os.sep + run_name + os.sep + run_name + '_NDVI_precube_' + start_date + '_' + end_date + '_dates.npy'
+    ndvi_cube_path = os.path.join(save_dir, run_name, run_name + '_NDVI_' + 'pre' * (mode == 'parcel') + 'cube_' + start_date + '_' + end_date + '.nc' * (mode == 'pixel') + '.tif' * (mode == 'parcel'))
+    dates_file = os.path.join(save_dir, run_name, run_name + '_NDVI_precube_' + start_date + '_' + end_date + '_dates.npy')
     
     # Check if file exists and ndvi overwrite is false
     if os.path.exists(ndvi_cube_path) and not config_params.ndvi_overwrite:
@@ -148,7 +148,7 @@ def download_ndvi_imagery(config_file: str, interp_chunk: dict = {'x': 400, 'y':
     
     # Save single red band as reference tif for later use in reprojection algorithms
     ref = data[red].isel(time = 0).rio.write_crs(data.spatial_ref.values)
-    ref.rio.to_raster(save_dir + os.sep + run_name + os.sep + run_name + '_grid_reference.tif')
+    ref.rio.to_raster(os.path.join(save_dir, run_name, run_name + '_grid_reference.tif'))
     
     # Calculate NDVI
     ndvi = ((data[nir] - data[red] - acorvi_corr) / (data[nir] + data[red] + acorvi_corr).where(data[nir] + data[red] + acorvi_corr != 0, np.nan) * mask).to_dataset(name = 'NDVI')
@@ -281,11 +281,11 @@ def calculate_ndvi(extracted_paths: Union[List[str], str], config_file: str, cal
     resolution = config_params.resolution
     preferred_provider = config_params.preferred_provider
     if preferred_provider == 'copernicus':
-        save_dir = config_params.data_path + os.sep + 'IMAGERY' + os.sep + 'SCIHUB' + os.sep + 'NDVI'
+        save_dir = os.path.join(config_params.data_path, 'IMAGERY', 'SCIHUB', 'NDVI')
     elif preferred_provider == 'theia':
-        save_dir = config_params.data_path + os.sep + 'IMAGERY' + os.sep + 'THEIA' + os.sep + 'NDVI'
+        save_dir = os.path.join(config_params.data_path, 'IMAGERY', 'THEIA', 'NDVI')
     elif preferred_provider == 'usgs':
-        save_dir = config_params.data_path + os.sep + 'IMAGERY' + os.sep + 'USGS' + os.sep + 'NDVI'
+        save_dir = os.path.join(config_params.data_path, 'IMAGERY', 'USGS', 'NDVI')
 
     # If a file name is provided instead of a list of paths, load the csv file that contains the list of paths
     if type(extracted_paths) == str:
@@ -371,11 +371,11 @@ def calculate_ndvi(extracted_paths: Union[List[str], str], config_file: str, cal
     # Recalculate transform
     ref.rio.write_transform(inplace = True)
     ref = ref.set_coords('spatial_ref')
-    ref.rio.to_raster(save_dir + os.sep + run_name + os.sep + run_name + '_grid_reference.tif')
+    ref.rio.to_raster(os.path.join(save_dir, run_name, run_name + '_grid_reference.tif'))
     
     # Create save path
-    ndvi_cube_path = save_dir + os.sep + run_name + os.sep + run_name + '_NDVI_' + 'pre' * (mode == 'parcel') + 'cube_' + start_date + '_' + end_date + '.nc' * (mode == 'pixel') + '.tif' * (mode == 'parcel')
-    dates_file = save_dir + os.sep + run_name + os.sep + run_name + '_NDVI_precube_' + start_date + '_' + end_date + '_dates.npy'
+    ndvi_cube_path = os.path.join(save_dir, run_name, run_name + '_NDVI_' + 'pre' * (mode == 'parcel') + 'cube_' + start_date + '_' + end_date + '.nc' * (mode == 'pixel') + '.tif' * (mode == 'parcel'))
+    dates_file = os.path.join(save_dir, run_name, run_name + '_NDVI_precube_' + start_date + '_' + end_date + '_dates.npy')
     
     # Check if file exists and ndvi overwrite is false
     if os.path.exists(ndvi_cube_path) and not config_params.ndvi_overwrite:
@@ -521,14 +521,14 @@ def interpolate_ndvi(ndvi_path: str, config_file: str, chunk_size: dict = {'x':
     # Load parameters from config file
     run_name = config_params.run_name
     if config_params.preferred_provider == 'copernicus':
-        save_dir = config_params.data_path + os.sep + 'IMAGERY' + os.sep + 'SCIHUB' + os.sep + 'NDVI'
+        save_dir = os.path.join(config_params.data_path, 'IMAGERY', 'SCIHUB', 'NDVI')
     elif config_params.preferred_provider == 'theia':
-        save_dir = config_params.data_path + os.sep + 'IMAGERY' + os.sep + 'THEIA' + os.sep + 'NDVI'
+        save_dir = os.path.join(config_params.data_path, 'IMAGERY', 'THEIA', 'NDVI')
     elif config_params.preferred_provider == 'usgs':
-        save_dir = config_params.data_path + os.sep + 'IMAGERY' + os.sep + 'USGS' + os.sep + 'NDVI'
+        save_dir = os.path.join(config_params.data_path, 'IMAGERY', 'USGS', 'NDVI')
     
     # Create save path
-    ndvi_cube_path = save_dir + os.sep + run_name + os.sep + run_name + '_NDVI_cube_' + config_params.start_date + '_' + config_params.end_date + '.nc'
+    ndvi_cube_path = os.path.join(save_dir, run_name, run_name + '_NDVI_cube_' + config_params.start_date + '_' + config_params.end_date + '.nc')
     
     # Check if file exists and ndvi overwrite is false
     if os.path.exists(ndvi_cube_path) and not config_params.ndvi_overwrite:
@@ -670,7 +670,7 @@ def calculate_ndvi_image(args: tuple) -> str:
     # Create file name
     file_name = os.path.basename(product_path)
     # file_name = os.path.splitext(file_name)[0]
-    save_name = save_dir + os.sep + file_name + '_NDVI.tif'
+    save_name = os.path.join(save_dir, file_name + '_NDVI.tif')
 
     # Check if file is already written. If override is false, no need to calculate and write ndvi again.
     # If override is true: ndvi is calculated and saved again.
diff --git a/preprocessing/custom_inputs_pixel.ipynb b/preprocessing/custom_inputs_pixel.ipynb
index 0c21aa862a4134421576c0337503e7bef0fbdd8a..065ebd23032dec79b8fd2ef62f91918a376f2004 100644
--- a/preprocessing/custom_inputs_pixel.ipynb
+++ b/preprocessing/custom_inputs_pixel.ipynb
@@ -701,7 +701,7 @@
     "\n",
     "# Create empty weather dataset with NDVI structure\n",
     "uniform_weather = xr.open_dataset(ndvi_path).rename_vars({'NDVI': 'Rain'}).copy(deep = True).transpose('time', 'y', 'x')\n",
-    "dimensions = xr.open_dataset(ndvi_path).dims\n",
+    "dimensions = xr.open_dataset(ndvi_path).sizes\n",
     "\n",
     "# Set dataset attributes\n",
     "uniform_weather.attrs['name'] = 'ModSpa Pixel weather dataset (uniform)'\n",
diff --git a/preprocessing/download_ERA5_weather.py b/preprocessing/download_ERA5_weather.py
index 99389ce71aa250abf21b29a65b896b9930397f5b..59ef77121d161e499daaee4638d395cfd909aca6 100644
--- a/preprocessing/download_ERA5_weather.py
+++ b/preprocessing/download_ERA5_weather.py
@@ -59,11 +59,10 @@ def request_ER5_weather(config_file: str, ndvi_path: str, raw_S2_image_ref: str
     weather_overwrite = config_params.weather_overwrite
     
     # Set output paths
-    outpath = data_path + os.sep + 'WEATHER' + os.sep + run_name
-    save_dir = outpath + os.sep + 'ncdailyfiles'
+    outpath = os.path.join(data_path, 'WEATHER', run_name)
 
     # Create daily wheather ncfile name
-    weather_daily_ncFile = save_dir + os.sep + start_date + '_' + end_date + '_' + run_name + '_era5-land-daily-meteo'
+    weather_daily_ncFile = os.path.join(outpath, start_date + '_' + end_date + '_' + run_name + '_era5-land-daily-meteo')
     
     # Check if file exists and ndvi overwrite is false
     if os.path.exists(weather_daily_ncFile + '.nc' * (mode == 'pixel') + '_parcel.nc' * (mode == 'parcel' )) and not weather_overwrite:
@@ -150,30 +149,9 @@ def request_ER5_weather(config_file: str, ndvi_path: str, raw_S2_image_ref: str
     nb_processes = 4 * min([cpu_count(logical = False), len(os.sched_getaffinity(0)), config_params.max_cpu])  # downloading data demands very little computing power, each processor core can manage multiple downloads
 
     # Call daily data
-    era5land.call_era5land_daily_for_MODSPA(start_date, end_date, era5_expe_polygons_boxbound_wgs84, output_path = outpath, processes = nb_processes)
+    weather_file = era5land.call_era5land_daily(start_date, end_date, era5_expe_polygons_boxbound_wgs84, output_path = outpath)
     
-    # Get list of files
-    list_era5land_hourly_ncFiles = []
-    for file in glob.glob(outpath + os.sep + 'ERA5-land_*'):
-        if era5land.filename_to_datetime(file) >= datetime.strptime(start_date, '%Y-%m-%d').replace(day = 1).date() and era5land.filename_to_datetime(file) <= datetime.strptime(end_date, '%Y-%m-%d').replace(day = 1).date() and file.endswith('.nc'):
-            list_era5land_hourly_ncFiles.append(file)
-    list_era5land_hourly_ncFiles.sort()
-    
-    for ncfile in list_era5land_hourly_ncFiles:
-        print(ncfile)
-    
-    if os.path.exists(outpath+os.sep+'ncdailyfiles'):
-        print('path for nc daily files: ', save_dir)
-    else:
-        os.mkdir(outpath+os.sep+'ncdailyfiles')
-        print('mkdir path for nc daily files: ', save_dir)
-    print('----------')
-
-    # Temporary save directory for daily file merge
-    variable_list = ['2m_temperature_daily_maximum', '2m_temperature_daily_minimum', 'total_precipitation_daily_maximum', '10m_u_component_of_wind_daily_mean', '10m_v_component_of_wind_daily_mean', 'surface_solar_radiation_downwards_daily_maximum']
-
-    # Aggregate monthly files
-    aggregated_files = era5land.concat_monthly_nc_file(list_era5land_hourly_ncFiles, variable_list, save_dir)
+    print('path to daily weather netCDF file: ', weather_file)
     
     # Generate pandas dataframe for parcel mode
     if mode == 'parcel':
@@ -187,7 +165,7 @@ def request_ER5_weather(config_file: str, ndvi_path: str, raw_S2_image_ref: str
             return weather_dataset
         
         # Generate daily weather datasets as Geotiffs for each variable
-        weather_daily_rain, weather_daily_ET0 = era5land.era5Land_daily_to_yearly_parcel(aggregated_files, weather_daily_ncFile, start_date, end_date, h = wind_height)
+        weather_daily_rain, weather_daily_ET0 = era5land.era5Land_daily_to_yearly_parcel(weather_file, weather_daily_ncFile, start_date, end_date, h = wind_height)
         
         # Generate and save weather dataframe
         era5land.extract_weather_dataframe(weather_daily_rain, weather_daily_ET0, shapefile, config_file, weather_datframe)
@@ -200,7 +178,7 @@ def request_ER5_weather(config_file: str, ndvi_path: str, raw_S2_image_ref: str
         return weather_dataset
     
     # Calculate ET0 over the whole time period
-    weather_daily_ncFile = era5land.era5Land_daily_to_yearly_pixel(aggregated_files, weather_daily_ncFile, raw_S2_image_ref, ndvi_path, start_date, end_date, h = wind_height, max_ram = config_params.max_ram, weather_overwrite = weather_overwrite)
+    weather_daily_ncFile = era5land.era5Land_daily_to_yearly_pixel(weather_file, weather_daily_ncFile, raw_S2_image_ref, ndvi_path, start_date, end_date, h = wind_height, max_ram = config_params.max_ram, weather_overwrite = weather_overwrite)
     
     print('\nWeather dataset:', weather_daily_ncFile)
 
diff --git a/preprocessing/download_S2.py b/preprocessing/download_S2.py
index 3cfe4b9f0abcbd6d4978850d27d6481cca3f77f1..0dddc0a88c2e1c0ca3d30902b11230ed8c8e3ba6 100644
--- a/preprocessing/download_S2.py
+++ b/preprocessing/download_S2.py
@@ -184,7 +184,7 @@ def extract_zip_file(args: tuple) -> str:
     image_path, file_path, bands_to_extract, remove_archive = args
     
     # Get path in which to extract the archive
-    extract_path = image_path + os.sep + os.path.basename(file_path)[:-4]
+    extract_path = os.path.join(image_path, os.path.basename(file_path)[:-4])
     
     # Extract desired bands from tar file
     with zp.ZipFile(file_path, mode = 'r') as myzip:
@@ -195,13 +195,13 @@ def extract_zip_file(args: tuple) -> str:
                 if fnmatch(f, band):
                     # Check if already extacted
                     f_name = os.path.basename(f)
-                    if not os.path.exists(extract_path + os.sep + f_name):
+                    if not os.path.exists(os.path.join(extract_path, f_name)):
                     
                         # Extract file
                         myzip.extract(f, path = extract_path)
                     
                         # Move extracted file to the root of the directory
-                        shutil.move(extract_path + os.sep + f, extract_path + os.sep + f_name)
+                        shutil.move(os.path.join(extract_path, f), os.path.join(extract_path, f_name))
     
     # Remove unecessary empty directories
     try:
diff --git a/preprocessing/download_landsat.py b/preprocessing/download_landsat.py
index 9abb412f74cab8be17fa1459b90c3502d5815b10..151761ed3025dd093ec240cb4f50fe1f25fb105c 100644
--- a/preprocessing/download_landsat.py
+++ b/preprocessing/download_landsat.py
@@ -122,7 +122,7 @@ def download_file_multiprocess(args: tuple) -> str:
         file_name = url.split('=')[1].split('&')[0] + '.tar'
     else:
         file_name = url.split('/')[-1]
-    file_path = file_path + os.sep + file_name
+    file_path = os.path.join(file_path, file_name)
     extract_path = file_path[:-4] #+ os.sep
     
     # Check if file exists
diff --git a/preprocessing/lib_era5_land_pixel.py b/preprocessing/lib_era5_land_pixel.py
index e20fd11b08178c3425a4098059b0a592a78f4160..3db3087925978bf6eb7cbbed3cba846956141206 100644
--- a/preprocessing/lib_era5_land_pixel.py
+++ b/preprocessing/lib_era5_land_pixel.py
@@ -91,7 +91,8 @@ def era5_enclosing_shp_aera(area: List[float], pas: float) -> Tuple[float, float
     return era5_area  # [N,W,S,E]
 
 
-def call_era5land_daily(args: Tuple[str, str, str, str, List[int], str]) -> None:
+# TODO: create new function to get daily statistics from hourly data, adapt parameters for each variable type, take inspiration from pySPARSE weather part
+def call_era5land_daily(start_date: str, end_date: str, area: List[float], output_path: str) -> str:
     """
-    Query of one month of daily ERA5-land data of a selected variable
-    according to a selected statistic
+    Query daily ERA5-land data for a fixed set of weather variables
+    over the full period from start date to end date
@@ -105,20 +106,14 @@ def call_era5land_daily(args: Tuple[str, str, str, str, List[int], str]) -> None
     
     (packed in args: ``tuple``)
     
-    1. year: ``str``
-        year at YYYY format.
-    2. month: ``str``
-        month at MM format.
-    3. variable: ``str``
-        user-selectable variable
-        cf. Appendix A Table 3 for list of input variables availables.
-    4. statistic: ``str``
-        daily statistic choosed, 3 possibility
-        daily_mean or daily_minimum or daily_maximum.
-    5. area: ``List[int]``
+    1. start_date: ``str``
+        start date in YYYY-MM-DD format
+    2. end_date: ``str``
+        end date in YYYY-MM-DD format
+    3. area: ``List[int]``
         bounding box of the demanded area
         area = [lat_max, lon_min, lat_min, lon_max]
-    6. output_path: ``str``
+    4. output_path: ``str``
         path for output file.
 
     Returns
@@ -126,50 +121,100 @@ def call_era5land_daily(args: Tuple[str, str, str, str, List[int], str]) -> None
     
     ``None``
     """
-    year, month, variable, statistic, area, output_path = args
     
-    # set name of output file for each month (statistic, variable, year, month)
-    output_filename = \
-        output_path+os.sep +\
-        "ERA5-land_"+year+"_"+month+"_"+variable+"_"+statistic+".nc"
-
-    if os.path.isfile(output_filename):
-        print(output_filename, ' already exist')
-    else:
-        try:
-
-            c = cdsapi.Client(timeout=300)
-
-            result = c.service("tool.toolbox.orchestrator.workflow",
-                               params={
-                                   "realm": "c3s",
-                                   "project": "app-c3s-daily-era5-statistics",
-                                   "version": "master",
-                                   "kwargs": {
-                                       "dataset": "reanalysis-era5-land",
-                                       "product_type": "reanalysis",
-                                       "variable": variable,
-                                       "statistic": statistic,
-                                       "year": year,
-                                       "month": month,
-                                       "time_zone": "UTC+00:0",
-                                       "frequency": "1-hourly",
-                                       "grid": "0.1/0.1",
-                                       "area": {"lat": [area[2], area[0]],
-                                                "lon": [area[1], area[3]]}
-                                   },
-                                   "workflow_name": "application"
-                               })
-
-            location = result[0]['location']
-            res = requests.get(location, stream=True)
-            print("Writing data to " + output_filename)
-            with open(output_filename, 'wb') as fh:
-                for r in res.iter_content(chunk_size=1024):
-                    fh.write(r)
-            fh.close()
-        except:
-            print('!! request', variable, '  failed !! -> year', year, 'month', month)
+    # Variable dictionnary
+    dico = {
+        '2m_temperature': ['daily_minimum', 'daily_maximum'],
+        '2m_dewpoint_temperature': ['daily_minimum', 'daily_maximum'],
+        '10m_u_component_of_wind': ['daily_mean'],
+        '10m_v_component_of_wind': ['daily_mean'],
+        'total_precipitation': ['daily_maximum'],
+        'surface_solar_radiation_downwards': ['daily_maximum']
+    }
+    
+    # Create name of output file
+    output_filename = os.path.join(output_path, 'ERA5-land_' + start_date + '_' + end_date + '.nc')
+    print(output_filename)
+    
+    # Get datetime objects for start and end date
+    start = datetime.strptime(start_date, '%Y-%m-%d')
+    end = datetime.strptime(end_date, '%Y-%m-%d')
+
+    # if os.path.isfile(output_filename):
+    #     print(output_filename, ' already exist')
+    # else:
+    #     try:
+
+    #         c = cdsapi.Client(timeout=300)
+
+    #         result = c.service("tool.toolbox.orchestrator.workflow",
+    #                            params={
+    #                                "realm": "c3s",
+    #                                "project": "app-c3s-daily-era5-statistics",
+    #                                "version": "master",
+    #                                "kwargs": {
+    #                                    "dataset": "reanalysis-era5-land",
+    #                                    "product_type": "reanalysis",
+    #                                    "variable": variable,
+    #                                    "statistic": statistic,
+    #                                    "year": year,
+    #                                    "month": month,
+    #                                    "time_zone": "UTC+00:0",
+    #                                    "frequency": "1-hourly",
+    #                                    "grid": "0.1/0.1",
+    #                                    "area": {"lat": [area[2], area[0]],
+    #                                             "lon": [area[1], area[3]]}
+    #                                },
+    #                                "workflow_name": "application"
+    #                            })
+
+    #         location = result[0]['location']
+    #         res = requests.get(location, stream=True)
+    #         print("Writing data to " + output_filename)
+    #         with open(output_filename, 'wb') as fh:
+    #             for r in res.iter_content(chunk_size=1024):
+    #                 fh.write(r)
+    #         fh.close()
+    #     except:
+    #         print('!! request', variable, '  failed !! -> year', year, 'month', month)
+        
+    
+    # Initialize the CDS API client
+    c = cdsapi.Client(timeout = 300)
+
+    # Define the parameters for the data request
+    params = {
+        "dataset": "reanalysis-era5-land",
+        "product_type": "reanalysis",
+        "variable": list(dico.keys()),
+        "statistic": [stat for stats in dico.values() for stat in stats],
+        "year": [str(year) for year in range(start.year, end.year + 1)],
+        "month": [f"{month:02d}" for month in range(1, 13)],
+        "day": [f"{day:02d}" for day in range(1, 32)],
+        "time_zone": "UTC+00:00",
+        "frequency": "daily",
+        "grid": [0.1, 0.1],
+        "area": [area[0], area[1], area[2], area[3]]  # North, West, South, East
+        }
+
+    # Request the data
+    result = c.service("tool.toolbox.orchestrator.workflow", {
+        "realm": "c3s",
+        "project": "app-c3s-daily-era5-statistics",
+        "version": "master",
+        "kwargs": params,
+        "workflow_name": "application"
+        })
+
+    # Get the location of the downloaded data
+    location = result[0]['location']
+
+    # Download the data
+    res = requests.get(location, stream=True)
+    print("Writing data to " + output_filename)
+    with open(output_filename, 'wb') as fh:
+        for r in res.iter_content(chunk_size=1024):
+            fh.write(r)
             
-    return None
+    return output_filename
 
@@ -315,7 +360,7 @@ def concat_monthly_nc_file(list_era5land_monthly_files: List[str], list_variable
 
         # Create file name
         try:
-            concatenated_file = output_path + os.sep + 'era5-land_' + dates[0].strftime('%m-%Y') + '_' + dates[-1].strftime('%m-%Y') + '_' + variable + '.nc'
+            concatenated_file = os.path.join(output_path, 'era5-land_' + dates[0].strftime('%m-%Y') + '_' + dates[-1].strftime('%m-%Y') + '_' + variable + '.nc')
         except:
             print(variable)
         
@@ -349,7 +394,7 @@ def uz_to_u2(u_z: List[float], h: float) -> List[float]:
         average daily wind speed in meters per second (ms- 1 ) measured at 2 m above the ground.
     """
 
-    return u_z*4.87/(np.log(67.8*h - 5.42))
+    return u_z * 4.87/(np.log(67.8 * h - 5.42))
 
 
 def ea_calc(T: float) -> float:
@@ -366,7 +411,7 @@ def ea_calc(T: float) -> float:
     e_a :the actual Vapour pressure in Kpa
     """
     
-    return 0.6108*np.exp(17.27*T/(T+237.15))
+    return 0.6108 * np.exp(17.27 * T / (T + 237.15))
 
 
 def load_variable(file_name: str) -> xr.Dataset:
@@ -653,7 +698,7 @@ def convert_interleave_mode(args: Tuple[str, str, bool]) -> None:
     return None
 
 
-def era5Land_daily_to_yearly_pixel(list_era5land_files: List[str], output_file: str, raw_S2_image_ref: str, ndvi_path: str, start_date: str, end_date: str, h: float = 10, max_ram: int = 8, use_OTB: bool = False, weather_overwrite: bool = False, safran: bool = False) -> str:
+def era5Land_daily_to_yearly_pixel(weather_file: str, output_file: str, raw_S2_image_ref: str, ndvi_path: str, start_date: str, end_date: str, h: float = 10, max_ram: int = 8, use_OTB: bool = False, weather_overwrite: bool = False, safran: bool = False) -> str:
     """
     Calculate ET0 values from the ERA5 netcdf weather variables.
     Output netcdf contains the ET0 and precipitation values for
@@ -663,8 +708,8 @@ def era5Land_daily_to_yearly_pixel(list_era5land_files: List[str], output_file:
     Arguments
     =========
 
-    1. list_era5land_files: ``List[str]``
-        list of netcdf files containing the necessary variables
+    1. weather_file: ``str``
+        path to netCDF raw weather files
     2. output_file: ``str``
         output file name without extension
     3. raw_S2_image_ref: ``str``
@@ -705,14 +750,7 @@ def era5Land_daily_to_yearly_pixel(list_era5land_files: List[str], output_file:
         return None
     
     # Load all monthly files into a single xarray dataset that contains all dates (daily frequency)
-    raw_weather_ds = None
-    for file in list_era5land_files:
-        if not raw_weather_ds:
-            raw_weather_ds = load_variable(file)
-        else:
-            temp = load_variable(file)
-            raw_weather_ds = xr.merge([temp, raw_weather_ds])
-    del temp
+    raw_weather_ds = xr.open_dataset(weather_file, decode_coords = 'all')
     
     # Clip extra dates
     raw_weather_ds = raw_weather_ds.sel({'time': slice(start_date, end_date)}).sortby(variables = 'time')
@@ -864,7 +902,7 @@ def era5Land_daily_to_yearly_pixel(list_era5land_files: List[str], output_file:
     return output_file_final
 
 
-def era5Land_daily_to_yearly_parcel(list_era5land_files: List[str], output_file: str, start_date: str, end_date: str, h: float = 10) -> str:
+def era5Land_daily_to_yearly_parcel(weather_file: str, output_file: str, start_date: str, end_date: str, h: float = 10) -> str:
     """
     Calculate ET0 values from the ERA5 netcdf weather variables.
     Output netcdf contains the ET0 and precipitation values for
@@ -873,8 +911,8 @@ def era5Land_daily_to_yearly_parcel(list_era5land_files: List[str], output_file:
     Arguments
     =========
 
-    1. list_era5land_files: ``List[str]``
-        list of netcdf files containing the necessary variables
+    1. weather_file: ``str``
+        path to netCDF raw weather files
     2. output_file: ``str``
         output file name without extension
     3. start_date: ``str``
@@ -894,14 +932,7 @@ def era5Land_daily_to_yearly_parcel(list_era5land_files: List[str], output_file:
     """
     
     # Load all monthly files into a single xarray dataset that contains all dates (daily frequency)
-    raw_weather_ds = None
-    for file in list_era5land_files:
-        if not raw_weather_ds:
-            raw_weather_ds = load_variable(file)
-        else:
-            temp = load_variable(file)
-            raw_weather_ds = xr.merge([temp, raw_weather_ds])
-    del temp
+    raw_weather_ds = xr.open_dataset(weather_file, decode_coords = 'all')
     
     # Clip extra dates
     raw_weather_ds = raw_weather_ds.sel({'time': slice(start_date, end_date)})
diff --git a/source/modspa_samir.py b/source/modspa_samir.py
index 29c7b81c6853ca472265f384e0dbecbf72934239..8959db836c353889a64782dc3d11890678d853f5 100644
--- a/source/modspa_samir.py
+++ b/source/modspa_samir.py
@@ -52,7 +52,7 @@ def rasterize_samir_parameters(csv_param_file: str, land_cover_raster: str, irri
     """
     
     # Get path of min max csv file
-    min_max_param_file = os.path.dirname(csv_param_file) + os.sep + 'params_samir_min_max.csv'
+    min_max_param_file = os.path.join(os.path.dirname(csv_param_file), 'params_samir_min_max.csv')
 
     # Load samir params into an object
     table_param = samir_parameters(csv_param_file)