diff --git a/sen2chain/__init__.py b/sen2chain/__init__.py
index 83cd50323fe9fb2ce950a214f32d892e9bce4028..1651d4b478d76f59aa8505c7b57ed7be14c84e35 100644
--- a/sen2chain/__init__.py
+++ b/sen2chain/__init__.py
@@ -32,6 +32,7 @@ from .library import Library
 from .data_request import DataRequest
 from .indices import IndicesCollection
 from .download_and_process import DownloadAndProcess
+from .download_eodag import S2cEodag
 from .time_series import TimeSeries
 from .automatization import Automatization
 from .utils import (
@@ -56,6 +57,7 @@ from .multi_processing import (
 from .tileset import TileSet
 from .jobs import Jobs, Job
 
+
 __version__ = "0.7.0"
 __author__ = (
     "Jérémy Commins <jebins@laposte.net> & Impact <pascal.mouquet@ird.fr>"
diff --git a/sen2chain/config.py b/sen2chain/config.py
index d44d7c6feeb42420d9c13efe2ae0f03ea98c544c..27b47680ad293f22c8b11136a5f3318c99822987 100644
--- a/sen2chain/config.py
+++ b/sen2chain/config.py
@@ -22,6 +22,7 @@ SHARED_DATA = dict(
     peps_download=ROOT / "sen2chain" / "peps_download3.py",
     sen2chain_meta=ROOT / "sen2chain" / "data" / "sen2chain_info.xml",
     raw_job_cfg=ROOT / "sen2chain" / "data" / "job_ini.cfg",
+    eodag_centroids_shp=ROOT / "sen2chain" / "data" / "eodag_workspace_locations_tiles" / "sentinel2_tiling_grid_centroids.shp",
 )
 
 
diff --git a/sen2chain/data/eodag_workspace_locations_tiles/custom_locations.yml b/sen2chain/data/eodag_workspace_locations_tiles/custom_locations.yml
deleted file mode 100644
index 32a529959bcf8e00afc1b85fc7aad99e798587d1..0000000000000000000000000000000000000000
--- a/sen2chain/data/eodag_workspace_locations_tiles/custom_locations.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-shapefiles:
-  - name: s2_tile_centroid
-    path: /home/operateur/sen2chain/sen2chain/data/sentinel2_tiling_grid_centroids.shp
-    attr: tile_id
\ No newline at end of file
diff --git a/sen2chain/download_eodag.py b/sen2chain/download_eodag.py
index b81b490970f9f7ed63b7ce37f9650bc2abe4466b..8df64992f2ac27eb4ba6305431b6014ea0446be7 100644
--- a/sen2chain/download_eodag.py
+++ b/sen2chain/download_eodag.py
@@ -6,7 +6,156 @@ Module for downloading Sentinel-2 images using EODAG
 https://www.github.com/CS-SI/EODAG
 
 """
+import logging
+import shapefile
+import os
+from pathlib import Path
+from eodag import EODataAccessGateway
+from eodag import setup_logging
+from .config import SHARED_DATA, Config
 
 
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
 
+ROOT = Path(os.path.realpath(__file__)).parent.parent
+
+
+class S2cEodag:
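+    """
+    Tile-oriented wrapper around the eodag ``EODataAccessGateway``: searches a
+    provider's catalogue for one Sentinel-2 tile (via the bundled tiling grid
+    centroids shapefile) and downloads the matching products.
+    """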
+
+    def __init__(
+        self, 
+        name: str = None,
+        provider: str = "peps"
+    ):
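+        """
+        :param name: Sentinel-2 tile identifier, as found in the ``tile_id``
+            attribute of the bundled tiling grid centroids shapefile.
+        :param provider: eodag provider name (default: "peps").
+        """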
+        self.name = name
+        self.provider = provider
+        self.products = None
+        eodag_centroids_shp = SHARED_DATA.get("eodag_centroids_shp")
+        
+        with shapefile.Reader(eodag_centroids_shp) as shp:
+            shaperecs = shp.shapeRecords()
+        
+        locations_yaml_content = """
+        shapefiles:
+          - name: s2_tile_centroid
+            path: {}
+            attr: tile_id
+        """.format(os.path.abspath(eodag_centroids_shp))
+        
+        eodag_custom_location_cfg = os.path.abspath(Config()._CONFIG_DIR / "eodag_custom_locations.yml")
+
+        with open(eodag_custom_location_cfg, "w") as f_yml:
+            f_yml.write(locations_yaml_content.strip())
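+        # silence log records up to WARNING while the gateway is instantiated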
+        logging.disable(level=logging.WARNING)
+        self.dag = EODataAccessGateway(locations_conf_path = eodag_custom_location_cfg)
+        logging.disable(logging.NOTSET)
+
+        self.dag.set_preferred_provider(self.provider)
+        # logger.info(self.dag.available_providers("S2_MSI_L1C"))
+
+        self.targeted_tile = [
+            sr
+            for sr in shaperecs
+            if sr.record["tile_id"] == name
+        ][0]
+
+    def search(
+        self,
+        productType: str = "L1C",
+        start: str = "2015-01-01",
+        end: str = "9999-12-31",
+    ):
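+        """
+        Query the provider for products intersecting this tile's centroid and
+        store them in ``self.products``, logging for each one whether it is
+        already present locally.
+
+        :param productType: "L1C" or "L2A".
+        :param start: search start date, YYYY-MM-DD.
+        :param end: search end date, YYYY-MM-DD.
+        """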
+        # TODO: productType should stay local to the search; it does not need
+        # to be stored on the instance for the download step
+        if productType == "L1C":
+            self.productType = "S2_MSI_L1C"
+            outputs_prefix = Path(Config().get("l1c_path")) / self.name
+        elif productType == "L2A":
+            self.productType = "S2_MSI_L2A"
+            outputs_prefix = Path(Config().get("l2a_path")) / self.name
+        else:
+            raise ValueError("productType must be either 'L1C' or 'L2A'")
+
+        default_search_criteria = dict(
+            productType = self.productType,
+            start = start,
+            end = end, 
+            # items_per_page=500,
+        )
+        
+        logging.disable(level=logging.WARNING)
+        self.products = self.dag.search_all(
+            locations = dict(s2_tile_centroid = self.name),
+            **default_search_criteria
+        )
+        logging.disable(logging.NOTSET)
+        
+        # iterate over a copy: removing items from the list being iterated would skip products
+        for p in self.products[:]:
+            if self.name not in p.properties["title"]:
+                self.products.remove(p)
+                logger.info("removing wrong tile from search results: {}".format(p.properties["title"]))
+
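+        # flag products already present on disk by pointing their location
+        # to the local SAFE folder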
+        for p in self.products:
+            if (outputs_prefix / (p.properties["title"] + ".SAFE")).exists():
+                p.location = "file://" + str(outputs_prefix / (p.properties["title"] + ".SAFE"))
+                logger.info("{} - remote {} - local PRESENT".format(p.properties["title"], p.properties["storageStatus"]))
+            else:
+                logger.info("{} - remote {} - local ABSENT".format(p.properties["title"], p.properties["storageStatus"]))
+        logger.info("Search returned {} products".format(len(self.products))) ####### rajouter ici "- dont xx ABSENT - dont xx ONLINE / xx STAGING"
+            
+    def download(
+        self,
+        product_id, 
+        outputs_prefix: str = None,
+        extract: bool = True,
+        delete_archive: bool = True,
+    ):
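+        """
+        Download one product if it is ONLINE, or order it if it is OFFLINE.
+
+        :param product_id: eodag ``EOProduct`` to download (the full product
+            object returned by :meth:`search`, not just its identifier).
+        :param outputs_prefix: destination folder; defaults to the tile's
+            configured L1C or L2A folder.
+        :param extract: extract the downloaded archive.
+        :param delete_archive: delete the archive once extracted.
+        """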
+        if not outputs_prefix:
+            if "L1C" in product_id.properties["title"]:
+                root_path = "l1c_path"
+            elif "L2A" in product_id.properties["title"]:
+                root_path = "l2a_path"
+            else:
+                raise ValueError("cannot infer L1C/L2A level from product title")
+            outputs_prefix = str(Path(Config().get(root_path)) / self.name)
+        
+        setup_logging(verbose = 2)
+
+        if product_id.properties["storageStatus"] == "ONLINE":
+            try:
+                # logging.disable(level=logging.WARNING)
+                downloaded_path = self.dag.download(
+                    product_id, 
+                    outputs_prefix = outputs_prefix, 
+                    extract = extract, 
+                    delete_archive = delete_archive,
+                    wait=1,
+                    timeout=0,
+                )
+                # logging.disable(logging.NOTSET)
+
+                if Path(downloaded_path).stem == Path(downloaded_path).parent.stem:
+                    upper_location = Path(downloaded_path).parent.parent / Path(downloaded_path).name
+                    Path(downloaded_path).replace(upper_location)
+                    product_id.location = "file://" + str(upper_location)
+                    Path(downloaded_path).parent.rmdir()
+                    logger.info("Moving up SAVE file")
+            except:
+                logger.info("Error: ONLINE product but cannot be downloaded, retry later")
+
+        elif product_id.properties["storageStatus"] == "OFFLINE":
+            try:
+                setup_logging(verbose = 0)
+                downloaded_path = self.dag.download(
+                    product_id, 
+                    outputs_prefix = outputs_prefix, 
+                    extract = extract, 
+                    delete_archive = delete_archive,
+                    wait=1,
+                    timeout=0,
+                )
+            except Exception:
+                logger.info("OFFLINE product: order placed, retry later")
+
+        elif product_id.properties["storageStatus"] == "STAGING":
+            logger.info("STAGING product, retry later")            
 
diff --git a/sen2chain/tiles.py b/sen2chain/tiles.py
index 12f286c34702874867f76d50c616e8346875e266..8cf3b2f4575521c1ccf1643eaeeaf38187900635 100644
--- a/sen2chain/tiles.py
+++ b/sen2chain/tiles.py
@@ -15,8 +15,12 @@ import os
 from itertools import chain
 from pathlib import Path
 from collections import namedtuple
-from datetime import datetime
+from datetime import datetime, timedelta
 from pprint import pformat
+import multiprocessing
+from queue import Queue
+from threading import Thread
 
 # type annotations
 from typing import List, Dict, Iterable
@@ -36,6 +40,8 @@ from .multi_processing import (
     cld_version_probability_iterations_reprocessing_multiprocessing,
     idx_multiprocessing,
 )
+from .download_eodag import S2cEodag
+from .utils import datetime_to_str
 
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
@@ -715,6 +721,61 @@ class Tile:
                 "cloud_cover": self._products["l2a"][prod].cloud_cover,
             }
         return prodlist
+    
+    def get_l1c(
+        self,
+        provider: str = "peps",
+        download: bool = True,
+        dl_mode: str = "multit",
+        start: str = "2015-01-01",
+        end: str = "9999-12-31",
+        new: bool = False,
+    ):
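+        """
+        Search and optionally download L1C products for this tile using eodag.
+
+        :param provider: eodag provider name (default: "peps").
+        :param download: if False, only search.
+        :param dl_mode: "seq" (sequential), "multit" (multithreading, default)
+            or "multip" (multiprocessing).
+        :param start: search start date, YYYY-MM-DD.
+        :param end: search end date, YYYY-MM-DD.
+        :param new: if True, only search for products acquired after the most
+            recent local L1C product.
+        """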
+        dag = S2cEodag(self.name, provider = provider)
+        if new:
+            start = datetime_to_str(self.l1c.last.date + timedelta(days=1), date_format = 'ymd')
+        dag.search(start = start, end = end)
+
+        # choose between sequential, multithreading (default) or multiprocessing download
+        if download:
+
+            ## sequential
+            if dl_mode == "seq":
+                for p in dag.products:
+                    dag.download(p)
+                    
+            ## multithreading
+            elif dl_mode == "multit":
+                NUM_THREADS = 8
+                q = Queue()
+
+                def dl_worker(q, dag):
+                    # daemon workers block on q.get() when the queue is empty
+                    # and are discarded when the main thread exits
+                    while True:
+                        dag.download(q.get())
+                        q.task_done()
+
+                for p in dag.products:
+                    q.put(p)
+                for t in range(NUM_THREADS):
+                    worker = Thread(target=dl_worker, args=(q, dag))
+                    worker.daemon = True
+                    worker.start()
+                q.join()
+            
+            ## multiprocessing
+            elif dl_mode == "multip":
+                nb_proc = max(min(len(os.sched_getaffinity(0)) - 1, 8), 1)
+                pool = multiprocessing.Pool(nb_proc)
+                pool.map(dag.download, dag.products)
+                pool.close()
+                pool.join()
+
+            else:
+                raise ValueError("dl_mode must be 'seq', 'multit' or 'multip'")
 
     def compute_l2a(
         self,