diff --git a/apps/drs_spot67_stac_example.py b/apps/drs_spot67_stac_example.py
index ff0749286e2911ae54e8d7f20b9981e763e9dbfa..4f724739cd5538c4b76a53480775abcf16a8dd80 100755
--- a/apps/drs_spot67_stac_example.py
+++ b/apps/drs_spot67_stac_example.py
@@ -4,9 +4,9 @@ import pyotb
 
 def main():
 
-    bbox = scenes.BoundingBox(xmin=-5.51, ymin=41.23, xmax=9.68, ymax=51.16)
+    bbox = scenes.BoundingBox(xmin=2, ymin=45.23, xmax=2.1, ymax=45.26)
     provider = scenes.stac.DinamisSpot67Provider()
-    scs = provider.search(bbox_wgs84=bbox, collections=["spot-6-7-drs"])
+    scs = provider.search(bbox_wgs84=bbox)
     for i, sc in enumerate(scs):
         print(sc)
         pyotb.HaralickTextureExtraction({"in": sc.get_xs(), "out": f"/data/tex_{i}.img"})
diff --git a/apps/mpc_s2_stac_example.py b/apps/mpc_s2_stac_example.py
new file mode 100755
index 0000000000000000000000000000000000000000..51434c1946c742b41f77a40f3af912741056eb01
--- /dev/null
+++ b/apps/mpc_s2_stac_example.py
@@ -0,0 +1,17 @@
+import scenes
+import pyotb
+
+
+def main():
+
+    bbox = scenes.BoundingBox(xmin=2, ymin=45.23, xmax=2.1, ymax=45.26)
+    provider = scenes.stac.MPCProvider()
+    scs = provider.search(bbox_wgs84=bbox)
+    for i, sc in enumerate(scs):
+        print(sc)
+        dyn = pyotb.DynamicConvert(sc.get_10m_bands()[0:256, 0:256, :])
+        dyn.write(f"/tmp/{i}.png")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scenes/auth.py b/scenes/auth.py
new file mode 100644
index 0000000000000000000000000000000000000000..9094bb669ef5b3bf10f5d235f203ebe4338e6079
--- /dev/null
+++ b/scenes/auth.py
@@ -0,0 +1,119 @@
+import os
+import json
+import requests
+import time
+import threading
+import appdirs
+
+
+class OAuth2KeepAlive:
+    """
+    Class to start and keep alive an OAuth2 session
+    """
+    def __init__(self, keycloak_server_url, keycloak_realm, keycloak_client_id, token=None, refresh_callback=None):
+        """
+
+        Args:
+            keycloak_server_url: keycloak server URL. Must ends with ".../auth"
+            keycloak_realm:  keycloak realm
+            keycloak_client_id:  keycloak ID
+            token: keycloak token
+            refresh_callback: optional callback passing the fresh access_token
+        """
+        # Keycloak params
+        self.keycloak_server_url = keycloak_server_url.rstrip("/")
+        assert self.keycloak_server_url.endswith("/auth")
+        self.keycloak_client_id = keycloak_client_id
+        self.keycloak_realm = keycloak_realm
+        base_url = f"{self.keycloak_server_url}/realms/{self.keycloak_realm}/protocol/openid-connect"
+        self.token_endpoint = f"{base_url}/token"
+        self.device_endpoint = f"{base_url}/auth/device"
+        # Requests
+        self.data_base = {"client_id": self.keycloak_client_id, "scope": "offline_access"}
+        self.headers = {"Content-Type": "application/x-www-form-urlencoded"}
+        # Auto-refresh
+        self.update_token_thread = None
+        self.token = token
+        self.refresh_rush_percent = 0.75
+        self.refresh_callback = refresh_callback
+        # Fetch existing refresh_token
+        cfg_pth = appdirs.user_config_dir(appname='scenes_auth')
+        self.token_file = os.path.join(cfg_pth, "token")
+        if not os.path.exists(cfg_pth):
+            os.makedirs(cfg_pth)
+        if os.path.isfile(self.token_file) and not token:
+            try:
+                with open(self.token_file) as json_file:
+                    self.token = json.load(json_file)
+            except (OSError, json.JSONDecodeError):
+                print(f"Warning: unable to read last token from file {self.token_file}")
+                pass
+        # Start auto-refresh
+        if not self.token:
+            self._get_token_from_code()
+        self._refresh_token()
+
+    def _get_token_from_code(self):
+        """
+        OAuth2 authorization code flow
+
+        """
+        ret = requests.post(self.device_endpoint, headers=self.headers, data=self.data_base)
+        if ret.status_code == 200:
+            response = ret.json()
+            if "verification_uri_complete" in response and "device_code" in response:
+                verif_url_comp = response["verification_uri_complete"]
+                device_code = response["device_code"]
+                print(f"Open your browser and go to \033[92m{verif_url_comp}\033[0m to grant access.")
+                print("Waiting for authentication...")
+                start = time.time()
+                assert "expires_in" in response
+                assert "interval" in response
+                data = self.data_base.copy()
+                data.update({"device_code": device_code, "grant_type": "urn:ietf:params:oauth:grant-type:device_code"})
+                while True:
+                    ret = requests.post(self.token_endpoint, headers=self.headers, data=data)
+                    elapsed = time.time() - start
+                    if elapsed > response["expires_in"]:
+                        raise ConnectionError("User has not logged yet and the link has expired")
+                    if ret.status_code != 200:
+                        time.sleep(response["interval"])
+                    else:
+                        self.__renew_token(ret)
+                        return
+        raise ConnectionError(f"Unable to authenticate to device endpoint {self.device_endpoint}")
+
+
+    def _refresh_token(self):
+        """
+        Refresh the token.
+
+        """
+        assert 'refresh_token' in self.token
+        data = self.data_base.copy()
+        data.update({"refresh_token": self.token['refresh_token'], "grant_type": "refresh_token"})
+        ret = requests.post(self.token_endpoint, headers=self.headers, data=data)
+        if ret.status_code == 200:
+            self.__renew_token(ret)
+            assert "expires_in" in self.token
+            delay_secs = int(self.refresh_rush_percent * float(self.token['expires_in']))
+            self.update_token_thread = threading.Timer(delay_secs, self._refresh_token)
+            self.update_token_thread.daemon = True
+            self.update_token_thread.start()
+            return
+        raise ConnectionError(f"Unable to refresh token at endpoint {self.token_endpoint}")
+
+
+    def __renew_token(self, data):
+        """
+        Update the token.
+
+        Args:
+            data: incoming response containing the token
+
+        """
+        self.token = data.json()
+        if self.refresh_callback:
+            self.refresh_callback(self.token)
+        with open(self.token_file, 'w') as fp:
+            json.dump(self.token, fp)
diff --git a/scenes/deepnets.py b/scenes/deepnets.py
index ee02abfec23e3fc83c60e91f36d8f8ebf40508ee..dac78e3b0d76d5e20b65563907e2eb3eaf6c10f7 100644
--- a/scenes/deepnets.py
+++ b/scenes/deepnets.py
@@ -75,7 +75,7 @@ def sr4rs(input_image: str | pyotb.core.otbObject, model_url: str = SR4RS_MODEL_
     tmp_zip = os.path.join(tmp_dir, os.path.basename(model_url))
     tmp_unzipped = os.path.splitext(tmp_zip)[0]
     if not os.path.exists(tmp_unzipped):
-        download.curl_url(model_url, postdata=None, out_file=tmp_zip)
+        download.perform_request(model_url, postdata=None, out_file=tmp_zip)
         with zipfile.ZipFile(tmp_zip, 'r') as zip_ref:
             print("Unzipping model...")
             zip_ref.extractall(tmp_dir)
diff --git a/scenes/download.py b/scenes/download.py
index 5bf9f3f2a9341e50330633b02c74c4680cd30e6a..88931163ed79f9be557d3054f1a41d0dca36ad75 100644
--- a/scenes/download.py
+++ b/scenes/download.py
@@ -63,12 +63,9 @@ theia.download_in_range(bbox, trange, "/tmp/download/", "LEVEL2A")
 from __future__ import annotations
 import datetime
 import hashlib
-import io
-import json
 import os
 from urllib.parse import urlencode
-
-import pycurl
+import requests
 from tqdm.autonotebook import tqdm
 
 from scenes import dates
@@ -106,74 +103,58 @@ def is_file_complete(filename: str, md5sum: str) -> bool:
     return md5sum == compute_md5(filename)
 
 
-def curl_url(url, postdata: dict[str, str], verbose: bool = False, out_file: str = None,
-             header: list[str] = None) -> str:
+def perform_request(url, postdata: dict[str, str] = None, out_file: str = None,
+                    header: list[str] = None) -> dict:
     """
     Use PyCurl to make some requests
 
     Args:
         url: url
         postdata: POST data
-        verbose: boolean (Default value = False)
         out_file: output file (Default value = None)
-        header: header. If None is kept, ['Accept:application/json'] is used (Default value = None)
+        header: header. If None is kept, {'Accept' : 'application/json'} is used (Default value = None)
 
     Returns:
         decoded contents
 
     """
     if not header:
-        header = ['Accept:application/json']
-
-    c = pycurl.Curl()
-    c.setopt(pycurl.URL, url)
-    c.setopt(pycurl.HTTPHEADER, header)
-    c.setopt(pycurl.SSL_VERIFYPEER, False)
-    c.setopt(pycurl.SSL_VERIFYHOST, False)
-    if postdata is not None:
-        c.setopt(pycurl.POST, 1)
-        postfields = urlencode(postdata)
-        c.setopt(pycurl.POSTFIELDS, postfields)
-    storage = io.BytesIO()
-    if verbose:
-        c.setopt(pycurl.VERBOSE, 1)
-    if out_file is not None:
-        with open(out_file, "wb") as fp:
-            progress_bar = None
-            last_download_d = 0
-            print("Downloading", flush=True)
-
-            def _status(download_t, download_d, *_):
-                """Callback function for c.XFERINFOFUNCTION
-                https://stackoverflow.com/questions/19724222/pycurl-attachments-and-progress-functions
-
-                Args:
-                    download_t: total
-                    download_d: already downloaded
-                    *_: any additional param (won't be used)
-
-                """
-                if download_d > 0:
-                    nonlocal progress_bar, last_download_d
-                    if not progress_bar:
-                        progress_bar = tqdm(total=download_t, unit='iB', unit_scale=True)
-                    progress_bar.update(download_d - last_download_d)
-                    last_download_d = download_d
-
-            c.setopt(c.NOPROGRESS, False)
-            c.setopt(c.XFERINFOFUNCTION, _status)
-            c.setopt(pycurl.WRITEDATA, fp)
-            c.perform()
+        header = {'Accept': 'application/json'}
+
+    if out_file:
+        response = requests.get(url, headers=header, stream=True)
+        total_size_in_bytes = int(response.headers.get('content-length', 0))
+        block_size = 1024  # 1 Kb
+        progress_bar = tqdm(total=total_size_in_bytes, unit='iB', unit_scale=True)
+        with open(out_file, 'wb') as file:
+            for data in response.iter_content(block_size):
+                progress_bar.update(len(data))
+                file.write(data)
+        progress_bar.close()
+        if total_size_in_bytes != 0 and progress_bar.n != total_size_in_bytes:
+            raise ConnectionError(f"Incomplete download of {url}: got {progress_bar.n} of {total_size_in_bytes} bytes")
+        return
+    elif postdata:
+        response = requests.post(url, headers=header, data=postdata)
+        if response.status_code != 200:
+            raise ConnectionError(
+                f"Unable to POST payload {postdata} at url {url} with headers {header} ({response.text})")
     else:
-        c.setopt(pycurl.WRITEFUNCTION, storage.write)
-        c.perform()
-    c.close()
-    content = storage.getvalue()
-    return content.decode(encoding="utf-8", errors="strict")
+        response = requests.get(url, headers=header)
+        if response.status_code != 200:
+            raise ConnectionError(f"Unable to GET url {url} with headers {header} ({response.text})")
+
+    response_type = response.headers.get('content-type', '')
+    if response_type.startswith("application/json"):
+        return response.json()
+    if response_type.startswith("text/html"):
+        return response.text
+    return None
 
 
 class TheiaDownloader:
     """The TheiaDownloader class enables to download L2A and L3A Theia products"""
+
     def __init__(self, config_file: str, max_records: int = 500):
         """
         Args:
@@ -225,13 +206,13 @@ class TheiaDownloader:
             a string
 
         """
-        return f'{bbox.ymin},{bbox.xmin},{bbox.ymax},{bbox.xmax}'
+        return f'{bbox.xmin},{bbox.ymin},{bbox.xmax},{bbox.ymax}'
 
     def _get_token(self):
         """Get the THEIA token"""
-        postdata_token = {"ident": self.config["login_theia"], "pass": self.config["password_theia"]}
-        url = f"{self.config['serveur']}/services/authenticate/"
-        token = curl_url(url, postdata_token)
+        token = perform_request(f"{self.config['serveur']}/services/authenticate/",
+                                postdata={"ident": self.config["login_theia"],
+                                          "pass": self.config["password_theia"]})
         if not token:
             print("Empty token. Please check your credentials in config file.")
         return token
@@ -258,7 +239,8 @@ class TheiaDownloader:
         url = f"{self.config['serveur']}/{self.config['resto']}/api/collections/SENTINEL2/" \
               f"search.json?{urlencode(dict_query)}"
         print("Ask Theia catalog...")
-        search = json.loads(curl_url(url, None))
+        search = perform_request(url)
+        print(search)
 
         tiles_dict = {}
         for record in search["features"]:
@@ -304,13 +286,13 @@ class TheiaDownloader:
             print(f"Fetching products for tile {tile_name}...")
             for acq_date, description in tile.items():
                 url = f"{description['url']}/?issuerId=theia"
-                header = [f'Authorization: Bearer {token}', 'Content-Type: application/json']
+                header = {'Authorization': f'Bearer {token}', 'Content-Type': 'application/json'}
                 filename = f"{os.path.join(download_dir, description['product_name'])}.zip"
 
                 # Check if the destination file exist and is correct
                 if not is_file_complete(filename, description["checksum"]):
                     print(f"Downloading {acq_date}")
-                    curl_url(url, postdata=None, out_file=filename, header=header)
+                    perform_request(url, out_file=filename, header=header)
                 else:
                     print(f"{acq_date} already downloaded. Skipping.")
                 description["local_file"] = filename
@@ -335,7 +317,7 @@ class TheiaDownloader:
         start_date, end_date = dates_range
 
         dict_query = {
-            "box": self._bbox2str(bbox_wgs84),  # lonmin, latmin, lonmax, latmax
+            "box": self._bbox2str(bbox_wgs84),  # lonmin, latmin, lonmax, latmax  (3, 43, 4, 45)
             "startDate": dates.any2datetime(start_date).strftime("%Y-%m-%d"),
             "completionDate": dates.any2datetime(end_date).strftime("%Y-%m-%d"),
             "maxRecords": self.max_records,
diff --git a/scenes/sentinel.py b/scenes/sentinel.py
index c31d43057ef641c2d63a678ab5cfba8bdaf30227..a1fc288a06e8c1de61be35ace1eac55683f26b07 100644
--- a/scenes/sentinel.py
+++ b/scenes/sentinel.py
@@ -245,7 +245,7 @@ class Sentinel2SceneBase(Scene):
 
     def __init__(self, acquisition_date: datetime.datetime, epsg: int, extent_wgs84: list[tuple(float, float)],
                  assets_paths: dict[str, str], concatenated_bands_dict: dict[str, list[str]],
-                 spectral_keys: list[str], other_imagery_keys: list[str],
+                 spectral_keys: list[str], other_imagery_keys: list[str] = None,
                  spectral_src_class=CommonImagerySource, other_imagery_src_class=CommonImagerySource):
         """
         Initialize the Sentinel-2 Scene
@@ -265,11 +265,12 @@ class Sentinel2SceneBase(Scene):
         """
         # Sources for spectral and other imagery
         sources = {key: partial(spectral_src_class, self, assets_paths[key]) for key in spectral_keys}
-        sources.update({key: partial(other_imagery_src_class, self, assets_paths[key]) for key in other_imagery_keys})
+        if other_imagery_keys:
+            sources.update({key: partial(other_imagery_src_class, self, assets_paths[key])
+                            for key in other_imagery_keys})
 
         # Sources for concatenated spectral bands (e.g. it will create the self.get_b10m_bands() method)
         for key, default_bands_names in concatenated_bands_dict.items():
-            print(f"key={key}, default_bands_names={default_bands_names}")
             sources.update({key: partial(self.__get_bands, spectral_src_class, default_bands_names)})
 
         super().__init__(acquisition_date=acquisition_date, epsg=epsg, extent_wgs84=extent_wgs84,
@@ -323,11 +324,11 @@ class Sentinel2TheiaScene(Sentinel2SceneBase):
         # Retrieve the list of .tif files
         is_zip = self.archive.lower().endswith(".zip")
         if is_zip:
-            print("Input type is a .zip archive")
+            # print("Input type is a .zip archive")
             files = utils.list_files_in_zip(self.archive)
             self.files = [utils.to_vsizip(self.archive, f) for f in files]
         else:
-            print("Input type is a directory")
+            # print("Input type is a directory")
             self.files = utils.find_files_in_all_subdirs(self.archive, "*.tif", case_sensitive=False)
 
         # Concatenation of spectral bands, at each resolution
@@ -480,3 +481,50 @@ def get_downloaded_scenes(download_results: dict[str, dict[str, str]]) -> list[S
             archive = dic['local_file']
             scenes_list.append(get_scene(archive))
     return scenes_list
+
+
+class Sentinel2MPCScene(Sentinel2SceneBase):
+    """class for Sentinel-2 images from Microsoft Planetary Computer"""
+
+    def __init__(self, assets_paths: dict[str, str]):
+        """
+        Args:
+            assets_paths: assets paths
+
+        """
+        # Concatenation of spectral bands, at each resolution
+        spectral_bands_10m_mapping = {"b4": "B04",
+                                      "b3": "B03",
+                                      "b2": "B02",
+                                      "b8": "B08"}
+        spectral_bands_20m_mapping = {"b5": "B05",
+                                      "b6": "B06",
+                                      "b7": "B07",
+                                      "b8a": "B8A",
+                                      "b11": "B11",
+                                      "b12": "B12"}
+        spectral_bands_10m_keys = list(spectral_bands_10m_mapping.keys())
+        spectral_bands_20m_keys = list(spectral_bands_20m_mapping.keys())
+        concatenated_bands_dict = {
+            "10m_bands": spectral_bands_10m_keys,
+            "20m_bands": spectral_bands_20m_keys
+        }
+        mapping = {}
+        mapping.update(spectral_bands_10m_mapping)
+        mapping.update(spectral_bands_20m_mapping)
+
+        # Assets (spectral bands)
+        # In MPC products, the asset keys are always uppercase (e.g. B02, ..., B8A).
+        spectral_keys = spectral_bands_10m_keys + spectral_bands_20m_keys
+        updated_assets_paths = {key: assets_paths[asset_key] for key, asset_key in mapping.items()}
+
+        # Date, extent
+        b2_path = updated_assets_paths["b2"]  # e.g. ...T45WXU_20221019T062901_B02_10m.tif
+        epsg, extent = get_epsg_extent_wgs84(b2_path)
+        datestr = b2_path.split("_")[-3]  # 20180630T105440
+        acquisition_date = datetime.datetime.strptime(datestr, '%Y%m%dT%H%M%S')
+
+        # Call parent constructor
+        super().__init__(acquisition_date=acquisition_date, epsg=epsg, extent_wgs84=extent,
+                         assets_paths=updated_assets_paths, concatenated_bands_dict=concatenated_bands_dict,
+                         spectral_keys=spectral_keys)
diff --git a/scenes/stac.py b/scenes/stac.py
index 80cd6a9a19f5b4a443d898f6913d9c62468e8a13..e63a92a1e3fc4f75dd005c2d597ae076278f7ad8 100644
--- a/scenes/stac.py
+++ b/scenes/stac.py
@@ -20,58 +20,17 @@ scs[0].get_xs().cld_msk_drilled().write("/tmp/test.tif")
 from __future__ import annotations
 import os
 import tempfile
-import threading
 import datetime
 import pystac
 from pystac_client import Client
 from scenes.spatial import BoundingBox
 from scenes.dates import any2datetime
 from scenes.spot import Spot67DRSScene
+from scenes.sentinel import Sentinel2MPCScene
+from scenes.auth import OAuth2KeepAlive
 import requests
-import time
 from tqdm.autonotebook import tqdm
-
-
-def get_token(keycloak_server_url, keycloak_realm, keycloak_client_id):
-    keycloak_server_url = keycloak_server_url.rstrip("/")
-    assert keycloak_server_url.endswith("auth")
-    base_url = f"{keycloak_server_url}/realms/{keycloak_realm}/protocol/openid-connect"
-    device_endpoint = f"{base_url}/auth/device"
-    token_endpoint = f"{base_url}/token"
-    ret = requests.post(
-        device_endpoint,
-        headers={"Content-Type": "application/x-www-form-urlencoded"},
-        data={"client_id": keycloak_client_id}
-    )
-    if ret.status_code == 200:
-        data = ret.json()
-        if "verification_uri_complete" in data and "device_code" in data:
-            verif_url_comp = data["verification_uri_complete"]
-            device_code = data["device_code"]
-            print(f"Open your browser and go to \033[92m{verif_url_comp}\033[0m to grant access.")
-            print("Waiting for authentication...")
-            start = time.time()
-            durations = {"expires_in": 3600, "interval": 10}
-            for key in durations:
-                if key in data:
-                    durations[key] = data[key]
-            while True:
-                ret = requests.post(
-                    token_endpoint,
-                    headers={"Content-Type": "application/x-www-form-urlencoded"},
-                    data={"client_id": keycloak_client_id,
-                          "device_code": device_code,
-                          "grant_type": "urn:ietf:params:oauth:grant-type:device_code"}
-                )
-                elapsed = start - time.time()
-                if elapsed > durations["expires_in"]:
-                    break
-                if ret.status_code != 200:
-                    time.sleep(durations["interval"])
-                else:
-                    return ret.json()
-    raise ConnectionError(f"Unable to authenticate to device endpoint {device_endpoint}")
-
+import threading
 
 class ProviderBase:
     def __init__(self, url):
@@ -89,8 +48,8 @@ class ProviderBase:
                                     pystac.MediaType.TIFF,
                                     pystac.MediaType.JPEG2000]
 
-    def search(self, collections: list[str], bbox_wgs84: BoundingBox, date_min: datetime.datetime | str = None,
-               date_max: datetime.datetime | str = None, filt: dict = None, query: dict = None):
+    def stac_search(self, collections: list[str], bbox_wgs84: BoundingBox, date_min: datetime.datetime | str = None,
+                    date_max: datetime.datetime | str = None, filt: dict = None, query: dict = None):
         """
         Search an item in a STAC catalog.
         see https://pystac-client.readthedocs.io/en/latest/api.html#pystac_client.Client.search
@@ -132,23 +91,30 @@ class ProviderBase:
 
 
 class DinamisSpot67Provider(ProviderBase):
-    def __init__(self, url="https://stacapi.147.100.200.143.nip.io",
-                 keycloak_server_url="https://stacapi.147.100.200.143.nip.io/auth/",
-                 keycloak_client_id="device-client",
-                 keycloak_realm="dinamis"):
+    def __init__(self, url="https://stacapi.147.100.200.143.nip.io", auth=None):
         super().__init__(url=url)
-        self.keycloak_server_url = keycloak_server_url
-        self.keycloak_client_id = keycloak_client_id
-        self.keycloak_realm = keycloak_realm
         self.temp_dir = tempfile.TemporaryDirectory()
         self.headers_file = os.path.join(self.temp_dir.name, 'headers.txt')
-        self.token = None
-        self.update_token_thread = None
-
-    def get_auth_header(self):
-        assert 'access_token' in self.token
-        access_token = self.token['access_token']
-        return {"Authorization": f"Bearer {access_token}"}
+        self.__auth_headers = None
+        self.__auth_headers_lock = threading.Lock()
+        # Use the provided auth object, or start a new OAuth2 session by default
+        self.auth = auth or OAuth2KeepAlive(
+            keycloak_server_url="https://stacapi.147.100.200.143.nip.io/auth/",
+            keycloak_client_id="device-client",
+            keycloak_realm="dinamis",
+            refresh_callback=self.update_headers
+        )
+
+    def set_auth_headers(self, token):
+        assert 'access_token' in token
+        access_token = token['access_token']
+        with self.__auth_headers_lock:
+            self.__auth_headers = {"Authorization": f"Bearer {access_token}"}
+
+
+    def get_auth_headers(self):
+        with self.__auth_headers_lock:
+            return self.__auth_headers
 
     def get_asset_path(self, asset: pystac.asset) -> str:
         """
@@ -165,52 +131,96 @@ class DinamisSpot67Provider(ProviderBase):
         url = url.replace("/vsicurl/", f"/vsicurl?header_file={self.headers_file}&url=")
         return url
 
-    def update_headers(self):
+    def update_headers(self, token):
         print("update headers")
-        if not self.token:
-            self.token = get_token(keycloak_server_url=self.keycloak_server_url,
-                                   keycloak_realm=self.keycloak_realm,
-                                   keycloak_client_id=self.keycloak_client_id)
-            assert 'access_token' in self.token
-        else:
-            assert self.token
-            assert 'refresh_token' in self.token
-            self.token = self.keycloak_openid.refresh_token(self.token['refresh_token'])
+        self.set_auth_headers(token=token)
         tmp_headers_file = f"{self.headers_file}.tmp"
         old_headers_file = f"{self.headers_file}.old"
-        auth_header = self.get_auth_header()
-        auth_header_str = "\n".join([f'{key}: {value}' for key, value in auth_header.items()])
+        auth_header_str = "\n".join([f'{key}: {value}' for key, value in self.get_auth_headers().items()])
         with open(tmp_headers_file, "w") as text_file:
             text_file.write(auth_header_str)
         if os.path.exists(self.headers_file):
             os.rename(self.headers_file, old_headers_file)
         os.rename(tmp_headers_file, self.headers_file)
-        assert 'expires_in' in self.token
-        delay_secs = int(0.75 * float(self.token['expires_in']))
-        self.update_token_thread = threading.Timer(delay_secs, self.update_headers)
-        self.update_token_thread.daemon = True
-        self.update_token_thread.start()
 
     def search(self, bbox_wgs84: BoundingBox, date_min: datetime.datetime | str = None,
-               date_max: datetime.datetime | str = None, collections: list[str] = None) -> list[Spot67DRSScene]:
+               date_max: datetime.datetime | str = None, collections: list[str] = None,
+               as_generator=False) -> list[Spot67DRSScene]:
         """
-        Search and instantiate Spot-6/7 DRS products from Dinamis
+        Search and instantiate Spot-6/7 DRS products from Dinamis
 
         Args:
             bbox_wgs84: The bounding box in WGS84 (BoundingBox instance)
             date_min: date min (datetime.datetime or str)
             date_max: date max (datetime.datetime or str)
             collections: list of collections
+            as_generator: return the scenes as generator
 
         Returns:
             `Spot67DRSScene` instances in a list
 
         """
-        self.update_headers()
         if not collections:
             collections = ["spot-6-7-drs"]
-        items = super().search(collections=collections, bbox_wgs84=bbox_wgs84, date_min=date_min, date_max=date_max)
-        return [Spot67DRSScene(
+        items = self.stac_search(collections=collections, bbox_wgs84=bbox_wgs84, date_min=date_min, date_max=date_max)
+        gen = (Spot67DRSScene(
+            assets_paths={key: self.get_asset_path(asset) for key, asset in item.assets.items()},
+            assets_headers=self.get_auth_headers()
+        ) for item in tqdm(items))
+        if as_generator:
+            return gen
+        return list(gen)
+
+
+class MPCProvider(ProviderBase):
+    def __init__(self, url="https://planetarycomputer.microsoft.com/api/stac/v1"):
+        super().__init__(url=url)
+
+    def get_asset_path(self, asset: pystac.asset) -> str:
+        """
+        Overrides parent method
+
+        Args:
+            asset: STAC asset
+
+        Returns:
+            URI
+
+        """
+        url = asset.href
+        if asset.media_type in self.vsicurl_media_types:
+            ret = requests.get(f"https://planetarycomputer.microsoft.com/api/sas/v1/sign?href={url}",
+                               headers={"accept": "application/json"})
+            assert ret.status_code == 200, f"Request returned: {ret.text}"
+            data = ret.json()
+            assert "href" in data
+            new_url = data["href"]
+            return f"/vsicurl/{new_url}"
+        return url
+
+    def search(self, bbox_wgs84: BoundingBox, date_min: datetime.datetime | str = None,
+               date_max: datetime.datetime | str = None, collections: list[str] = None,
+               as_generator=True) -> list[Sentinel2MPCScene]:
+        """
+        Search and instantiate Sentinel-2 products from Microsoft Planetary Computer
+
+        Args:
+            bbox_wgs84: The bounding box in WGS84 (BoundingBox instance)
+            date_min: date min (datetime.datetime or str)
+            date_max: date max (datetime.datetime or str)
+            collections: list of collections
+            as_generator: return the scenes as generator
+
+        Returns:
+            `Sentinel2MPCScene` instances in a list
+
+        """
+        if not collections:
+            collections = ["sentinel-2-l2a"]
+        items = self.stac_search(collections=collections, bbox_wgs84=bbox_wgs84, date_min=date_min, date_max=date_max)
+        gen = (Sentinel2MPCScene(
             assets_paths={key: self.get_asset_path(asset) for key, asset in item.assets.items()},
-            assets_headers=self.get_auth_header()
-        ) for item in tqdm(items)]
+        ) for item in tqdm(items))
+        if as_generator:
+            return gen
+        return list(gen)
diff --git a/setup.py b/setup.py
index b12d0d974efb8717e94c89bafcef149814340128..c111e266296678353c8687536a505cea056994a8 100644
--- a/setup.py
+++ b/setup.py
@@ -32,7 +32,7 @@ setuptools.setup(
     ],
     packages=setuptools.find_packages(),
     python_requires=">=3.6",
-    install_requires=["rtree", "pyotb", "pycurl", "tqdm", "pystac-client"],
+    install_requires=["rtree", "pyotb", "requests", "tqdm", "pystac-client", "appdirs"],
     keywords=["remote sensing", "otb", "orfeotoolbox", "orfeo", "pyotb", "stac"],
     scripts=["apps/drs_spot67_import.py", "apps/s2_download.py", "apps/s2_import.py", "apps/search.py"]
 )