diff --git a/build/lib/process/process.py b/build/lib/process/process.py
index 3c25eb7f467ca22cfd64fa61f07d7a40a9e8fa8b..db4bc65b367d873c51e28c9f459203e8b6e00084 100644
--- a/build/lib/process/process.py
+++ b/build/lib/process/process.py
@@ -9,7 +9,7 @@ from plotly.graph_objs import *
 
 from utils.utils import reshape as r
 import utils.auxdata as ua
-from config import *
+from trios.config import *
 
 
 class awr_process:
diff --git a/build/lib/process/process_compar_awr.py b/build/lib/process/process_compar_awr.py
index 6fae7a2bc71803a1f1608dd30916dc6edfa23d19..52b90b52ac0cd618c49e5c82a46d3fb2bbd5ed1c 100644
--- a/build/lib/process/process_compar_awr.py
+++ b/build/lib/process/process_compar_awr.py
@@ -14,7 +14,7 @@ from scipy.interpolate import interp1d
 from utils.sunposition import sunpos
 import utils.utils as u
 import utils.auxdata as ua
-from process.process import *
+from trios.process import *
 
 coordf = glob.glob("/DATA/OBS2CO/data/info/mesures_in_situ.csv")[0]
 coords = pd.read_csv(coordf, sep=';')
diff --git a/build/lib/process/process_test_setup.py b/build/lib/process/process_test_setup.py
index b3bf378baeb39c2512a4609f6cfa5367d987d71b..7f265bad4996766ba373f5077b1fc89100574b0d 100644
--- a/build/lib/process/process_test_setup.py
+++ b/build/lib/process/process_test_setup.py
@@ -13,7 +13,7 @@ from scipy.interpolate import interp1d
 
 from utils.sunposition import sunpos
 import utils.utils as u
-from process.process import *
+from trios.process import *
 
 
 coordf = glob.glob("/DATA/OBS2CO/data/info/mesures_in_situ.csv")[0]
diff --git a/build/lib/utils/utils.py b/build/lib/utils/utils.py
index 53ecc6918784092d8b477577764b0d22d29b68a7..015cf7d2d20fcfaf9a4aed66fe9af70472b0b7f8 100644
--- a/build/lib/utils/utils.py
+++ b/build/lib/utils/utils.py
@@ -3,7 +3,7 @@ import numpy as np
 from scipy.interpolate import interp1d
 
 from utils.sunposition import sunpos
-
+from trios.config import *
 
 class awr_data:
     '''
@@ -46,18 +46,23 @@ class awr_data:
         Lsky, wl_Lsky = d.load_csv(self.Lskyf)
         Lt, wl_Lt = d.load_csv(self.Ltf)
 
-        # ''' interpolate Ed and Lsky data upon Lt wavelength'''
-        wl = wl_Lt
+        # ''' interpolate Ed, Lt and Lsky data upon common wavelength'''
+        wl = wl_common
         Lt.columns = pd.MultiIndex.from_tuples(zip(['Lt'] * len(wl), wl), names=['param', 'wl'])
         intEd = interp1d(wl_Ed, Ed.values, fill_value='extrapolate')(wl)
-        newEd = pd.DataFrame(index=Ed.index,
-                             columns=pd.MultiIndex.from_tuples(zip(['Ed'] * len(wl), wl), names=['param', 'wl']),
-                             data=intEd)
+        newEd = pd.DataFrame(index=Ed.index,columns=pd.MultiIndex.from_tuples(zip(['Ed'] * len(wl), wl),
+                             names=['param', 'wl']),data=intEd)
+
+        intLt = interp1d(wl_Lt, Lt.values, fill_value='extrapolate')(wl)
+        newLt = pd.DataFrame(index=Lt.index,columns=pd.MultiIndex.from_tuples(zip(['Lt'] * len(wl), wl),
+                             names=['param', 'wl']),data=intLt)
+
         intLsky = interp1d(wl_Lsky, Lsky.values, fill_value='extrapolate')(wl)
         newLsky = pd.DataFrame(index=Lsky.index, columns=pd.MultiIndex.from_tuples(zip(['Lsky'] * len(wl), wl),
-                                                                                   names=['param', 'wl']), data=intLsky)
+                               names=['param', 'wl']), data=intLsky)
+
         # merge sensor data on time
-        df = pd.merge_asof(Lt, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
+        df = pd.merge_asof(newLt, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
                            direction="nearest")
         df = pd.merge_asof(df, newLsky, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
                            direction="nearest")
@@ -126,32 +131,36 @@ class iwr_data:
         # Edz.mask(Edz<0,inplace=True)
         # Luz.mask(Luz<0,inplace=True)
 
-
-
         # copy depth data to Ed frame on date index
         # Ed.index = Ed.index.droplevel(level=1)
 
-        #''' interpolate Ed and Lsky data upon Lt wavelength'''
-        wl = wl_Luz
+        #''' interpolate Ed, Edz and Luz data upon common wavelength'''
+        wl = wl_common
         Luz.columns = pd.MultiIndex.from_tuples(list(zip(['Luz'] * len(wl), wl)), names=['param', 'wl'])
         intEd = interp1d(wl_Ed, Ed.values, fill_value='extrapolate')(wl)
         newEd = pd.DataFrame(index=Ed.index.get_level_values(0),
                              columns=pd.MultiIndex.from_tuples(list(zip(['Ed'] * len(wl), wl)), names=['param', 'wl']),
                              data=intEd)
+
         intEdz = interp1d(wl_Edz, Edz.values, fill_value='extrapolate')(wl)
         newEdz = pd.DataFrame(index=Edz.index, columns=pd.MultiIndex.from_tuples(list(zip(['Edz'] * len(wl), wl)),
-                                                                                 names=['param', 'wl']), data=intEdz)
+                              names=['param', 'wl']), data=intEdz)
+
+        intLuz = interp1d(wl_Luz, Luz.values, fill_value='extrapolate')(wl)
+        newLuz = pd.DataFrame(index=Luz.index, columns=pd.MultiIndex.from_tuples(list(zip(['Luz'] * len(wl), wl)),
+                              names=['param', 'wl']), data=intLuz)
+
 
         # correct depth data for sensor to sensor distance
-        Luz.reset_index(level=1, inplace=True)
-        Luz.iloc[:, 0] = Luz.iloc[:, 0] + delta_Lu_depth
+        newLuz.reset_index(level=1, inplace=True)
+        newLuz.iloc[:, 0] = newLuz.iloc[:, 0] + delta_Lu_depth
         # newEd.reset_index(level=1,inplace=True)
 
         newEdz.reset_index(level=1, inplace=True)
         newEdz.iloc[:, 0] = newEdz.iloc[:, 0] + delta_Edz_depth
 
         # merge sensor data on time
-        df = pd.merge_asof(Luz, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
+        df = pd.merge_asof(newLuz, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
                            direction="nearest")
         df = pd.merge_asof(df, newEdz, left_index=True, right_index=True, suffixes=('_Luz', '_Edz'),
                            tolerance=pd.Timedelta("2 seconds"),
@@ -174,7 +183,7 @@ class iwr_data:
     #
     #     dateparse = lambda x: pd.datetime.strptime(x, '%Y-%m-%d %H:%M:%S')
     #     if len(file) > 1:
-    #         print('Warning! Multiple files found but only one expected, process first file of the list:')
+    #         print('Warning! Multiple files found but only one expected, process first file of the list:')
     #         print(file)
     #     file = file[0]
     #     df = pd.read_csv(file, sep=';', index_col=[1, 0], na_values=['-NAN'])
@@ -225,16 +234,19 @@ class swr_data:
         Ed, wl_Ed = data().load_csv(self.Edf)
         Lu0, wl_Lu0 = data().load_csv(self.Lu0f)
 
-        # ''' interpolate Ed and Lsky data upon Lt wavelength'''
-        wl = wl_Lu0
+        # ''' interpolate Ed and Lsky data upon common wavelengths'''
+        wl = wl_common
         Lu0.columns = pd.MultiIndex.from_tuples(zip(['Lu0+'] * len(wl), wl), names=['param', 'wl'])
         intEd = interp1d(wl_Ed, Ed.values, fill_value='extrapolate')(wl)
         newEd = pd.DataFrame(index=Ed.index,
                              columns=pd.MultiIndex.from_tuples(zip(['Ed'] * len(wl), wl), names=['param', 'wl']),
                              data=intEd)
+        intLu0 = interp1d(wl_Lu0, Lu0.values, fill_value='extrapolate')(wl)
+        newLu0 = pd.DataFrame(index=Lu0.index, columns=pd.MultiIndex.from_tuples(zip(['Lu0+'] * len(wl), wl),
+                             names=['param', 'wl']), data=intLu0)
 
         # merge sensor data on time
-        df = pd.merge_asof(Lu0, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
+        df = pd.merge_asof(newLu0, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
                            direction="nearest")
 
         # add solar angle data and idpr
@@ -261,7 +273,7 @@ class data:
         print(file)
         dateparse = lambda x: pd.datetime.strptime(x, '%Y-%m-%d %H:%M:%S')
         if len(file) > 1:
-            print('Warning! Multiple files found but only one expected, process first file of the list:')
+            print('Warning! Multiple files found but only one expected, process first file of the list:')
             print(file)
         file_ = file[0]
         # df = pd.read_csv(file, date_parser=dateparse, sep=';', index_col=0, na_values=['-NAN'])
diff --git a/config.py b/config.py
deleted file mode 100644
index fb28a03203a3da990b3bfff26bf3ff38f22e6dd3..0000000000000000000000000000000000000000
--- a/config.py
+++ /dev/null
@@ -1,11 +0,0 @@
-'''where you can set absolute and relative path used in the package'''
-import os
-
-root = os.path.dirname(os.path.abspath(__file__))
-
-M2015_file = os.path.join(root, 'aux/rhoTable_Mobley2015.csv')
-M1999_file = os.path.join(root, 'aux/rhoTable_Mobley1999.csv')
-rhosoaa_fine_file = os.path.join(root, 'aux/surface_reflectance_factor_rho_fine_aerosol_rg0.06_sig0.46.csv')
-rhosoaa_coarse_file = os.path.join(root, 'aux/surface_reflectance_factor_rho_coarse_aerosol_rg0.60_sig0.60.csv')
-iopw_file =  os.path.join(root, 'aux/water_coef.txt')
-F0_file =  os.path.join(root, 'aux/Thuillier_2003_0.3nm.dat')
diff --git a/main.py b/main.py
index 6fec2835d9468df10e4d939f91f9278c158041c6..965ab8e956d4abbb4646e5fb6a08db3f4206ae76 100644
--- a/main.py
+++ b/main.py
@@ -11,7 +11,7 @@ import plotly
 import plotly.graph_objs as go
 
 import utils.utils as u
-from process.process import *
+from trios.process import *
 
 # import aeronet
 # from config import *
@@ -83,7 +83,7 @@ for idpr in idprs:
     # iwr = u.iwr_data(idpr, iwrfiles)
     # if iwr.file:
     #     df, wl = iwr.reader(c[1], c[2], c[3])
-    #     Rrs = iwr_process(df, wl).process()
+    #     Rrs = iwr_process(df, wl).process()
     #     dff = pd.concat([dff, Rrs], axis=1)
 
     # writing output file
diff --git a/setup.py b/setup.py
index e450da80bbb23e295ddcca1fdba2c1b483390345..69e5178b69a6048f36dc0d18a76fbc4a723ef948 100644
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@ from grs import __version__
 setup(
     name='trios',
     version=__version__,
-    packages=find_packages(),
+    packages=find_packages(exclude=['build']),
     package_data={'': ['*.so']},
     #     # If any package contains *.txt files, include them:
     #     '': ['*.txt'],
@@ -17,7 +17,7 @@ setup(
     license='MIT',
     author='T. Harmel',
     author_email='tristan.harmel@ntymail.com',
-    description='Package to help process TriOS radiometer data for various above-water or in-water setups',
+    description='Package to help process TriOS radiometer data for various above-water or in-water setups',
     # TODO update Dependent packages (distributions)
     install_requires=['dash','dash_core_components','dash_html_components','pandas', 'scipy', 'numpy', 'netCDF4', 'matplotlib', 'docopt', 'GDAL', 'python-dateutil'],
 
diff --git a/simulation/rho_snapshot.py b/simulation/rho_snapshot.py
index 42be72f79c80bd5bfda6147416bd0e6ff05ab90b..f6b8af3d66482fe573a26bf97bce36165a7562c9 100644
--- a/simulation/rho_snapshot.py
+++ b/simulation/rho_snapshot.py
@@ -4,7 +4,7 @@ import numpy as np
 import cmocean
 import matplotlib.pyplot as plt
 
-from process.process import *
+from trios.process import *
 
 dirfig = os.path.abspath('/DATA/projet/gilerson/fig')
 awr = awr_process()
diff --git a/trios.egg-info/PKG-INFO b/trios.egg-info/PKG-INFO
index 2dce8186f452463e29e80bd41a86b052f3c7dd02..5f1196fcfac5806a4766d42385f9e7c440dcf778 100644
--- a/trios.egg-info/PKG-INFO
+++ b/trios.egg-info/PKG-INFO
@@ -1,7 +1,7 @@
 Metadata-Version: 1.0
 Name: trios
 Version: 1.1.1
-Summary: Package to help process TriOS radiometer data for various above-water or in-water setups
+Summary: Package to help process TriOS radiometer data for various above-water or in-water setups
 Home-page: https://gitlab.irstea.fr/ETL-TELQUEL/etl/tree/dev/preprocessing/trios
 Author: T. Harmel
 Author-email: tristan.harmel@ntymail.com
diff --git a/trios.egg-info/SOURCES.txt b/trios.egg-info/SOURCES.txt
index bd300bdb4db1283f6d51918a3f6ef7467e93abb6..2299fbd03cb22f7b0ee28e2714f72fa463ad16f3 100644
--- a/trios.egg-info/SOURCES.txt
+++ b/trios.egg-info/SOURCES.txt
@@ -2,14 +2,17 @@ README.md
 setup.py
 aux/__init__.py
 aux/sensors_spec.py
-process/__init__.py
-process/process.py
-process/process_compar_awr.py
-process/process_sabine.py
-process/process_test_setup.py
-process/run_iwr.py
 simulation/__init__.py
 simulation/rho_snapshot.py
+trios/__init__.py
+trios/config.py
+trios/process.py
+trios/process_compar_awr.py
+trios/process_sabine.py
+trios/process_test_setup.py
+trios/run_awr.py
+trios/run_iwr.py
+trios/run_swr.py
 trios.egg-info/PKG-INFO
 trios.egg-info/SOURCES.txt
 trios.egg-info/dependency_links.txt
diff --git a/trios.egg-info/top_level.txt b/trios.egg-info/top_level.txt
index 60bfdba892f3347c5bfa492d9660f3e738aa3cf0..44d99fe34214daa471205cf88fe7da594f57c215 100644
--- a/trios.egg-info/top_level.txt
+++ b/trios.egg-info/top_level.txt
@@ -1,6 +1,6 @@
 aux
 build
-process
 simulation
+trios
 utils
 visu
diff --git a/process/__init__.py b/trios/__init__.py
similarity index 100%
rename from process/__init__.py
rename to trios/__init__.py
diff --git a/trios/config.py b/trios/config.py
new file mode 100644
index 0000000000000000000000000000000000000000..05d5d291871c1035b3b13faca12c1a0f8acc0530
--- /dev/null
+++ b/trios/config.py
@@ -0,0 +1,18 @@
+'''where you can set absolute and relative path used in the package'''
+import os
+import numpy as np
+
+root = os.path.dirname(os.path.abspath(__file__))
+
+M2015_file = os.path.join(root, '../aux/rhoTable_Mobley2015.csv')
+M1999_file = os.path.join(root, '../aux/rhoTable_Mobley1999.csv')
+rhosoaa_fine_file = os.path.join(root, '../aux/surface_reflectance_factor_rho_fine_aerosol_rg0.06_sig0.46.csv')
+rhosoaa_coarse_file = os.path.join(root, '../aux/surface_reflectance_factor_rho_coarse_aerosol_rg0.60_sig0.60.csv')
+iopw_file =  os.path.join(root, '../aux/water_coef.txt')
+F0_file =  os.path.join(root, '../aux/Thuillier_2003_0.3nm.dat')
+
+# set common wavelengths on which spectra are reprojected
+# default 320nm to 950 nm each wl_step nm
+wl_step = 3
+wl_common = np.arange(320,950+wl_step,wl_step)
+
diff --git a/process/process.py b/trios/process.py
similarity index 99%
rename from process/process.py
rename to trios/process.py
index 3c25eb7f467ca22cfd64fa61f07d7a40a9e8fa8b..c8148dacc4e90bc44400f8ff459cf80d526b4506 100644
--- a/process/process.py
+++ b/trios/process.py
@@ -1,15 +1,12 @@
-import numpy as np
 import pandas as pd
 from scipy import interpolate, integrate
-from scipy.optimize import curve_fit
 
-import plotly.plotly as py
 # import plotly.graph_objs as go
 from plotly.graph_objs import *
 
 from utils.utils import reshape as r
 import utils.auxdata as ua
-from config import *
+from trios.config import *
 
 
 class awr_process:
diff --git a/process/process_compar_awr.py b/trios/process_compar_awr.py
similarity index 99%
rename from process/process_compar_awr.py
rename to trios/process_compar_awr.py
index 6fae7a2bc71803a1f1608dd30916dc6edfa23d19..52b90b52ac0cd618c49e5c82a46d3fb2bbd5ed1c 100644
--- a/process/process_compar_awr.py
+++ b/trios/process_compar_awr.py
@@ -14,7 +14,7 @@ from scipy.interpolate import interp1d
 from utils.sunposition import sunpos
 import utils.utils as u
 import utils.auxdata as ua
-from process.process import *
+from trios.process import *
 
 coordf = glob.glob("/DATA/OBS2CO/data/info/mesures_in_situ.csv")[0]
 coords = pd.read_csv(coordf, sep=';')
diff --git a/process/process_sabine.py b/trios/process_sabine.py
similarity index 99%
rename from process/process_sabine.py
rename to trios/process_sabine.py
index e488d27cec96909f09a11bcc0480e082086828eb..71e793c332c5392e0c309da1f102a8540d21a969 100644
--- a/process/process_sabine.py
+++ b/trios/process_sabine.py
@@ -12,7 +12,7 @@ import plotly.graph_objs as go
 import cmocean
 
 import utils.utils as u
-from process.process import *
+from trios.process import *
 from utils.sunposition import sunpos
 
 dir = '/DATA/OBS2CO/data/sabine/data/raw'
diff --git a/process/process_test_setup.py b/trios/process_test_setup.py
similarity index 99%
rename from process/process_test_setup.py
rename to trios/process_test_setup.py
index b3bf378baeb39c2512a4609f6cfa5367d987d71b..7f265bad4996766ba373f5077b1fc89100574b0d 100644
--- a/process/process_test_setup.py
+++ b/trios/process_test_setup.py
@@ -13,7 +13,7 @@ from scipy.interpolate import interp1d
 
 from utils.sunposition import sunpos
 import utils.utils as u
-from process.process import *
+from trios.process import *
 
 
 coordf = glob.glob("/DATA/OBS2CO/data/info/mesures_in_situ.csv")[0]
diff --git a/trios/run_awr.py b/trios/run_awr.py
new file mode 100644
index 0000000000000000000000000000000000000000..467436d259128dda06abc022c08bd0b314813763
--- /dev/null
+++ b/trios/run_awr.py
@@ -0,0 +1,123 @@
+import os
+import pandas as pd
+import numpy as np
+import glob
+import re
+import datetime
+
+from scipy.interpolate import interp1d
+
+from utils.sunposition import sunpos
+import utils.utils as u
+import utils.auxdata as ua
+from trios.process import *
+
+odir = os.path.abspath('/DATA/OBS2CO/data/trios/above_water')
+dirfig = os.path.abspath('/DATA/OBS2CO/data/trios/fig')
+
+awrfiles = glob.glob("/DATA/OBS2CO/data/trios/raw/aw*idpr*.csv")
+
+coordf = glob.glob("/DATA/OBS2CO/data/info/mesures_in_situ_test.csv")[0]
+coords = pd.read_csv(coordf, sep=';')
+coords['Date_prel']=pd.to_datetime(coords['Date_prel'])
+coords['h_debut']=coords['Date_prel'] + pd.to_timedelta(coords['h_debut'])
+coords['h_fin']=coords['Date_prel'] + pd.to_timedelta(coords['h_fin'])
+# TODO warning: time is set as start_time + 15 minutes (can be more accurate)
+coords['Date_prel']= coords['h_debut']+datetime.timedelta(minutes = 15)
+
+iopw = ua.iopw()
+iopw.load_iopw()
+
+def add_curve(ax, x, mean, std, c='red', label=''):
+    ax.plot(x, mean, linestyle='solid', c=c, lw=2.5,
+            alpha=0.8, label=label)
+    ax.fill_between(x,
+                    mean - std,
+                    mean + std, alpha=0.35, color=c)
+
+idpr = '167'
+
+# get idpr numbers
+idprs = np.unique([re.findall(r'idpr(\d+)', x)[0] for x in awrfiles])
+#idprs = np.array(['170'])
+# loop over idpr
+for idpr in idprs:
+    c = coords[coords.ID_prel == int(idpr)]  # .values[0]
+    date=c['Date_prel'].dt.strftime('%Y%m%d')
+    lat = c['Lat'].values[0]
+    lon = c['Lon'].values[0]
+    alt = 0  # c['Altitude']
+    name = c['ID_lac'].values[0]
+
+    ofile=os.path.join(odir,'Rrs_awr_'+date.values[0]+'_idpr'+idpr+'_'+name+'.csv')
+    header = c.stack(dropna=False)
+    header.index = header.index.droplevel()
+    header.to_csv(ofile, header=None)
+
+    # -----------------------------------------------
+    #   AWR processing
+    # -----------------------------------------------
+    azi = 135
+    vza = 40
+    awr = u.awr_data(idpr, awrfiles)
+    if awr.Edf:
+
+        index_idx = [0]
+
+        d = u.data(index_idx)
+        Ed, wl_Ed = d.load_csv(awr.Edf)
+        Lsky, wl_Lsky = d.load_csv(awr.Lskyf)
+        Lt, wl_Lt = d.load_csv(awr.Ltf)
+
+        # ''' interpolate Ed and Lsky data upon Lt wavelength'''
+        wl = wl_Lt
+        Lt.columns = pd.MultiIndex.from_tuples(zip(['Lt'] * len(wl), wl), names=['param', 'wl'])
+        intEd = interp1d(wl_Ed, Ed.values, fill_value='extrapolate')(wl)
+        newEd = pd.DataFrame(index=Ed.index,
+                             columns=pd.MultiIndex.from_tuples(zip(['Ed'] * len(wl), wl), names=['param', 'wl']),
+                             data=intEd)
+        intLsky = interp1d(wl_Lsky, Lsky.values, fill_value='extrapolate')(wl)
+        newLsky = pd.DataFrame(index=Lsky.index, columns=pd.MultiIndex.from_tuples(zip(['Lsky'] * len(wl), wl),
+                                                                                   names=['param', 'wl']), data=intLsky)
+
+        awr = awr_process()
+        ws = [2]
+
+        print(azi, vza)
+
+        Lsky = newLsky  # .loc[(newLsky.index.get_level_values(1) ==  vza) & (newLsky.index.get_level_values(2) ==  azi)]
+        Ed = newEd  # .loc[(newEd.index.get_level_values(1) ==  vza) & (newEd.index.get_level_values(2) ==  azi)]
+        
+        # merge sensor data on time
+        df = pd.merge_asof(Lt, Ed, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
+                           direction="nearest")
+        df = pd.merge_asof(df, Lsky, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
+                           direction="nearest")
+
+        # add solar angle data and idpr
+        # compute solar angle (mean between fisrt and last aqcuisition time
+        df['sza', ''] = np.nan
+        for index, row in df.iterrows():
+            # print index
+            sza = sunpos(index, lat, lon, alt)[1]
+            df.at[index, 'sza'] = sza
+
+        rho_h = awr.get_rho_values([df.sza.mean()], [vza], [azi], wl=wl)
+        Rrs_h = (df.loc[:, 'Lt'] - rho_h * df.loc[:, 'Lsky']) / df.loc[:, 'Ed']
+
+        Rrs_stat = Rrs_h.describe()
+        #Rrs_stat.columns=Rrs_stat.columns.droplevel()
+        Rrs_stat = Rrs_stat.T
+        Rrs_stat.to_csv(ofile,mode='a')
+
+        #-------------------------------
+        # for Mobley values :
+        # rho15 = awr.get_rho_mobley(awr.rhoM2015, [df.sza.mean()], [vza], [azi], [ws])[0]
+        # rho99 = awr.get_rho_mobley(awr.rhoM1999, [df.sza.mean()], [vza], [azi], [ws])[0]
+        #
+        # Rrs15 = (df.loc[:, 'Lt'] - rho15 * df.loc[:, 'Lsky']) / df.loc[:, 'Ed']
+        # Rrs99 = (df.loc[:, 'Lt'] - rho99 * df.loc[:, 'Lsky']) / df.loc[:, 'Ed']
+
+
+
+
diff --git a/process/run_iwr.py b/trios/run_iwr.py
similarity index 93%
rename from process/run_iwr.py
rename to trios/run_iwr.py
index 357695a7049450cc842321ac8a1eaf4f9849cc1d..0b17fa7d8e572ccf95355f6f027b8f7097351895 100644
--- a/process/run_iwr.py
+++ b/trios/run_iwr.py
@@ -15,7 +15,7 @@ import plotly.graph_objs as go
 import utils.utils as u
 import utils.auxdata as ua
 
-from process.process import *
+from trios.process import *
 
 
 
@@ -32,13 +32,13 @@ class fit:
 # ------------------------------------------------
 # above-water data files
 dirfig = os.path.abspath('/DATA/OBS2CO/data/trios/fig')
-dirout = os.path.abspath('/DATA/OBS2CO/data/trios/in_water')
+odir = os.path.abspath('/DATA/OBS2CO/data/trios/in_water')
 
 iwrfiles = glob.glob("/DATA/OBS2CO/data/trios/raw/uw*idpr*.csv")
 
 coordf = glob.glob("/DATA/OBS2CO/data/info/mesures_in_situ.csv")[0]
 coords = pd.read_csv(coordf, sep=';')
-coords
+coords['Date_prel']=pd.to_datetime(coords['Date_prel'])
 # get idpr numbers
 idprs = np.unique([re.findall(r'idpr(\d+)', x)[0] for x in iwrfiles])
 
@@ -65,11 +65,17 @@ for idpr in idprs:#[-1:]:
     print(idpr)
     try:
         c = coords[coords.ID_prel == int(idpr)]  # .values[0]
+        date=c['Date_prel'].dt.strftime('%Y%m%d')
         lat = c['Lat'].values[0]
         lon = c['Lon'].values[0]
         alt = 0  # c['Altitude']
         name = c['ID_lac'].values[0]
 
+        ofile=os.path.join(odir,'Rrs_iwr_'+date.values[0]+'_idpr'+idpr+'_'+name+'.csv')
+        header = c.stack()
+        header.index = header.index.droplevel()
+        header.to_csv(ofile, header=None)
+
         dff = pd.DataFrame()
 
         # -----------------------------------------------
@@ -81,7 +87,7 @@ for idpr in idprs:#[-1:]:
             reflectance = iwr_process(df, wl_).process()
             df = pd.concat([df, reflectance], axis=1)
 
-            df.to_csv(os.path.join(dirout, 'trios_iwr_' + name + '_idpr' + idpr + '.csv'))
+            #df.to_csv(os.path.join(odir, 'trios_iwr_' + name + '_idpr' + idpr + '.csv'))
 
         mean = df.groupby('rounded_depth').mean()
         median = df.groupby('rounded_depth').median()
diff --git a/process/run_swr.py b/trios/run_swr.py
similarity index 80%
rename from process/run_swr.py
rename to trios/run_swr.py
index 4d6f25ce7a283ba2763fb789ad5ec2f3843a2148..9c1b4620705d1f3064022a1a22fc78c53e1f763f 100644
--- a/process/run_swr.py
+++ b/trios/run_swr.py
@@ -1,33 +1,31 @@
-import base64
+import os
 import pandas as pd
 import numpy as np
 import glob
-import io
-import os
-from textwrap import dedent as d
 import re
-import matplotlib.pyplot as plt
-import plotly
-import plotly.graph_objs as go
+import datetime
+
 from scipy.interpolate import interp1d
 
 from utils.sunposition import sunpos
 import utils.utils as u
 import utils.auxdata as ua
-from process.process import *
+from trios.process import *
 
 plot=False
 odir = os.path.abspath('/DATA/OBS2CO/data/trios/surface_water')
-coordf = glob.glob("/DATA/OBS2CO/data/info/mesures_in_situ.csv")[0]
-coords = pd.read_csv(coordf, sep=';')
-coords['Date_prel']=pd.to_datetime(coords['Date_prel'])
-
 dirfig = os.path.abspath('/DATA/OBS2CO/data/trios/fig')
 
-awrfiles = glob.glob("/DATA/OBS2CO/data/trios/raw/aw*idpr*.csv")
-# awrfiles = glob.glob("/DATA/OBS2CO/data/trios/test_setup/raw/aw*idpr*.csv")
 swrfiles = glob.glob("/DATA/OBS2CO/data/trios/raw/Lu0*idpr*.csv")
 
+coordf = glob.glob("/DATA/OBS2CO/data/info/mesures_in_situ_test.csv")[0]
+coords = pd.read_csv(coordf, sep=';')
+coords['Date_prel']=pd.to_datetime(coords['Date_prel'])
+coords['h_debut']=coords['Date_prel'] + pd.to_timedelta(coords['h_debut'])
+coords['h_fin']=coords['Date_prel'] + pd.to_timedelta(coords['h_fin'])
+# TODO warning: time is set as start_time + 15 minutes (can be more accurate)
+coords['Date_prel']= coords['h_debut']+datetime.timedelta(minutes = 15)
+
 iopw = ua.iopw()
 iopw.load_iopw()
 
@@ -53,13 +51,11 @@ for idpr in idprs:
     alt = 0  # c['Altitude']
     name = c['ID_lac'].values[0]
 
-    ofile=os.path.join(odir,'Rrs_swr_idpr'+idpr+'_'+name+'_'+date.values[0]+'.csv')
-    header = c.stack()
+    ofile=os.path.join(odir,'Rrs_swr_'+date.values[0]+'_idpr'+idpr+'_'+name+'.csv')
+    header = c.stack(dropna=False)
     header.index = header.index.droplevel()
     header.to_csv(ofile, header=None)
 
-
-
     # -----------------------------------------------
     #   SWR processing
     # -----------------------------------------------
@@ -79,6 +75,8 @@ for idpr in idprs:
         Rrs_stat = Rrs_stat.T
         Rrs_stat.to_csv(ofile,mode='a')
         if plot:
+            import matplotlib.pyplot as plt
+
             fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(10, 8))
             fig.subplots_adjust(left=0.1, right=0.9, hspace=.5, wspace=0.65)
             add_curve(ax, wl_swr, Rrs_swr.transpose().mean(axis=1), Rrs_swr.transpose().std(axis=1), label='swr', c='black')
diff --git a/utils/DBtrios.py b/utils/DBtrios.py
index abac7ee65796f4def4ed745c4fd189f91f904cf5..a1c883b02d3d928cba13c13ff7364eb1d671e6d5 100644
--- a/utils/DBtrios.py
+++ b/utils/DBtrios.py
@@ -58,7 +58,7 @@ def odbc2lst(conn, query):
 """ Processing """
 
 '''
-STEP 1: process TRIOS data
+STEP 1: process TRIOS data
 '''
 
 # connect to bd TRIOS
@@ -132,7 +132,7 @@ for key in dicmeth.keys():
                     pass
 
 '''
-STEP 2: process HOBO data
+STEP 2: process HOBO data
 '''
 
 if hobo_process:
diff --git a/utils/auxdata.py b/utils/auxdata.py
index 2a91ca14d60edb17fe4962aeb91f23f1560deb3a..3e8a65f196a5e063d3323644c045c8780737c490 100644
--- a/utils/auxdata.py
+++ b/utils/auxdata.py
@@ -1,10 +1,9 @@
 ''' modules dedicated to structure and exploit ancillary data (normally stored in aux folder)'''
 
 import pandas as pd
-import numpy as np
 from scipy.interpolate import interp1d
 
-from config import *
+from trios.config import *
 
 
 class iopw:
diff --git a/utils/utils.py b/utils/utils.py
index 53ecc6918784092d8b477577764b0d22d29b68a7..b63327c6ffd8f5a72a7852105448579f7851c317 100644
--- a/utils/utils.py
+++ b/utils/utils.py
@@ -1,9 +1,8 @@
 import pandas as pd
-import numpy as np
 from scipy.interpolate import interp1d
 
 from utils.sunposition import sunpos
-
+from trios.config import *
 
 class awr_data:
     '''
@@ -46,18 +45,23 @@ class awr_data:
         Lsky, wl_Lsky = d.load_csv(self.Lskyf)
         Lt, wl_Lt = d.load_csv(self.Ltf)
 
-        # ''' interpolate Ed and Lsky data upon Lt wavelength'''
-        wl = wl_Lt
-        Lt.columns = pd.MultiIndex.from_tuples(zip(['Lt'] * len(wl), wl), names=['param', 'wl'])
+        # ''' interpolate Ed, Lt and Lsky data upon common wavelength'''
+        wl = wl_common
+
         intEd = interp1d(wl_Ed, Ed.values, fill_value='extrapolate')(wl)
-        newEd = pd.DataFrame(index=Ed.index,
-                             columns=pd.MultiIndex.from_tuples(zip(['Ed'] * len(wl), wl), names=['param', 'wl']),
-                             data=intEd)
+        newEd = pd.DataFrame(index=Ed.index,columns=pd.MultiIndex.from_tuples(zip(['Ed'] * len(wl), wl),
+                             names=['param', 'wl']),data=intEd)
+
+        intLt = interp1d(wl_Lt, Lt.values, fill_value='extrapolate')(wl)
+        newLt = pd.DataFrame(index=Lt.index,columns=pd.MultiIndex.from_tuples(zip(['Lt'] * len(wl), wl),
+                             names=['param', 'wl']),data=intLt)
+
         intLsky = interp1d(wl_Lsky, Lsky.values, fill_value='extrapolate')(wl)
         newLsky = pd.DataFrame(index=Lsky.index, columns=pd.MultiIndex.from_tuples(zip(['Lsky'] * len(wl), wl),
-                                                                                   names=['param', 'wl']), data=intLsky)
+                               names=['param', 'wl']), data=intLsky)
+
         # merge sensor data on time
-        df = pd.merge_asof(Lt, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
+        df = pd.merge_asof(newLt, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
                            direction="nearest")
         df = pd.merge_asof(df, newLsky, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
                            direction="nearest")
@@ -126,32 +130,35 @@ class iwr_data:
         # Edz.mask(Edz<0,inplace=True)
         # Luz.mask(Luz<0,inplace=True)
 
-
-
         # copy depth data to Ed frame on date index
         # Ed.index = Ed.index.droplevel(level=1)
 
-        #''' interpolate Ed and Lsky data upon Lt wavelength'''
-        wl = wl_Luz
-        Luz.columns = pd.MultiIndex.from_tuples(list(zip(['Luz'] * len(wl), wl)), names=['param', 'wl'])
+        #''' interpolate Ed, Edz and Luz data upon common wavelength'''
+        wl = wl_common
         intEd = interp1d(wl_Ed, Ed.values, fill_value='extrapolate')(wl)
         newEd = pd.DataFrame(index=Ed.index.get_level_values(0),
                              columns=pd.MultiIndex.from_tuples(list(zip(['Ed'] * len(wl), wl)), names=['param', 'wl']),
                              data=intEd)
+
         intEdz = interp1d(wl_Edz, Edz.values, fill_value='extrapolate')(wl)
         newEdz = pd.DataFrame(index=Edz.index, columns=pd.MultiIndex.from_tuples(list(zip(['Edz'] * len(wl), wl)),
-                                                                                 names=['param', 'wl']), data=intEdz)
+                              names=['param', 'wl']), data=intEdz)
+
+        intLuz = interp1d(wl_Luz, Luz.values, fill_value='extrapolate')(wl)
+        newLuz = pd.DataFrame(index=Luz.index, columns=pd.MultiIndex.from_tuples(list(zip(['Luz'] * len(wl), wl)),
+                              names=['param', 'wl']), data=intLuz)
+
 
         # correct depth data for sensor to sensor distance
-        Luz.reset_index(level=1, inplace=True)
-        Luz.iloc[:, 0] = Luz.iloc[:, 0] + delta_Lu_depth
+        newLuz.reset_index(level=1, inplace=True)
+        newLuz.iloc[:, 0] = newLuz.iloc[:, 0] + delta_Lu_depth
         # newEd.reset_index(level=1,inplace=True)
 
         newEdz.reset_index(level=1, inplace=True)
         newEdz.iloc[:, 0] = newEdz.iloc[:, 0] + delta_Edz_depth
 
         # merge sensor data on time
-        df = pd.merge_asof(Luz, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
+        df = pd.merge_asof(newLuz, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
                            direction="nearest")
         df = pd.merge_asof(df, newEdz, left_index=True, right_index=True, suffixes=('_Luz', '_Edz'),
                            tolerance=pd.Timedelta("2 seconds"),
@@ -174,7 +181,7 @@ class iwr_data:
     #
     #     dateparse = lambda x: pd.datetime.strptime(x, '%Y-%m-%d %H:%M:%S')
     #     if len(file) > 1:
-    #         print('Warning! Multiple files found but only one expected, process first file of the list:')
+    #         print('Warning! Multiple files found but only one expected, process first file of the list:')
     #         print(file)
     #     file = file[0]
     #     df = pd.read_csv(file, sep=';', index_col=[1, 0], na_values=['-NAN'])
@@ -225,16 +232,18 @@ class swr_data:
         Ed, wl_Ed = data().load_csv(self.Edf)
         Lu0, wl_Lu0 = data().load_csv(self.Lu0f)
 
-        # ''' interpolate Ed and Lsky data upon Lt wavelength'''
-        wl = wl_Lu0
-        Lu0.columns = pd.MultiIndex.from_tuples(zip(['Lu0+'] * len(wl), wl), names=['param', 'wl'])
+        # ''' interpolate Ed and Lsky data upon common wavelengths'''
+        wl = wl_common
         intEd = interp1d(wl_Ed, Ed.values, fill_value='extrapolate')(wl)
         newEd = pd.DataFrame(index=Ed.index,
                              columns=pd.MultiIndex.from_tuples(zip(['Ed'] * len(wl), wl), names=['param', 'wl']),
                              data=intEd)
+        intLu0 = interp1d(wl_Lu0, Lu0.values, fill_value='extrapolate')(wl)
+        newLu0 = pd.DataFrame(index=Lu0.index, columns=pd.MultiIndex.from_tuples(zip(['Lu0+'] * len(wl), wl),
+                             names=['param', 'wl']), data=intLu0)
 
         # merge sensor data on time
-        df = pd.merge_asof(Lu0, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
+        df = pd.merge_asof(newLu0, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
                            direction="nearest")
 
         # add solar angle data and idpr
@@ -261,7 +270,7 @@ class data:
         print(file)
         dateparse = lambda x: pd.datetime.strptime(x, '%Y-%m-%d %H:%M:%S')
         if len(file) > 1:
-            print('Warning! Multiple files found but only one expected, process first file of the list:')
+            print('Warning! Multiple files found but only one expected, process first file of the list:')
             print(file)
         file_ = file[0]
         # df = pd.read_csv(file, date_parser=dateparse, sep=';', index_col=0, na_values=['-NAN'])