diff --git a/build/__init__.py b/build/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/build/lib/__init__.py b/build/lib/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/build/lib/process/__init__.py b/build/lib/process/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/build/lib/process/process.py b/build/lib/process/process.py
deleted file mode 100644
index db4bc65b367d873c51e28c9f459203e8b6e00084..0000000000000000000000000000000000000000
--- a/build/lib/process/process.py
+++ /dev/null
@@ -1,414 +0,0 @@
-import numpy as np
-import pandas as pd
-from scipy import interpolate, integrate
-from scipy.optimize import curve_fit
-
-import plotly.plotly as py
-# import plotly.graph_objs as go
-from plotly.graph_objs import *
-
-from utils.utils import reshape as r
-import utils.auxdata as ua
-from ..config import *
-
-
-class awr_process:
-    def __init__(self, df=None, wl=None):
-        self.df = df
-        self.aot = 0.1
-        self.ws = 2
-        self.wl = wl
-        self.rhosoaa_fine_file = rhosoaa_fine_file
-        self.rhosoaa_coarse_file = rhosoaa_coarse_file
-        self.M1999_file = M1999_file
-        self.M2015_file = M2015_file
-        self.load_rho_lut()
-        self.rho = self.rhosoaa_fine
-
-    def load_rho_lut(self):
-        self.rhosoaa_fine = pd.read_csv(self.rhosoaa_fine_file, index_col=[0, 1, 2, 3, 4, 5])
-        self.rhosoaa_coarse = pd.read_csv(self.rhosoaa_coarse_file, index_col=[0, 1, 2, 3, 4, 5])
-        self.rhoM1999 = pd.read_csv(self.M1999_file, skiprows=7, index_col=[0, 1, 2, 3])
-        self.rhoM2015 = pd.read_csv(self.M2015_file, skiprows=8, index_col=[0, 1, 2, 3])
-        self.rhoM1999.dropna(inplace=True)
-        self.rhoM2015.dropna(inplace=True)
-
-    def get_rho_values(self, sza, vza, azi, ws=[2], aot=[0.1], wl=[550], sunglint=False):
-        '''
-        Interpolate the rho factor values from tabulated data
-
-        :param sza: solar zenith angle in deg, array-like
-        :param vza: view zenith angle in deg, array-like
-        :param azi: relative azimuth in deg (=0 when looking at Sun), array-like
-        :param ws: wind speed in m/s (used in the Cox-Munk parametrization of surface roughness), array-like
-        :param aot: aerosol optical thickness at 550 nm, array-like
-        :param wl: wavelength in nm, array-like
-        :param sunglint: add sunglint component in rho calculation if True
-        :return:
-        '''
-
-        grid = self.rho.rho.index.levels
-
-        # convert pandas dataframe into 6D array of the tabulated rho values for interpolation
-        rhoname = 'rho'
-        if sunglint:
-            rhoname = 'rho_g'
-
-        rho_6d = r().df2ndarray(self.rho, rhoname)
-
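-        # bicubic spline first over the viewing geometry (azi, vza), then over (ws, aot, wl, sza)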
-        rho_ = calc().spline_2d(grid[-2:], rho_6d, (azi, vza))
-
-        rho_wl = calc().spline_4d(grid[:-2], rho_, (ws, aot, wl, sza))
-
-        return rho_wl.squeeze()
-
-    def get_rho_mobley(self, rhodf, sza, vza, azi, ws):
-        '''
-        Get the Mobley rho factor from cubic interpolation in the tabulated values
-
-        :param rhodf:
-        :param sza:
-        :param vza:
-        :param azi:
-        :param ws:
-        :return:
-        '''
-
-        rhodf = rhodf.query('sza<75 & vza >0')
-        rhodf.index = rhodf.index.remove_unused_levels()
-
-        # grid {wind, sza, vza, azi}
-        grid = rhodf.index.levels
-
-        rho_ = r().df2ndarray(rhodf, 'rho')
-        rho_mobley = calc().spline_4d(grid, rho_, (ws, sza, vza, azi))
-        return rho_mobley
-
-    def call_process(self, vza=[40], azi=[135], ws=2, aot=0.1):
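-        # extract Lt, Lsky, Ed and the mean sza from the dataframe, then compute above-water Rrs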
-        wl = self.wl
-        Lt = self.df.loc[:, ("Lt")]
-        Lsky = self.df.loc[:, ("Lsky")]
-        Ed = self.df.loc[:, ("Ed")]
-        sza = self.df.loc[:, ("sza")].values.mean()
-        Rrs, rho = self.process(wl, Lt, Lsky, Ed, sza, vza, azi, ws, aot)
-        Rrs.columns = pd.MultiIndex.from_product([['Rrs(awr)'], Rrs.columns], names=['param', 'wl'])
-        self.Rrs = Rrs
-        return self.Rrs
-
-    def process(self, wl, Lt, Lsky, Ed, sza, vza=[40], azi=[135], ws=[2], aot=[0.1]):
-        '''
-        Compute above-water remote sensing reflectance: Rrs = (Lt - rho * Lsky) / Ed
-
-        :param wl: wavelength in nm, array-like
-        :param Lt: total upwelling radiance measured above the water surface
-        :param Lsky: sky radiance
-        :param Ed: downwelling irradiance
-        :param sza: solar zenith angle in deg
-        :param vza: view zenith angle in deg, array-like
-        :param azi: relative azimuth in deg, array-like
-        :param ws: wind speed in m/s, array-like
-        :param aot: aerosol optical thickness at 550 nm, array-like
-        :return: Rrs and the interpolated rho factor
-        '''
-
-        rho = self.get_rho_values([sza], vza, azi, wl=wl, ws=ws, aot=aot)
-        self.Rrs = (Lt - rho * Lsky) / Ed
-
-        return self.Rrs, rho
-
-
-class swr_process:
-    def __init__(self, df=None, wl=None, ):
-        self.df = df
-        self.wl = wl
-
-    def call_process(self, shade_corr=False):
-        wl = self.wl
-        Lu = self.df.loc[:, ("Lu0+")]
-        Ed = self.df.loc[:, ("Ed")]
-        sza = self.df.loc[:, ("sza")].values.mean()
-        Rrs = self.process(Lu, Ed, sza, wl, shade_corr=shade_corr)
-        Rrs.columns = pd.MultiIndex.from_product([['Rrs(swr)'], Rrs.columns], names=['param', 'wl'])
-        self.Rrs = Rrs
-        return Rrs
-
-    def process(self, Lu, Ed, sza, wl, R=0.05, shade_corr=False):
-        Rrs = Lu / Ed
-        ang_w = calc().angle_w(sza)
-
-        iopw = ua.iopw()
-        iopw.load_iopw()
-        iopw.get_iopw(wl)
-        a, bb = iopw.aw, iopw.bbw
-        # TODO add particulate and dissolved component to a and bb values
-        # a,bb = aux.get_iop(..., withwater=True)
-        acdom = ua.cdom(0.5, wl).get_acdom()
-        a = a + acdom + 0.4
-        bb = bb + 0.05
-        if shade_corr:
-            Rrs = self.shade_corr(Rrs, R, ang_w, a, bb, wl)
-        # Rrs.columns = pd.MultiIndex.from_product([['Rrs(swr)'], Rrs.columns], names=['param', 'wl'])
-        self.Rrs = Rrs
-        self.a = a
-        self.bb = bb
-        self.acdom = acdom
-        return self.Rrs
-
-    def epsilon(self, K, R, ang_w):
-        '''
-        epsilon from Shang et al, 2017, Applied Optics
-        :param K:
-        :param R:
-        :param ang_w: Sun zenith angle below surface (in deg)
-        :return:
-        '''
-
-        self.eps = np.array(1 - np.exp(-K * R / np.tan(np.radians(ang_w))))
-        return self.eps
-
-    def K(self, a, bb, ang_w):
-        '''
-        K (sum of the attenuation coefficients of Lu inside and outside the shade) from Shang et al, 2017, Applied Optics
-        :param a: total absorption coefficient (m-1)
-        :param bb: total backscattering coefficient (m-1)
-        :param ang_w: Sun zenith angle below surface (in deg)
-        :return:
-        '''
-        sin_ang_w = np.sin(np.radians(ang_w))
-        self.K_ = (3.15 * sin_ang_w + 1.15) * a * np.exp(-1.57 * bb) \
-                  + (5.62 * sin_ang_w - 0.23) * bb * np.exp(-0.5 * a)
-        return self.K_
-
-    def shade_corr(self, Rrs, R, ang_w, a, bb, wl, wl_cutoff=900):
-        '''
-        Correction of shading error from Shang et al, 2017, Applied Optics
-        :param Rrs:
-        :param R:
-        :param ang_w:
-        :param a:
-        :param bb:
-        :return:
-        '''
-
-        K = self.K(a, bb, ang_w)
-        eps = self.epsilon(K, R, ang_w)
-        eps[wl > wl_cutoff] = 0
-        self.Rrs = Rrs / (1 - eps)
-        return self.Rrs
-
-
-class iwr_process:
-    def __init__(self, df=None, wl=None, ):
-        self.df = df
-        self.wl = wl
-
-    def process(self):
-        wl = self.wl
-        df = self.df
-
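-        # in-water reflectance: upwelling radiance Luz over downwelling irradiance Edz at each depth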
-        reflectance = df.loc[:, ("Luz")] / df.loc[:, ("Edz")]
-        reflectance.columns = pd.MultiIndex.from_product([['reflectance'], reflectance.columns], names=['param', 'wl'])
-        self.reflectance = reflectance
-
-        df['rounded_depth', ''] = df.prof_Edz.round(1)
-        df.groupby('rounded_depth').mean()
-
-        return self.reflectance
-
-    @staticmethod
-    def f_Edz(depth, Kd, Ed0):
-        '''simple Edz model for homogeneous water column'''
-        return Ed0 * np.exp(-Kd*depth)
-
-    @staticmethod
-    def f_logEdz(depth, Kd, Ed0):
-        '''simple Edz model for homogeneous water column'''
-        return np.log(1 + iwr_process.f_Edz(depth, Kd, Ed0)) #Ed0) -Kd*depth
-
-
-    def Kd(self, depth, Edz):
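-        # finite-difference slope of Edz with depth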
-        Kd = np.diff(Edz) / np.diff(depth)
-        return Kd
-
-    def plot_raw(self,x='Luz',y='prof_Luz'):
-        trace = Scattergl(
-            x=self.df[x].values,
-            y=self.df[y].values,
-
-            text=self.df.index.get_level_values(0),
-            hoverinfo="text",
-            marker={
-                'size': 7,
-                'opacity': 0.5,
-                # 'color': 'rgba({}, {}, {}, {})'.format(*s_m.to_rgba(parameters[i]).flatten()),
-                # x.unique(),#color': df.index.get_level_values(0),
-                'line': {'width': 0.5, 'color': 'white'},
-            },
-            # error_y=ErrorY(
-            #     type='data',
-            #     array=df['Error'],
-            #     thickness=1.5,
-            #     width=2,
-            #     color='#B4E8FC'
-            # ),
-
-        )
-
-        layout = Layout(
-            height=450,
-            xaxis=dict(
-                range=[0, 200],
-                showgrid=False,
-                showline=False,
-                zeroline=False,
-                fixedrange=True,
-                tickvals=[0, 50, 100, 150, 200],
-                ticktext=['200', '150', '100', '50', '0'],
-                title=''
-            ),
-            yaxis=dict(
-                range=[min(-5, min(self.df[y])),
-                       max(0, max(self.df[y]))],
-                showline=False,
-                fixedrange=True,
-                zeroline=False,
-                # nticks=max(6, round(df['Speed'].iloc[-1]/10))
-            ),
-            margin=Margin(
-                t=45,
-                l=50,
-                r=50
-            )
-        )
-
-        return Figure(data=[trace], layout=layout)
-
-
-class self_shading:
-    def __init__(self):
-        '''Gordon & Ding (1992) values for the self-shading epsilon factor'''
-
-        self.ang = np.linspace(0, 90, 10)
-        self.eps_dir_LuZ = [2.17, 2.17, 2.23, 2.23, 2.29, 2.37, 2.41, 2.45, 2.45, 2.45]
-        self.eps_dir_EuZ = [3.14, 3.14, 3.05, 2.94, 2.80, 2.64, 2.47, 2.33, 2.33, 2.33]
-        self.eps_dif_LuZ = 4.61
-        self.eps_dif_EuZ = 2.70
-
-    def epsilon(self, sza):
-        eps = interpolate.interp1d(self.ang, self.eps_dif_EuZ)(sza)
-        return eps
-
-
-class calc:
-    def __init__(self):
-        pass
-
-    def PAR(self, wl, Ed):
-        '''
-        Compute instantaneous PAR from Ed spectrum.
-        PAR in mW m-2
-        PAR_quanta in µmol photon m-2 s-1
-        :param wl:
-        :param Ed:
-        :return:
-        '''
-        # ---------------------------------------------
-        #      PARAMETERS
-        # Planck constant, h = 6.6260695729e-34 J s
-        # light speed, c = 2.99792458e8 m s-1
-        # Avogadro number, N_A = 6.0221412927e23 mol-1
-        # the powers of ten (1e-34 * 1e8 * 1e23) net to 1e-3, carried below on h
-        h = 6.6260695729e-3
-        c = 2.99792458e0
-        Avogadro = 6.0221412927e0
-        hc = Avogadro * h * c
-        # ---------------------------------------------
-
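-        # restrict to the 400-700 nm PAR range and integrate with the trapezoidal rule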
-        idx_par = (wl >= 400) & (wl <= 700)
-        wl = wl[idx_par]
-        Ed = Ed[idx_par]
-        par = integrate.trapz(Ed, wl)
-        par_quanta = integrate.trapz(np.multiply(wl,Ed), wl) / hc
-        return par, par_quanta
-
-    def earth_sun_correction(self, dayofyear):
-        '''
-        Earth-Sun distance correction factor for adjustment of mean solar irradiance
-
-        :param dayofyear:
-        :return: correction factor
-        '''
-        theta = 2. * np.pi * dayofyear / 365
-        d2 = 1.00011 + 0.034221 * np.cos(theta) + 0.00128 * np.sin(theta) + \
-             0.000719 * np.cos(2 * theta) + 0.000077 * np.sin(2 * theta)
-        return d2
-
-    def bidir(self, sza, vza, azi):
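-        # placeholder: no bidirectional (BRDF) correction implemented yet, returns 1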
-
-        bidir = 1
-
-        return bidir
-
-    def angle_w(self, angle_air, n=1.334):
-        '''
-        convert above surface angle (angle_air) into sub-surface angle
-        :param angle_air: above-surface angle in deg
-        :param n: refractive index of water
-        :return: sub-surface angle in deg
-        '''
-        return np.degrees(np.arcsin(np.sin(np.radians(angle_air)) / n))
-
-    def spline_2d(self, gin, arr, gout):
-        '''
-        Interpolation of a 6D array (arr) with bicubic splines on a 2D grid
-        corresponding to the 5th and 6th dimensions of arr.
-        Return 4D array interpolated on gout.
-
-        :param gin: regular 2D grid of the tabulated data (tuple/array/list of arrays)
-        :param arr: tabulated data (N dimensions, interpolation on N-1 and N)
-        :param gout: new 2D grid on which data are interpolated (both dimensions of the same length);
-                    (tuple/array/list of arrays)
-        :return: Interpolated data (1D or 3D array depending on the dimension shapes of gout)
-        '''
-
-        N = arr.shape
-        interp = np.zeros(N[:-2])
-
-        for i in range(N[0]):
-            for j in range(N[1]):
-                for k in range(N[2]):
-                    for l in range(N[3]):
-                        interp[i, j, k, l] = interpolate.RectBivariateSpline(gin[0], gin[1], arr[i, j, k, l, ...])(
-                            gout[0], gout[1], grid=False)
-
-        return interp
-
-    def spline_4d(self, gin, lut, gout):
-        '''
-        Interpolation with two successive bicubic splines on a regular 4D grid.
-        Designed for interpolation in radiative transfer look-up tables with the two last dimensions
-        (i.e., wavelength and solar zenith angle) of the same length.
-        Those dimensions are then reduced/merged to a single one to get interpolated data on a 3D grid.
-
-        :param gin: regular 4D grid of the tabulated data (tuple/array/list of arrays)
-        :param lut: tabulated data
-        :param gout: new 4D grid on which data are interpolated (with dims 2 and 3 of the same length);
-                    (tuple/array/list of arrays)
-        :return: Interpolated data (1D or 3D array depending on the dimension shapes of gout)
-        '''
-
-        N = len(gin[0]), len(gin[1]), len(gin[2]), len(gin[3])
-        Nout = len(gout[0]), len(gout[1]), len(gout[2])
-        tmp = np.zeros([N[0], N[1], Nout[2]])
-
-        for i in range(N[0]):
-            for j in range(N[1]):
-                tmp[i, j, :] = interpolate.RectBivariateSpline(gin[2], gin[3], lut[i, j, :, :])(gout[2], gout[3], grid=False)
-        if Nout[0] == Nout[1] == 1:
-            interp = np.ndarray(Nout[2])
-            for iband in range(Nout[2]):
-                interp[iband] = interpolate.RectBivariateSpline(gin[0], gin[1], tmp[:, :, iband])(gout[0], gout[1], grid=False)
-        else:
-            interp = np.ndarray([Nout[0], Nout[1], Nout[2]])
-            for iband in range(Nout[2]):
-                interp[:, :, iband] = interpolate.RectBivariateSpline(gin[0], gin[1], tmp[:, :, iband])(gout[0], gout[1],
-                                                                                               grid=True)
-
-        return interp
diff --git a/build/lib/process/process_compar_awr.py b/build/lib/process/process_compar_awr.py
deleted file mode 100644
index 52b90b52ac0cd618c49e5c82a46d3fb2bbd5ed1c..0000000000000000000000000000000000000000
--- a/build/lib/process/process_compar_awr.py
+++ /dev/null
@@ -1,152 +0,0 @@
-import base64
-import pandas as pd
-import numpy as np
-import glob
-import io
-import os
-from textwrap import dedent as d
-import re
-import matplotlib.pyplot as plt
-import plotly
-import plotly.graph_objs as go
-from scipy.interpolate import interp1d
-
-from utils.sunposition import sunpos
-import utils.utils as u
-import utils.auxdata as ua
-from trios.process import *
-
-coordf = glob.glob("/DATA/OBS2CO/data/info/mesures_in_situ.csv")[0]
-coords = pd.read_csv(coordf, sep=';')
-dirfig = os.path.abspath('/DATA/OBS2CO/data/trios/fig')
-
-awrfiles = glob.glob("/DATA/OBS2CO/data/trios/raw/aw*idpr*.csv")
-# awrfiles = glob.glob("/DATA/OBS2CO/data/trios/test_setup/raw/aw*idpr*.csv")
-swrfiles = glob.glob("/DATA/OBS2CO/data/trios/raw/Lu0*idpr*.csv")
-
-iopw = ua.iopw()
-iopw.load_iopw()
-
-def add_curve(ax, x, mean, std, c='red', label=''):
-    ax.plot(x, mean, linestyle='solid', c=c, lw=2.5,
-            alpha=0.8, label=label)
-    ax.fill_between(x,
-                    mean - std,
-                    mean + std, alpha=0.35, color=c)
-
-
-idpr = '167'
-
-# get idpr numbers
-idprs = np.unique([re.findall(r'idpr(\d+)', x)[0] for x in swrfiles])
-#idprs = np.array(['170'])
-# loop over idpr
-for idpr in idprs:
-    c = coords[coords.ID_prel == int(idpr)]  # .values[0]
-    lat = c['Lat'].values[0]
-    lon = c['Lon'].values[0]
-    alt = 0  # c['Altitude']
-    name = c['ID_lac'].values[0]
-
-    fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(10, 8))
-    fig.subplots_adjust(left=0.1, right=0.9, hspace=.5, wspace=0.65)
-
-    # -----------------------------------------------
-    #   SWR processing
-    # -----------------------------------------------
-
-    uswr = u.swr_data(idpr, swrfiles)
-    if uswr.file:
-        df, wl_swr = uswr.reader(lat, lon, alt)
-        df['sza', ''] = np.nan
-        for index, row in df.iterrows():
-            # print index
-            sza = sunpos(index, lat, lon, alt)[1]
-            df.at[index, 'sza'] = sza
-        swr = swr_process(df, wl_swr)
-        Rrs_swr = swr.call_process()
-        add_curve(ax, wl_swr, Rrs_swr.transpose().mean(axis=1), Rrs_swr.transpose().std(axis=1), label='swr', c='black')
-        Rrs_swr = swr.call_process(shade_corr=True)
-        add_curve(ax, wl_swr, Rrs_swr.transpose().mean(axis=1), Rrs_swr.transpose().std(axis=1), label='swr', c='red')
-
-    # -----------------------------------------------
-    #   AWR processing
-    # -----------------------------------------------
-    azi = 135
-    vza = 40
-    awr = u.awr_data(idpr, awrfiles)
-    if awr.Edf:
-
-        index_idx = [0]
-
-        d = u.data(index_idx)
-        Ed, wl_Ed = d.load_csv(awr.Edf)
-        Lsky, wl_Lsky = d.load_csv(awr.Lskyf)
-        Lt, wl_Lt = d.load_csv(awr.Ltf)
-
-        # ''' interpolate Ed and Lsky data upon Lt wavelength'''
-        wl = wl_Lt
-        Lt.columns = pd.MultiIndex.from_tuples(zip(['Lt'] * len(wl), wl), names=['param', 'wl'])
-        intEd = interp1d(wl_Ed, Ed.values, fill_value='extrapolate')(wl)
-        newEd = pd.DataFrame(index=Ed.index,
-                             columns=pd.MultiIndex.from_tuples(zip(['Ed'] * len(wl), wl), names=['param', 'wl']),
-                             data=intEd)
-        intLsky = interp1d(wl_Lsky, Lsky.values, fill_value='extrapolate')(wl)
-        newLsky = pd.DataFrame(index=Lsky.index, columns=pd.MultiIndex.from_tuples(zip(['Lsky'] * len(wl), wl),
-                                                                                   names=['param', 'wl']), data=intLsky)
-
-        awr = awr_process()
-        ws = [2]
-
-        print(azi, vza)
-
-        Lsky = newLsky  # .loc[(newLsky.index.get_level_values(1) ==  vza) & (newLsky.index.get_level_values(2) ==  azi)]
-        Ed = newEd  # .loc[(newEd.index.get_level_values(1) ==  vza) & (newEd.index.get_level_values(2) ==  azi)]
-
-        # Lsky_idx = Lsky.index
-        # Ed_idx= Ed.index
-        # Lt_idx = Lt.index
-        # Lsky.reset_index(level=[1,2],inplace=True)
-        # Ed.reset_index(level=[1,2],inplace=True)
-        # Lt.reset_index(level=[1,2],inplace=True)
-
-        # merge sensor data on time
-        df = pd.merge_asof(Lt, Ed, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
-                           direction="nearest")
-        df = pd.merge_asof(df, Lsky, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
-                           direction="nearest")
-
-        # add solar angle data and idpr
-        # compute solar angle (mean between first and last acquisition time)
-        df['sza', ''] = np.nan
-        for index, row in df.iterrows():
-            # print index
-            sza = sunpos(index, lat, lon, alt)[1]
-            df.at[index, 'sza'] = sza
-
-        rho_h = awr.get_rho_values([df.sza.mean()], [vza], [azi], wl=wl)
-        rho15 = awr.get_rho_mobley(awr.rhoM2015, [df.sza.mean()], [vza], [azi], [ws])
-        rho99 = awr.get_rho_mobley(awr.rhoM1999, [df.sza.mean()], [vza], [azi], [ws])
-
-        Rrs_h = (df.loc[:, 'Lt'] - rho_h * df.loc[:, 'Lsky']) / df.loc[:, 'Ed']
-        Rrs15 = (df.loc[:, 'Lt'] - rho15 * df.loc[:, 'Lsky']) / df.loc[:, 'Ed']
-
-        Rrs99 = (df.loc[:, 'Lt'] - rho99 * df.loc[:, 'Lsky']) / df.loc[:, 'Ed']
-        # plt.figure()
-
-        add_curve(ax, wl, Rrs15.transpose().mean(axis=1), Rrs15.transpose().std(axis=1),
-                  label='M2015 (' + str(rho15) + ')')
-        add_curve(ax, wl, Rrs99.transpose().mean(axis=1), Rrs99.transpose().std(axis=1), c='orange',
-                  label='M1999(' + str(rho99) + ')')
-        add_curve(ax, wl, Rrs_h.transpose().mean(axis=1), Rrs_h.transpose().std(axis=1), c='grey',
-                  label='h(' + str(rho_h.mean()) + ')')
-
-    ax.set_title('azi=' + str(azi) + ', vza=' + str(vza) + ', sza=' + str(sza))
-
-    ax.legend(loc='best', frameon=False)
-
-    ax.set_ylabel(r'$R_{rs}\  (sr^{-1})$')
-    ax.set_xlabel(r'Wavelength (nm)')
-    fig.savefig(os.path.join(dirfig, 'trios_awr_' + name + '_idpr' + idpr + '.png'))
-    plt.close()
-
diff --git a/build/lib/process/process_test_setup.py b/build/lib/process/process_test_setup.py
deleted file mode 100644
index 7f265bad4996766ba373f5077b1fc89100574b0d..0000000000000000000000000000000000000000
--- a/build/lib/process/process_test_setup.py
+++ /dev/null
@@ -1,135 +0,0 @@
-import base64
-import pandas as pd
-import numpy as np
-import glob
-import io
-import os
-from textwrap import dedent as d
-import re
-import matplotlib.pyplot as plt
-import plotly
-import plotly.graph_objs as go
-from scipy.interpolate import interp1d
-
-from utils.sunposition import sunpos
-import utils.utils as u
-from trios.process import *
-
-
-coordf = glob.glob("/DATA/OBS2CO/data/info/mesures_in_situ.csv")[0]
-coords = pd.read_csv(coordf, sep=';')
-awrfiles = glob.glob("/DATA/OBS2CO/data/trios/raw/aw*idpr*.csv")
-
-awrfiles = glob.glob("/DATA/OBS2CO/data/trios/test_setup/raw/aw*idpr*.csv")
-swrfiles = glob.glob("/DATA/OBS2CO/data/trios/raw/Lu0*idpr*.csv")
-
-idpr='167'
-
-c = coords[coords.ID_prel == int(idpr)]  # .values[0]
-lat = c['Lat'].values[0]
-lon = c['Lon'].values[0]
-alt = 0 #c['Altitude']
-name = c['ID_lac'].values[0]
-
-# -----------------------------------------------
-#   SWR processing
-# -----------------------------------------------
-
-swr = u.swr_data(idpr, swrfiles)
-if swr.file:
-    df, wl = swr.reader(lat, lon, alt)
-    Rrs_swr = swr_process(df, wl).process()
-
-# -----------------------------------------------
-#   AWR processing
-# -----------------------------------------------
-awr = u.awr_data(idpr, awrfiles)
-
-index_idx=[2,0,1]
-
-d=u.data(index_idx)
-Ed, wl_Ed = d.load_csv(awr.Edf)
-Lsky, wl_Lsky = d.load_csv(awr.Lskyf)
-Lt0, wl_Lt = d.load_csv(awr.Ltf)
-
-# ''' interpolate Ed and Lsky data upon Lt wavelength'''
-wl = wl_Lt
-Lt0.columns = pd.MultiIndex.from_tuples(zip(['Lt'] * len(wl), wl), names=['param', 'wl'])
-intEd = interp1d(wl_Ed, Ed.values, fill_value='extrapolate')(wl)
-newEd = pd.DataFrame(index=Ed.index,
-                     columns=pd.MultiIndex.from_tuples(zip(['Ed'] * len(wl), wl), names=['param', 'wl']),
-                     data=intEd)
-intLsky = interp1d(wl_Lsky, Lsky.values, fill_value='extrapolate')(wl)
-newLsky = pd.DataFrame(index=Lsky.index, columns=pd.MultiIndex.from_tuples(zip(['Lsky'] * len(wl), wl),
-                        names=['param', 'wl']), data=intLsky)
-
-awr = awr_process()
-ws=[2]
-fig, axs = plt.subplots(nrows=3, ncols=4, figsize=(16, 10))
-fig.subplots_adjust(left=0.1, right=0.9, hspace=.5, wspace=0.65)
-
-i=0
-for azi, Lt1 in Lt0.groupby(level=2):
-    for vza,Lt in Lt1.groupby(level=1):
-        ax = axs.flat[i]
-        i=i+1
-        print(azi,vza)
-
-        Lsky = newLsky.loc[(newLsky.index.get_level_values(1) ==  vza) & (newLsky.index.get_level_values(2) ==  azi)]
-        Ed = newEd.loc[(newEd.index.get_level_values(1) ==  vza) & (newEd.index.get_level_values(2) ==  azi)]
-
-        Lsky_idx = Lsky.index
-        Ed_idx= Ed.index
-        Lt_idx = Lt.index
-        Lsky.reset_index(level=[1,2],inplace=True)
-        Ed.reset_index(level=[1,2],inplace=True)
-        Lt.reset_index(level=[1,2],inplace=True)
-
-        # merge sensor data on time
-        df = pd.merge_asof(Lt, Ed, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
-                           direction="nearest")
-        df = pd.merge_asof(df, Lsky, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
-                           direction="nearest")
-
-        # add solar angle data and idpr
-        # compute solar angle (mean between first and last acquisition time)
-        df['sza', ''] = np.nan
-        for index, row in df.iterrows():
-            # print index
-            sza = sunpos(index, lat, lon, alt)[1]
-            df.at[index, 'sza'] = sza
-
-        rho_h = awr.get_rho_values([df.sza.min()],[vza],[azi],wl=wl)
-        rho15 = awr.get_rho_mobley(awr.rhoM2015,[df.sza.min()],[vza],[azi],[ws])
-        rho99 = awr.get_rho_mobley(awr.rhoM1999,[df.sza.min()],[vza],[azi],[ws])
-
-        Rrs_h =(df.loc[:,'Lt'] -rho_h*df.loc[:,'Lsky'])/ df.loc[:,'Ed']
-        Rrs15 = (df.loc[:,'Lt'] -rho15*df.loc[:,'Lsky'])/ df.loc[:,'Ed']
-
-        Rrs99 = (df.loc[:,'Lt'] -rho99*df.loc[:,'Lsky'])/ df.loc[:,'Ed']
-        #plt.figure()
-
-
-        def add_curve(ax,x,mean,std,c='red',label=''):
-            ax.plot(x,mean, linestyle='solid', c=c, lw=2.5,
-                alpha=0.8, label=label)
-            ax.fill_between(x,
-                    mean - std,
-                    mean + std, alpha=0.35,color=c)
-        add_curve(ax,wl,Rrs_swr.transpose().mean(axis=1),Rrs_swr.transpose().std(axis=1),label='swr',c='black')
-        add_curve(ax,wl,Rrs15.transpose().mean(axis=1),Rrs15.transpose().std(axis=1),label='M2015')
-        add_curve(ax,wl,Rrs99.transpose().mean(axis=1),Rrs99.transpose().std(axis=1),c='orange',label='M1999')
-        add_curve(ax,wl,Rrs_h.transpose().mean(axis=1),Rrs_h.transpose().std(axis=1),c='grey',label='h')
-
-        ax.set_title('azi='+str(azi)+', vza='+str(vza))
-
-
-        ax.legend(loc='best', frameon=False)
-
-        ax.set_ylabel(r'$R_{rs}\  (sr^{-1})$')
-        ax.set_xlabel(r'Wavelength (nm)')
-
-Lt.index.names
-
-
-
diff --git a/build/lib/utils/DButils.py b/build/lib/utils/DButils.py
deleted file mode 100644
index 67242aa9aff2d725d87b0b64ee13179222608119..0000000000000000000000000000000000000000
--- a/build/lib/utils/DButils.py
+++ /dev/null
@@ -1,16 +0,0 @@
-import psycopg2
-
-username, pwd = 'tristan.harmel', '7.zcBjFa' #*****'
-
-IDlake = 'SRC04'
-
-DBserver = 'serveurbd.aix.irstea.priv'
-DBname = 'bd_plando'
-
-db = psycopg2.connect(
-    "host= " + DBserver + " port='5434' dbname='" + DBname +
-    "' user='" + username + "' password='" + pwd + "'")
-
-SQL = "SELECT altitude_pla FROM plan_eau WHERE code_lac='" + IDlake + "';"
-
-altitude = pgSQLquery2numpy(db, SQL)['altitude_pla'][0]
diff --git a/build/lib/utils/__init__.py b/build/lib/utils/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/build/lib/utils/format_info4acix.py b/build/lib/utils/format_info4acix.py
deleted file mode 100644
index 0e55df08dcc1c6697290a2ac171a1ac0fd4067a0..0000000000000000000000000000000000000000
--- a/build/lib/utils/format_info4acix.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import os
-import pandas as pd
-import numpy as np
-import datetime
-
-infof = os.path.abspath("/DATA/OBS2CO/data/info/mesures_in_situ.csv")
-acixf= os.path.abspath("/DATA/OBS2CO/data/info/acix_info.csv")
-info = pd.read_csv(infof, sep=';')
-info.sort_values('ID_prel',inplace=True)
-
-privID=info.ID_prel
-lat=info.Lat.round(5)
-lon=info.Lon.round(5)
-date=pd.to_datetime(info.Date_prel).dt.strftime('%m-%d-%Y')
-time=pd.to_datetime(info.h_debut)+datetime.timedelta(minutes = 15)
-time=time.dt.strftime('%H.%M')
-time.name='start_time_plus15min'
-acix_info=pd.concat([privID,lat,lon,date,time],axis=1)
-acix_info.to_csv(acixf)
\ No newline at end of file
diff --git a/build/lib/utils/sunposition.py b/build/lib/utils/sunposition.py
deleted file mode 100644
index 61e4eebad8de8998e887055fef0f72160862f434..0000000000000000000000000000000000000000
--- a/build/lib/utils/sunposition.py
+++ /dev/null
@@ -1,653 +0,0 @@
-# The MIT License (MIT)
-# 
-# Copyright (c) 2016 Samuel Bear Powell
-# 
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-# 
-# The above copyright notice and this permission notice shall be included in all
-# copies or substantial portions of the Software.
-# 
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-
-import numpy as np
-from datetime import datetime
-
-class _sp:
-    @staticmethod
-    def calendar_time(dt):
-        try:
-            x = dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.microsecond
-            return x
-        except AttributeError:
-            try:
-                return _sp.calendar_time(datetime.utcfromtimestamp(dt)) #will raise OSError if dt is not acceptable
-            except:
-                raise TypeError('dt must be datetime object or POSIX timestamp')
-
-    @staticmethod
-    def julian_day(dt):
-        """Calculate the Julian Day from a datetime.datetime object in UTC"""
-        # year and month numbers
-        yr, mo, dy, hr, mn, sc, us = _sp.calendar_time(dt)
-        if mo <= 2:  # From paper: "if M = 1 or 2, then Y = Y - 1 and M = M + 12"
-            mo += 12
-            yr -= 1
-        # day of the month with decimal time
-        dy = dy + hr/24.0 + mn/(24.0*60.0) + sc/(24.0*60.0*60.0) + us/(24.0*60.0*60.0*1e6)
-        # b is equal to 0 for the julian calendar and is equal to (2- A +
-        # INT(A/4)), A = INT(Y/100), for the gregorian calendar
-        a = int(yr / 100)
-        b = 2 - a + int(a / 4)
-        jd = int(365.25 * (yr + 4716)) + int(30.6001 * (mo + 1)) + dy + b - 1524.5
-        return jd
-
-    @staticmethod
-    def julian_ephemeris_day(jd, deltat):
-        """Calculate the Julian Ephemeris Day from the Julian Day and delta-time = (terrestrial time - universal time) in seconds"""
-        return jd + deltat / 86400.0
-
-    @staticmethod
-    def julian_century(jd):
-        """Caluclate the Julian Century from Julian Day or Julian Ephemeris Day"""
-        return (jd - 2451545.0) / 36525.0
-
-    @staticmethod
-    def julian_millennium(jc):
-        """Calculate the Julian Millennium from Julian Ephemeris Century"""
-        return jc / 10.0
-
-    # Earth Periodic Terms
-    # Earth Heliocentric Longitude coefficients (L0, L1, L2, L3, L4, and L5 in paper)
-    _EHL_ = [#L0:
-                [(175347046, 0.0, 0.0), (3341656, 4.6692568, 6283.07585), (34894, 4.6261, 12566.1517),
-                (3497, 2.7441, 5753.3849), (3418, 2.8289, 3.5231), (3136, 3.6277, 77713.7715),
-                (2676, 4.4181, 7860.4194), (2343, 6.1352, 3930.2097), (1324, 0.7425, 11506.7698),
-                (1273, 2.0371, 529.691), (1199, 1.1096, 1577.3435), (990, 5.233, 5884.927),
-                (902, 2.045, 26.298), (857, 3.508, 398.149), (780, 1.179, 5223.694),
-                (753, 2.533, 5507.553), (505, 4.583, 18849.228), (492, 4.205, 775.523),
-                (357, 2.92, 0.067), (317, 5.849, 11790.629), (284, 1.899, 796.298),
-                (271, 0.315, 10977.079), (243, 0.345, 5486.778), (206, 4.806, 2544.314),
-                (205, 1.869, 5573.143), (202, 2.4458, 6069.777), (156, 0.833, 213.299),
-                (132, 3.411, 2942.463), (126, 1.083, 20.775), (115, 0.645, 0.98),
-                (103, 0.636, 4694.003), (102, 0.976, 15720.839), (102, 4.267, 7.114),
-                (99, 6.21, 2146.17), (98, 0.68, 155.42), (86, 5.98, 161000.69),
-                (85, 1.3, 6275.96), (85, 3.67, 71430.7), (80, 1.81, 17260.15),
-                (79, 3.04, 12036.46), (71, 1.76, 5088.63), (74, 3.5, 3154.69),
-                (74, 4.68, 801.82), (70, 0.83, 9437.76), (62, 3.98, 8827.39),
-                (61, 1.82, 7084.9), (57, 2.78, 6286.6), (56, 4.39, 14143.5),
-                (56, 3.47, 6279.55), (52, 0.19, 12139.55), (52, 1.33, 1748.02),
-                (51, 0.28, 5856.48), (49, 0.49, 1194.45), (41, 5.37, 8429.24),
-                (41, 2.4, 19651.05), (39, 6.17, 10447.39), (37, 6.04, 10213.29),
-                (37, 2.57, 1059.38), (36, 1.71, 2352.87), (36, 1.78, 6812.77),
-                (33, 0.59, 17789.85), (30, 0.44, 83996.85), (30, 2.74, 1349.87),
-                (25, 3.16, 4690.48)],
-            #L1:
-                [(628331966747, 0.0, 0.0), (206059, 2.678235, 6283.07585), (4303, 2.6351, 12566.1517),
-                (425, 1.59, 3.523), (119, 5.796, 26.298), (109, 2.966, 1577.344),
-                (93, 2.59, 18849.23), (72, 1.14, 529.69), (68, 1.87, 398.15),
-                (67, 4.41, 5507.55), (59, 2.89, 5223.69), (56, 2.17, 155.42),
-                (45, 0.4, 796.3), (36, 0.47, 775.52), (29, 2.65, 7.11),
-                (21, 5.34, 0.98), (19, 1.85, 5486.78), (19, 4.97, 213.3),
-                (17, 2.99, 6275.96), (16, 0.03, 2544.31), (16, 1.43, 2146.17),
-                (15, 1.21, 10977.08), (12, 2.83, 1748.02), (12, 3.26, 5088.63),
-                (12, 5.27, 1194.45), (12, 2.08, 4694), (11, 0.77, 553.57),
-                (10, 1.3, 3286.6), (10, 4.24, 1349.87), (9, 2.7, 242.73),
-                (9, 5.64, 951.72), (8, 5.3, 2352.87), (6, 2.65, 9437.76),
-                (6, 4.67, 4690.48)],
-            #L2:
-                [(52919, 0.0, 0.0), (8720, 1.0721, 6283.0758), (309, 0.867, 12566.152),
-                (27, 0.05, 3.52), (16, 5.19, 26.3), (16, 3.68, 155.42),
-                (10, 0.76, 18849.23), (9, 2.06, 77713.77), (7, 0.83, 775.52),
-                (5, 4.66, 1577.34), (4, 1.03, 7.11), (4, 3.44, 5573.14),
-                (3, 5.14, 796.3), (3, 6.05, 5507.55), (3, 1.19, 242.73),
-                (3, 6.12, 529.69), (3, 0.31, 398.15), (3, 2.28, 553.57),
-                (2, 4.38, 5223.69), (2, 3.75, 0.98)],
-            #L3:
-                [(289, 5.844, 6283.076), (35, 0.0, 0.0,), (17, 5.49, 12566.15),
-                (3, 5.2, 155.42), (1, 4.72, 3.52), (1, 5.3, 18849.23),
-                (1, 5.97, 242.73)],
-            #L4:
-               [(114, 3.142, 0.0), (8, 4.13, 6283.08), (1, 3.84, 12566.15)],
-            #L5:
-               [(1, 3.14, 0.0)]
-            ]
-
-    #Earth Heliocentric Latitude coefficients (B0 and B1 in paper)
-    _EHB_ = [ #B0:
-                [(280, 3.199, 84334.662), (102, 5.422, 5507.553), (80, 3.88, 5223.69),
-                (44, 3.7, 2352.87), (32, 4.0, 1577.34)],
-            #B1:
-                [(9, 3.9, 5507.55), (6, 1.73, 5223.69)]
-            ]
-
-    #Earth Heliocentric Radius coefficients (R0, R1, R2, R3, R4)
-    _EHR_ = [#R0:
-                [(100013989, 0.0, 0.0), (1670700, 3.0984635, 6283.07585), (13956, 3.05525, 12566.1517),
-                (3084, 5.1985, 77713.7715), (1628, 1.1739, 5753.3849), (1576, 2.8469, 7860.4194),
-                (925, 5.453, 11506.77), (542, 4.564, 3930.21), (472, 3.661, 5884.927),
-                (346, 0.964, 5507.553), (329, 5.9, 5223.694), (307, 0.299, 5573.143),
-                (243, 4.273, 11790.629), (212, 5.847, 1577.344), (186, 5.022, 10977.079),
-                (175, 3.012, 18849.228), (110, 5.055, 5486.778), (98, 0.89, 6069.78),
-                (86, 5.69, 15720.84), (86, 1.27, 161000.69), (85, 0.27, 17260.15),
-                (63, 0.92, 529.69), (57, 2.01, 83996.85), (56, 5.24, 71430.7),
-                (49, 3.25, 2544.31), (47, 2.58, 775.52), (45, 5.54, 9437.76),
-                (43, 6.01, 6275.96), (39, 5.36, 4694), (38, 2.39, 8827.39),
-                (37, 0.83, 19651.05), (37, 4.9, 12139.55), (36, 1.67, 12036.46),
-                (35, 1.84, 2942.46), (33, 0.24, 7084.9), (32, 0.18, 5088.63),
-                (32, 1.78, 398.15), (28, 1.21, 6286.6), (28, 1.9, 6279.55),
-                (26, 4.59, 10447.39)],
-            #R1:
-                [(103019, 1.10749, 6283.07585), (1721, 1.0644, 12566.1517), (702, 3.142, 0.0),
-                (32, 1.02, 18849.23), (31, 2.84, 5507.55), (25, 1.32, 5223.69),
-                (18, 1.42, 1577.34), (10, 5.91, 10977.08), (9, 1.42, 6275.96),
-                (9, 0.27, 5486.78)],
-            #R2:
-                [(4359, 5.7846, 6283.0758), (124, 5.579, 12566.152), (12, 3.14, 0.0),
-                (9, 3.63, 77713.77), (6, 1.87, 5573.14), (3, 5.47, 18849)],
-            #R3:
-                [(145, 4.273, 6283.076), (7, 3.92, 12566.15)],
-            #R4:
-                [(4, 2.56, 6283.08)]
-            ]
-
-    @staticmethod
-    def heliocentric_longitude(jme):
-        """Compute the Earth Heliocentric Longitude (L) in degrees given the Julian Ephemeris Millennium"""
-        #L5, ..., L0
-        Li = [sum(a*np.cos(b + c*jme) for a,b,c in abcs) for abcs in reversed(_sp._EHL_)]
-        L = np.polyval(Li, jme) / 1e8
-        L = np.rad2deg(L) % 360
-        return L
-    @staticmethod
-    def heliocentric_latitude(jme):
-        """Compute the Earth Heliocentric Latitude (B) in degrees given the Julian Ephemeris Millennium"""
-        Bi = [sum(a*np.cos(b + c*jme) for a,b,c in abcs) for abcs in reversed(_sp._EHB_)]
-        B = np.polyval(Bi, jme) / 1e8
-        B = np.rad2deg(B) % 360
-        return B
-    @staticmethod
-    def heliocentric_radius(jme):
-        """Compute the Earth Heliocentric Radius (R) in astronimical units given the Julian Ephemeris Millennium"""
-        Ri = [sum(a*np.cos(b + c*jme) for a,b,c in abcs) for abcs in reversed(_sp._EHR_)]
-        R = np.polyval(Ri, jme) / 1e8
-        return R
-    @staticmethod
-    def heliocentric_position(jme):
-        """Compute the Earth Heliocentric Longitude, Latitude, and Radius given the Julian Ephemeris Millennium
-            Returns (L, B, R) where L = longitude in degrees, B = latitude in degrees, and R = radius in astronomical units
-        """
-        return _sp.heliocentric_longitude(jme), _sp.heliocentric_latitude(jme), _sp.heliocentric_radius(jme)
-    @staticmethod
-    def geocentric_position(helio_pos):
-        """Compute the geocentric latitude (Theta) and longitude (beta) (in degrees) of the sun given the earth's heliocentric position (L, B, R)"""
-        L,B,R = helio_pos
-        th = L + 180
-        b = -B
-        return (th, b)
-
-    #Nutation Longitude and Obliquity coefficients (Y)
-    _NLOY_ = [(0,   0,   0,   0,   1), (-2,  0,   0,   2,   2), (0,   0,   0,   2,   2),
-              (0,   0,   0,   0,   2), (0,   1,   0,   0,   0), (0,   0,   1,   0,   0),
-              (-2,  1,   0,   2,   2), (0,   0,   0,   2,   1), (0,   0,   1,   2,   2),
-              (-2,  -1,  0,   2,   2), (-2,  0,   1,   0,   0), (-2,  0,   0,   2,   1),
-              (0,   0,   -1,  2,   2), (2,   0,   0,   0,   0), (0,   0,   1,   0,   1),
-              (2,   0,   -1,  2,   2), (0,   0,   -1,  0,   1), (0,   0,   1,   2,   1),
-              (-2,  0,   2,   0,   0), (0,   0,   -2,  2,   1), (2,   0,   0,   2,   2),
-              (0,   0,   2,   2,   2), (0,   0,   2,   0,   0), (-2,  0,   1,   2,   2),
-              (0,   0,   0,   2,   0), (-2,  0,   0,   2,   0), (0,   0,   -1,  2,   1),
-              (0,   2,   0,   0,   0), (2,   0,   -1,  0,   1), (-2,  2,   0,   2,   2),
-              (0,   1,   0,   0,   1), (-2,  0,   1,   0,   1), (0,   -1,  0,   0,   1),
-              (0,   0,   2,   -2,  0), (2,   0,   -1,  2,   1), (2,   0,   1,   2,   2),
-              (0,   1,   0,   2,   2), (-2,  1,   1,   0,   0), (0,   -1,  0,   2,   2),
-              (2,   0,   0,   2,   1), (2,   0,   1,   0,   0), (-2,  0,   2,   2,   2),
-              (-2,  0,   1,   2,   1), (2,   0,   -2,  0,   1), (2,   0,   0,   0,   1),
-              (0,   -1,  1,   0,   0), (-2,  -1,  0,   2,   1), (-2,  0,   0,   0,   1),
-              (0,   0,   2,   2,   1), (-2,  0,   2,   0,   1), (-2,  1,   0,   2,   1),
-              (0,   0,   1,   -2,  0), (-1,  0,   1,   0,   0), (-2,  1,   0,   0,   0),
-              (1,   0,   0,   0,   0), (0,   0,   1,   2,   0), (0,   0,   -2,  2,   2),
-              (-1,  -1,  1,   0,   0), (0,   1,   1,   0,   0), (0,   -1,  1,   2,   2),
-              (2,   -1,  -1,  2,   2), (0,   0,   3,   2,   2), (2,   -1,  0,   2,   2)]
-    #Nutation Longitude and Obliquity coefficients (a,b)
-    _NLOab_ = [(-171996, -174.2), (-13187, -1.6), (-2274, -0.2), (2062, 0.2), (1426, -3.4), (712, 0.1),
-               (-517, 1.2), (-386, -0.4), (-301, 0), (217, -0.5), (-158, 0), (129, 0.1),
-               (123, 0), (63,  0), (63,  0.1), (-59, 0), (-58, -0.1), (-51, 0),
-               (48,  0), (46,  0), (-38, 0), (-31, 0), (29,  0), (29,  0),
-               (26,  0), (-22, 0), (21,  0), (17,  -0.1), (16,  0), (-16, 0.1),
-               (-15, 0), (-13, 0), (-12, 0), (11,  0), (-10, 0), (-8,  0),
-               (7,   0), (-7,  0), (-7,  0), (-7,  0), (6,   0), (6,   0),
-               (6,   0), (-6,  0), (-6,  0), (5,   0), (-5,  0), (-5,  0),
-               (-5,  0), (4,   0), (4,   0), (4,   0), (-4,  0), (-4,  0),
-               (-4,  0), (3,   0), (-3,  0), (-3,  0), (-3,  0), (-3,  0),
-               (-3,  0), (-3,  0), (-3,  0)]
-    #Nutation Longitude and Obliquity coefficients (c,d)
-    _NLOcd_ = [(92025,   8.9), (5736,    -3.1), (977, -0.5), (-895,    0.5),
-               (54,  -0.1), (-7,  0), (224, -0.6), (200, 0),
-               (129, -0.1), (-95, 0.3), (0,   0), (-70, 0),
-               (-53, 0), (0,   0), (-33, 0), (26,  0),
-               (32,  0), (27,  0), (0,   0), (-24, 0),
-               (16,  0), (13,  0), (0,   0), (-12, 0),
-               (0,   0), (0,   0), (-10, 0), (0,   0),
-               (-8,  0), (7,   0), (9,   0), (7,   0),
-               (6,   0), (0,   0), (5,   0), (3,   0),
-               (-3,  0), (0,   0), (3,   0), (3,   0),
-               (0,   0), (-3,  0), (-3,  0), (3,   0),
-               (3,   0), (0,   0), (3,   0), (3,   0),
-               (3,   0)]
-
-    @staticmethod
-    def ecliptic_obliquity(jme, delta_epsilon):
-        """Calculate the true obliquity of the ecliptic (epsilon, in degrees) given the Julian Ephemeris Millennium and the obliquity"""
-        u = jme/10
-        e0 = np.polyval([2.45, 5.79, 27.87, 7.12, -39.05, -249.67, -51.38, 1999.25, -1.55, -4680.93, 84381.448], u)
-        e = e0/3600.0 + delta_epsilon
-        return e
-
-    @staticmethod
-    def nutation_obliquity(jce):
-        """compute the nutation in longitude (delta_psi) and the true obliquity (epsilon) given the Julian Ephemeris Century"""
-        
-        #mean elongation of the moon from the sun, in radians:
-        #x0 = 297.85036 + 445267.111480*jce - 0.0019142*(jce**2) + (jce**3)/189474
-        x0 = np.deg2rad(np.polyval([1./189474, -0.0019142, 445267.111480, 297.85036],jce))
-        #mean anomaly of the sun (Earth), in radians:
-        x1 = np.deg2rad(np.polyval([-1/3e5, -0.0001603, 35999.050340, 357.52772], jce))
-        #mean anomaly of the moon, in radians:
-        x2 = np.deg2rad(np.polyval([1./56250, 0.0086972, 477198.867398, 134.96298], jce))
-        #moon's argument of latitude, in radians:
-        x3 = np.deg2rad(np.polyval([1./327270, -0.0036825, 483202.017538, 93.27191], jce))
-        #Longitude of the ascending node of the moon's mean orbit on the ecliptic
-        # measured from the mean equinox of the date, in radians
-        x4 = np.deg2rad(np.polyval([1./45e4, 0.0020708, -1934.136261, 125.04452], jce))
-
-        x = (x0, x1, x2, x3, x4)
-
-        dp = 0.0
-        for y, ab in zip(_sp._NLOY_, _sp._NLOab_):
-            a,b = ab
-            dp += (a + b*jce)*np.sin(np.dot(x, y))
-        dp = np.rad2deg(dp)/36e6
-
-        de = 0.0
-        for y, cd in zip(_sp._NLOY_, _sp._NLOcd_):
-            c,d = cd
-            de += (c + d*jce)*np.cos(np.dot(x, y))
-        de = np.rad2deg(de)/36e6
-
-        e = _sp.ecliptic_obliquity(_sp.julian_millennium(jce), de)
-
-        return dp, e
-    
-    @staticmethod
-    def abberation_correction(R):
-        """Calculate the abberation correction (delta_tau, in degrees) given the Earth Heliocentric Radius (in AU)"""
-        return -20.4898/(3600*R)
-    
-    @staticmethod
-    def sun_longitude(helio_pos, delta_psi):
-        """Calculate the apparent sun longitude (lambda, in degrees) and geocentric longitude (beta, in degrees) given the earth heliocentric position and delta_psi"""
-        L,B,R = helio_pos
-        theta = L + 180 #geocentric longitude
-        beta = -B
-        ll = theta + delta_psi + _sp.abberation_correction(R)
-        return ll, beta
-    
-    @staticmethod
-    def greenwich_sidereal_time(jd, delta_psi, epsilon):
-        """Calculate the apparent Greenwich sidereal time (v, in degrees) given the Julian Day"""
-        jc = _sp.julian_century(jd)
-        #mean sidereal time at greenwich, in degrees:
-        v0 = (280.46061837 + 360.98564736629*(jd - 2451545) + 0.000387933*(jc**2) - (jc**3)/38710000) % 360
-        v = v0 + delta_psi*np.cos(np.deg2rad(epsilon))
-        return v
-    
-    @staticmethod
-    def sun_ra_decl(llambda, epsilon, beta):
-        """Calculate the sun's geocentric right ascension (alpha, in degrees) and declination (delta, in degrees)"""
-        l, e, b = map(np.deg2rad, (llambda, epsilon, beta))
-        alpha = np.arctan2(np.sin(l)*np.cos(e) - np.tan(b)*np.sin(e), np.cos(l)) #x1 / x2
-        alpha = np.rad2deg(alpha) % 360
-        delta = np.arcsin(np.sin(b)*np.cos(e) + np.cos(b)*np.sin(e)*np.sin(l))
-        delta = np.rad2deg(delta)
-        return alpha, delta
-    
-    @staticmethod
-    def sun_topo_ra_decl_hour(latitude, longitude, elevation, jd, delta_t = 0):
-        """Calculate the sun's topocentric right ascension (alpha'), declination (delta'), and hour angle (H')"""
-        
-        jde = _sp.julian_ephemeris_day(jd, delta_t)
-        jce = _sp.julian_century(jde)
-        jme = _sp.julian_millennium(jce)
-
-        helio_pos = _sp.heliocentric_position(jme)
-        R = helio_pos[-1]
-        phi, sigma, E = latitude, longitude, elevation
-        #equatorial horizontal parallax of the sun, in radians
-        xi = np.deg2rad(8.794/(3600*R)) #
-        #rho = distance from center of earth in units of the equatorial radius
-        #phi-prime = geocentric latitude
-        #NB: These equations look like they're based on WGS-84, but are rounded slightly
-        # The WGS-84 reference ellipsoid has major axis a = 6378137 m, and flattening factor 1/f = 298.257223563
-        # minor axis b = a*(1-f) = 6356752.3142 = 0.996647189335*a
-        u = np.arctan(0.99664719*np.tan(phi)) #
-        x = np.cos(u) + E*np.cos(phi)/6378140 #rho sin(phi-prime)
-        y = 0.99664719*np.sin(u) + E*np.sin(phi)/6378140 #rho cos(phi-prime)
-
-        delta_psi, epsilon = _sp.nutation_obliquity(jce) #
-
-        llambda, beta = _sp.sun_longitude(helio_pos, delta_psi) #
-        
-        alpha, delta = _sp.sun_ra_decl(llambda, epsilon, beta) #
-
-        v = _sp.greenwich_sidereal_time(jd, delta_psi, epsilon) #
-
-        H = v + longitude - alpha #
-        Hr, dr = map(np.deg2rad,(H,delta))
-
-        dar = np.arctan2(-x*np.sin(xi)*np.sin(Hr), np.cos(dr)-x*np.sin(xi)*np.cos(Hr))
-        delta_alpha = np.rad2deg(dar) #
-        
-        alpha_prime = alpha + delta_alpha #
-        delta_prime = np.rad2deg(np.arctan2((np.sin(dr) - y*np.sin(xi))*np.cos(dar), np.cos(dr) - y*np.sin(xi)*np.cos(Hr))) #
-        H_prime = H - delta_alpha #
-
-        return alpha_prime, delta_prime, H_prime
-    
-    @staticmethod
-    def sun_topo_azimuth_zenith(latitude, delta_prime, H_prime, temperature=14.6, pressure=1013):
-        """Compute the sun's topocentric azimuth and zenith angles
-        azimuth is measured eastward from north, zenith from vertical
-        temperature = average temperature in C (default is 14.6 = global average in 2013)
-        pressure = average pressure in mBar (default 1013 = global average)
-        """
-        phi = np.deg2rad(latitude)
-        dr, Hr = map(np.deg2rad,(delta_prime, H_prime))
-        P, T = pressure, temperature
-        e0 = np.rad2deg(np.arcsin(np.sin(phi)*np.sin(dr) + np.cos(phi)*np.cos(dr)*np.cos(Hr)))
-        tmp = np.deg2rad(e0 + 10.3/(e0+5.11))
-        delta_e = (P/1010.0)*(283.0/(273+T))*(1.02/(60*np.tan(tmp)))
-        e = e0 + delta_e
-        zenith = 90 - e
-
-        gamma = np.rad2deg(np.arctan2(np.sin(Hr), np.cos(Hr)*np.sin(phi) - np.tan(dr)*np.cos(phi))) % 360
-        Phi = (gamma + 180) % 360 #azimuth from north
-        return Phi, zenith
-
-    @staticmethod
-    def norm_lat_lon(lat,lon):
-        if lat < -90 or lat > 90:
-            #convert to cartesian and back
-            x = np.cos(np.deg2rad(lon))*np.cos(np.deg2rad(lat))
-            y = np.sin(np.deg2rad(lon))*np.cos(np.deg2rad(lat))
-            z = np.sin(np.deg2rad(lat))
-            r = np.sqrt(x**2 + y**2 + z**2)
-            lon = np.rad2deg(np.arctan2(y,x)) % 360
-            lat = np.rad2deg(np.arcsin(z/r))
-        elif lon < 0 or lon > 360:
-            lon = lon % 360
-        return lat,lon
-
-    @staticmethod
-    def topo_pos(t,lat,lon,elev,temp,press,dt):
-        """compute RA,dec,H, all in degrees"""
-        lat,lon = _sp.norm_lat_lon(lat,lon)
-        jd = _sp.julian_day(t)
-        RA, dec, H = _sp.sun_topo_ra_decl_hour(lat, lon, elev, jd, dt)
-        return RA, dec, H
-
-    @staticmethod
-    def pos(t,lat,lon,elev,temp,press,dt):
-        """Compute azimute,zenith,RA,dec,H all in degrees"""
-        lat,lon = _sp.norm_lat_lon(lat,lon)
-        jd = _sp.julian_day(t)
-        RA, dec, H = _sp.sun_topo_ra_decl_hour(lat, lon, elev, jd, dt)
-        azimuth, zenith = _sp.sun_topo_azimuth_zenith(lat, dec, H, temp, press)
-        return azimuth,zenith,RA,dec,H
-
-def julian_day(dt):
-    """Convert UTC datetimes or UTC timestamps to Julian days
-
-    Parameters
-    ----------
-    dt : array_like
-        UTC datetime objects or UTC timestamps (as per datetime.utcfromtimestamp)
-
-    Returns
-    -------
-    jd : ndarray
-        datetimes converted to fractional Julian days
-    """
-    dts = np.array(dt)
-    if len(dts.shape) == 0:
-        return _sp.julian_day(dt)
-
-    jds = np.empty(dts.shape)
-    for i,d in enumerate(dts.flat):
-        jds.flat[i] = _sp.julian_day(d)
-    return jds
-
-def arcdist(p0,p1,radians=False):
-    """Angular distance between azimuth,zenith pairs
-    
-    Parameters
-    ----------
-    p0 : array_like, shape (..., 2)
-    p1 : array_like, shape (..., 2)
-        p[...,0] = azimuth angles, p[...,1] = zenith angles
-    radians : boolean (default False)
-        If False, angles are in degrees, otherwise in radians
-
-    Returns
-    -------
-    ad :  array_like, shape is broadcast(p0,p1).shape
-        Arcdistances between corresponding pairs in p0,p1
-        In degrees by default, in radians if radians=True
-    """
-    #formula comes from translating points into cartesian coordinates
-    #taking the dot product to get the cosine between the two vectors
-    #then arccos to return to angle, and simplify everything assuming real inputs
-    p0,p1 = np.array(p0), np.array(p1)
-    if not radians:
-        p0,p1 = np.deg2rad(p0), np.deg2rad(p1)
-    a0,z0 = p0[...,0], p0[...,1]
-    a1,z1 = p1[...,0], p1[...,1]
-    d = np.arccos(np.cos(z0)*np.cos(z1)+np.cos(a0-a1)*np.sin(z0)*np.sin(z1))
-    if radians:
-        return d
-    else:
-        return np.rad2deg(d)
-
-def observed_sunpos(dt, latitude, longitude, elevation, temperature=None, pressure=None, delta_t=0, radians=False):
-    """Compute the observed coordinates of the sun as viewed at the given time and location.
-
-    Parameters
-    ----------
-    dt : array_like
-        UTC datetime objects or UTC timestamps (as per datetime.utcfromtimestamp) representing the times of observations
-    latitude, longitude : float
-        decimal degrees, positive for north of the equator and east of Greenwich
-    elevation : float
-        meters, relative to the WGS-84 ellipsoid
-    temperature : array_like or None, optional
-        celsius, default is 14.6 (global average in 2013)
-    pressure : array_like or None, optional
-        millibar, default is 1013 (global average in ??)
-    delta_t : array_like, optional
-        seconds, default is 0, difference between the earth's rotation time (TT) and universal time (UT)
-    radians : {True, False}, optional
-        return results in radians if True, degrees if False (default)
-
-    Returns
-    -------
-    coords : ndarray, (...,2)
-        The shape of the array is parameters broadcast together, plus a final dimension for the coordinates.
-        coords[...,0] = observed azimuth angle, measured eastward from north
-        coords[...,1] = observed zenith angle, measured down from vertical
-    """
-    if temperature is None:
-        temperature = 14.6
-    if pressure is None:
-        pressure = 1013
-    
-    #6367444 = radius of earth
-    #numpy broadcasting
-    b = np.broadcast(dt,latitude,longitude,elevation,temperature,pressure,delta_t)
-    res = np.empty(b.shape+(2,))
-    res_vec = res.reshape((-1,2))
-    for i,x in enumerate(b):
-        res_vec[i] = _sp.pos(*x)[:2]
-    if radians:
-        res = np.deg2rad(res)
-    return res
-
-def topocentric_sunpos(dt, latitude, longitude, elevation, temperature=None, pressure=None, delta_t=0, radians=False):
-    """Compute the topocentric coordinates of the sun as viewed at the given time and location.
-
-    Parameters
-    ----------
-    dt : array_like
-        UTC datetime objects or UTC timestamps (as per datetime.utcfromtimestamp) representing the times of observations
-    latitude, longitude : array_like
-        decimal degrees, positive for north of the equator and east of Greenwich
-    elevation : array_like
-        meters, relative to the WGS-84 ellipsoid
-    temperature : array_like or None, optional
-        celsius, default is 14.6 (global average in 2013)
-    pressure : array_like or None, optional
-        millibar, default is 1013 (global average in ??)
-    delta_t : array_like, optional
-        seconds, default is 0, difference between the earth's rotation time (TT) and universal time (UT)
-    radians : {True, False}, optional
-        return results in radians if True, degrees if False (default)
-
-    Returns
-    -------
-    coords : ndarray, (...,3)
-        The shape of the array is parameters broadcast together, plus a final dimension for the coordinates.
-        coords[...,0] = topocentric right ascension
-        coords[...,1] = topocentric declination
-        coords[...,2] = topocentric hour angle
-    """
-    if temperature is None:
-        temperature = 14.6
-    if pressure is None:
-        pressure = 1013
-    
-    #6367444 = radius of earth
-    #numpy broadcasting
-    b = np.broadcast(dt,latitude,longitude,elevation,temperature,pressure,delta_t)
-    res = np.empty(b.shape+(3,))
-    res_vec = res.reshape((-1,3))
-    for i,x in enumerate(b):
-        res_vec[i] = _sp.topo_pos(*x)
-    if radians:
-        res = np.deg2rad(res)
-    return res   
-
-def sunpos(dt, latitude, longitude, elevation, temperature=None, pressure=None, delta_t=0, radians=False):
-    """Compute the observed and topocentric coordinates of the sun as viewed at the given time and location.
-
-    Parameters
-    ----------
-    dt : array_like
-        UTC datetime objects or UTC timestamps (as per datetime.utcfromtimestamp) representing the times of observations
-    latitude, longitude : array_like
-        decimal degrees, positive for north of the equator and east of Greenwich
-    elevation : array_like
-        meters, relative to the WGS-84 ellipsoid
-    temperature : array_like or None, optional
-        celsius, default is 14.6 (global average in 2013)
-    pressure : array_like or None, optional
-        millibar, default is 1013 (approximate global mean at sea level)
-    delta_t : array_like, optional
-        seconds, default is 0, difference between the earth's rotation time (TT) and universal time (UT)
-    radians : {True, False}, optional
-        return results in radians if True, degrees if False (default)
-
-    Returns
-    -------
-    coords : ndarray, (...,5)
-        The shape of the array is parameters broadcast together, plus a final dimension for the coordinates.
-        coords[...,0] = observed azimuth angle, measured eastward from north
-        coords[...,1] = observed zenith angle, measured down from vertical
-        coords[...,2] = topocentric right ascension
-        coords[...,3] = topocentric declination
-        coords[...,4] = topocentric hour angle
-    """
-
-    if temperature is None:
-        temperature = 14.6
-    if pressure is None:
-        pressure = 1013
-    
-    #6367444 = radius of earth
-    #numpy broadcasting
-    b = np.broadcast(dt,latitude,longitude,elevation,temperature,pressure,delta_t)
-    res = np.empty(b.shape+(5,))
-    res_vec = res.reshape((-1,5))
-    for i,x in enumerate(b):
-        res_vec[i] = _sp.pos(*x)
-    if radians:
-        res = np.deg2rad(res)
-    return res
-
-def main(args):
-    az, zen, ra, dec, h = sunpos(args.t, args.lat, args.lon, args.elev, args.temp, args.p, args.dt, args.rad)
-    if args.csv:
-        #machine readable
-        print('{t}, {dt}, {lat}, {lon}, {elev}, {temp}, {p}, {az}, {zen}, {ra}, {dec}, {h}'.format(t=args.t, dt=args.dt, lat=args.lat, lon=args.lon, elev=args.elev,temp=args.temp, p=args.p,az=az, zen=zen, ra=ra, dec=dec, h=h))
-    else:
-        dr='deg'
-        if args.rad:
-            dr='rad'
-        print("Computing sun position at T = {t} + {dt} s".format(t=args.t, dt=args.dt))
-        print("Lat, Lon, Elev = {lat} deg, {lon} deg, {elev} m".format(lat=args.lat, lon=args.lon, elev=args.elev))
-        print("T, P = {temp} C, {press} mbar".format(temp=args.temp, press=args.p))
-        print("Results:")
-        print("Azimuth, zenith = {az} {dr}, {zen} {dr}".format(az=az,zen=zen,dr=dr))
-        print("RA, dec, H = {ra} {dr}, {dec} {dr}, {h} {dr}".format(ra=ra, dec=dec, h=h, dr=dr))
-
-if __name__ == '__main__':
-    from argparse import ArgumentParser
-    import datetime, sys
-    parser = ArgumentParser(prog='sunposition',description='Compute sun position parameters given the time and location')
-    parser.add_argument('--version',action='version',version='%(prog)s 1.0')
-    parser.add_argument('--citation',dest='cite',action='store_true',help='Print citation information')
-    parser.add_argument('-t','--time',dest='t',type=str,default='now',help='"now" or date and time (UTC) in "YYYY-MM-DD hh:mm:ss.ssssss" format or a (UTC) POSIX timestamp')
-    parser.add_argument('-lat','--latitude',dest='lat',type=float,default=51.48,help='latitude, in decimal degrees, positive for north')
-    parser.add_argument('-lon','--longitude',dest='lon',type=float,default=0.0,help='longitude, in decimal degrees, positive for east')
-    parser.add_argument('-e','--elevation',dest='elev',type=float,default=0,help='elevation, in meters')
-    parser.add_argument('-T','--temperature',dest='temp',type=float,default=14.6,help='temperature, in degrees celsius')
-    parser.add_argument('-p','--pressure',dest='p',type=float,default=1013.0,help='atmospheric pressure, in millibar')
-    parser.add_argument('-dt',type=float,default=0.0,help='difference between earth\'s rotation time (TT) and universal time (UT1)')
-    parser.add_argument('-r','--radians',dest='rad',action='store_true',help='Output in radians instead of degrees')
-    parser.add_argument('--csv',dest='csv',action='store_true',help='Comma separated values (time,dt,lat,lon,elev,temp,pressure,az,zen,RA,dec,H)')
-    args = parser.parse_args()
-    if args.cite:
-        print("Implementation: Samuel Bear Powell, 2016")
-        print("Algorithm:")
-        print("Ibrahim Reda, Afshin Andreas, \"Solar position algorithm for solar radiation applications\", Solar Energy, Volume 76, Issue 5, 2004, Pages 577-589, ISSN 0038-092X, doi:10.1016/j.solener.2003.12.003")
-        sys.exit(0)
-    if args.t == "now":
-        args.t = datetime.datetime.utcnow()
-    elif ":" in args.t and "-" in args.t:
-        try:
-            args.t = datetime.datetime.strptime(args.t,'%Y-%m-%d %H:%M:%S.%f') #with microseconds
-        except ValueError:
-            try:
-                args.t = datetime.datetime.strptime(args.t,'%Y-%m-%d %H:%M:%S.') #trailing dot, no microseconds
-            except ValueError:
-                args.t = datetime.datetime.strptime(args.t,'%Y-%m-%d %H:%M:%S') #without microseconds
-    else:
-        args.t = datetime.datetime.utcfromtimestamp(float(args.t))
-    main(args)
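The file deleted above is only the stale build copy of the sun-position module; the maintained version lives under utils/sunposition.py and is what utils/utils.py imports below. A minimal usage sketch, assuming that copy keeps the same sunpos() signature and return layout as the code shown here (times and coordinates are made up for illustration):

import datetime
from utils.sunposition import sunpos

# hourly UTC samples broadcast against scalar location parameters
times = [datetime.datetime(2018, 6, 21, h) for h in range(6, 19)]
coords = sunpos(times, latitude=43.6, longitude=3.9, elevation=10.0)

azimuth = coords[..., 0]   # observed azimuth, degrees east of north
zenith = coords[..., 1]    # observed zenith angle, degrees from vertical
print(zenith.min())        # smallest zenith angle of the day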
diff --git a/build/lib/utils/utils.py b/build/lib/utils/utils.py
deleted file mode 100644
index 015cf7d2d20fcfaf9a4aed66fe9af70472b0b7f8..0000000000000000000000000000000000000000
--- a/build/lib/utils/utils.py
+++ /dev/null
@@ -1,309 +0,0 @@
-import pandas as pd
-import numpy as np
-from scipy.interpolate import interp1d
-
-from utils.sunposition import sunpos
-from trios.config import *
-
-class awr_data:
-    '''
-    Above-water radiometry
-    '''
-
-    def __init__(self, idpr=None, files=None):
-        # ''' get file names for Ed, Lsky and Lt data'''
-        self.file = list(filter(lambda x: 'idpr' + idpr in x, files))
-        file = self.file
-        self.Edf = list(filter(lambda x: 'Ed' in x, file))
-        self.Lskyf = list(filter(lambda x: 'Lsky' in x, file))
-        self.Ltf = list(filter(lambda x: 'Lt' in x, file))
-        self.idpr = idpr
-
-    def reader(self, lat, lon, alt=0, name='', index_idx=[0]):
-        '''
-        Read above-water data files for a given acquisition series (idpr),
-        merge the different data types:
-          - by interpolating over wavelengths onto a common band set
-          - by searching the nearest neighbor in time
-        and compute the solar zenith angle.
-
-        :param lat: latitude (decimal degrees)
-        :param lon: longitude (decimal degrees)
-        :param alt: altitude (m)
-        :param name: label of the acquisition series, stored in the 'name' column
-        :param index_idx: positions of the columns used for indexing (datetime first)
-        :return: merged data frame and the common wavelength set
-        '''
-
-        df = pd.DataFrame()
-
-        # ''' read files with pandas format '''
-        d = data(index_idx)
-        Ed, wl_Ed = d.load_csv(self.Edf)
-        Lsky, wl_Lsky = d.load_csv(self.Lskyf)
-        Lt, wl_Lt = d.load_csv(self.Ltf)
-
-        # ''' interpolate Ed, Lt and Lsky data upon common wavelength'''
-        wl = wl_common
-        Lt.columns = pd.MultiIndex.from_tuples(zip(['Lt'] * len(wl), wl), names=['param', 'wl'])
-        intEd = interp1d(wl_Ed, Ed.values, fill_value='extrapolate')(wl)
-        newEd = pd.DataFrame(index=Ed.index,columns=pd.MultiIndex.from_tuples(zip(['Ed'] * len(wl), wl),
-                             names=['param', 'wl']),data=intEd)
-
-        intLt = interp1d(wl_Lt, Lt.values, fill_value='extrapolate')(wl)
-        newLt = pd.DataFrame(index=Lt.index,columns=pd.MultiIndex.from_tuples(zip(['Lt'] * len(wl), wl),
-                             names=['param', 'wl']),data=intLt)
-
-        intLsky = interp1d(wl_Lsky, Lsky.values, fill_value='extrapolate')(wl)
-        newLsky = pd.DataFrame(index=Lsky.index, columns=pd.MultiIndex.from_tuples(zip(['Lsky'] * len(wl), wl),
-                               names=['param', 'wl']), data=intLsky)
-
-        # merge sensor data on time
-        df = pd.merge_asof(newLt, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
-                           direction="nearest")
-        df = pd.merge_asof(df, newLsky, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
-                           direction="nearest")
-
-        # add solar angle data and idpr
-        # compute solar zenith angle at each acquisition time
-        df['sza', ''] = np.nan
-        for index, row in df.iterrows():
-            # print index
-            sza = sunpos(index, lat, lon, alt)[1]
-            df.at[index, 'sza'] = sza
-
-        df['idpr', ''] = self.idpr
-        df['name', ''] = name
-
-        return df, wl
-
-
-class iwr_data:
-    '''
-    In-water radiometry
-    '''
-
-    def __init__(self, idpr, files):
-        # get file names for Ed, Edz and Luz data
-        self.file = list(filter(lambda x: 'idpr' + idpr in x, files))
-        file = self.file
-        self.Edf = list(filter(lambda x: 'Ed_' in x, file))
-        self.Edzf = list(filter(lambda x: 'Edz' in x, file))
-        self.Luzf = list(filter(lambda x: 'Luz' in x, file))
-        self.idpr = idpr
-
-    def reader(self, lat, lon, alt=0, name='', delta_Lu_depth=0, delta_Edz_depth=0):
-        '''
-        Read in-water data files for a given acquisition series (idpr),
-        merge the different data types:
-          - by interpolating over wavelengths onto a common band set
-          - by searching the nearest neighbor in time
-        and compute the solar zenith angle.
-
-        :param lat: latitude (decimal degrees)
-        :param lon: longitude (decimal degrees)
-        :param alt: altitude (m)
-        :param name: label of the acquisition series, stored in the 'name' column
-        :param delta_Lu_depth: adjustment of actual depth for the Lu sensor (distance from depth sensor);
-                               in meters, counted positively downward
-        :param delta_Edz_depth: same as delta_Lu_depth for the Edz sensor
-        :return: merged data frame and the common wavelength set
-        '''
-
-        df = pd.DataFrame()
-
-        # ''' read files with pandas format '''
-        d = data([1, 0])
-
-        Ed, wl_Ed = d.load_csv(self.Edf)
-        Edz, wl_Edz = d.load_csv(self.Edzf)
-        Luz, wl_Luz = d.load_csv(self.Luzf)
-
-        #mask negative values TODO save number of discarded data
-        # Ed.mask(Ed<0,inplace=True)
-        # Edz.mask(Edz<0,inplace=True)
-        # Luz.mask(Luz<0,inplace=True)
-
-        # copy depth data to Ed frame on date index
-        # Ed.index = Ed.index.droplevel(level=1)
-
-        #''' interpolate Ed, Edz and Luz data upon common wavelength'''
-        wl = wl_common
-        Luz.columns = pd.MultiIndex.from_tuples(list(zip(['Luz'] * len(wl), wl)), names=['param', 'wl'])
-        intEd = interp1d(wl_Ed, Ed.values, fill_value='extrapolate')(wl)
-        newEd = pd.DataFrame(index=Ed.index.get_level_values(0),
-                             columns=pd.MultiIndex.from_tuples(list(zip(['Ed'] * len(wl), wl)), names=['param', 'wl']),
-                             data=intEd)
-
-        intEdz = interp1d(wl_Edz, Edz.values, fill_value='extrapolate')(wl)
-        newEdz = pd.DataFrame(index=Edz.index, columns=pd.MultiIndex.from_tuples(list(zip(['Edz'] * len(wl), wl)),
-                              names=['param', 'wl']), data=intEdz)
-
-        intLuz = interp1d(wl_Luz, Luz.values, fill_value='extrapolate')(wl)
-        newLuz = pd.DataFrame(index=Luz.index, columns=pd.MultiIndex.from_tuples(list(zip(['Luz'] * len(wl), wl)),
-                              names=['param', 'wl']), data=intLuz)
-
-
-        # correct depth data for sensor to sensor distance
-        newLuz.reset_index(level=1, inplace=True)
-        newLuz.iloc[:, 0] = newLuz.iloc[:, 0] + delta_Lu_depth
-        # newEd.reset_index(level=1,inplace=True)
-
-        newEdz.reset_index(level=1, inplace=True)
-        newEdz.iloc[:, 0] = newEdz.iloc[:, 0] + delta_Edz_depth
-
-        # merge sensor data on time
-        df = pd.merge_asof(newLuz, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
-                           direction="nearest")
-        df = pd.merge_asof(df, newEdz, left_index=True, right_index=True, suffixes=('_Luz', '_Edz'),
-                           tolerance=pd.Timedelta("2 seconds"),
-                           direction="nearest")  # by="depth",
-
-        # add solar angle data and idpr
-        # compute solar zenith angle at each acquisition time
-        df['sza', ''] = np.nan
-        for index, row in df.iterrows():
-            # print index
-            sza = sunpos(index, lat, lon, alt)[1]
-            df.at[index, 'sza'] = sza
-
-        df['idpr', ''] = self.idpr
-        df['name', ''] = name
-
-        return df, wl
-
-    # def load_csv(self, file):
-    #
-    #     dateparse = lambda x: pd.datetime.strptime(x, '%Y-%m-%d %H:%M:%S')
-    #     if len(file) > 1:
-    #         print('Warning! Multiple files found but only one expected, trios first file of the list:')
-    #         print(file)
-    #     file = file[0]
-    #     df = pd.read_csv(file, sep=';', index_col=[1, 0], na_values=['-NAN'])
-    #     df = df.dropna(axis=1, how='all').dropna(axis=0, how='all')
-    #     df.index = df.index.set_levels([pd.to_datetime(df.index.levels[0]), df.index.levels[1]])
-    #     df.columns = df.columns.astype('float')  # str.extract('(\d+)',expand=False).astype('float')
-    #     # resort to get data in increasing time order
-    #     df.sort_index(inplace=True, level=0)
-    #     wl = df.columns
-    #
-    #     return df, wl
-
-
-class swr_data:
-    '''
-    Surface-water radiometry
-    '''
-
-    def __init__(self, idpr, files):
-        # get file names for Ed and Lu0+ data
-        self.file = list(filter(lambda x: 'idpr' + idpr in x, files))
-        file = self.file
-        self.Edf = list(filter(lambda x: '_Ed' in x, file))
-        self.Lu0f = list(filter(lambda x: '_Lu0+' in x, file))
-        self.idpr = idpr
-
-    def reader(self, lat=None, lon=None, alt=0):
-        '''
-        Read surface-water data files for a given acquisition series (idpr),
-        merge the different data types:
-          - by interpolating over wavelengths onto a common band set
-          - by searching the nearest neighbor in time
-        and compute the solar zenith angle.
-
-        :param lat: latitude (decimal degrees)
-        :param lon: longitude (decimal degrees)
-        :param alt: altitude (m)
-        :return: merged data frame and the common wavelength set
-        '''
-
-        df = pd.DataFrame()
-
-        # ''' read files with pandas format '''
-        Ed, wl_Ed = data().load_csv(self.Edf)
-        Lu0, wl_Lu0 = data().load_csv(self.Lu0f)
-
-        # ''' interpolate Ed and Lsky data upon common wavelengths'''
-        wl = wl_common
-        Lu0.columns = pd.MultiIndex.from_tuples(zip(['Lu0+'] * len(wl), wl), names=['param', 'wl'])
-        intEd = interp1d(wl_Ed, Ed.values, fill_value='extrapolate')(wl)
-        newEd = pd.DataFrame(index=Ed.index,
-                             columns=pd.MultiIndex.from_tuples(zip(['Ed'] * len(wl), wl), names=['param', 'wl']),
-                             data=intEd)
-        intLu0 = interp1d(wl_Lu0, Lu0.values, fill_value='extrapolate')(wl)
-        newLu0 = pd.DataFrame(index=Lu0.index, columns=pd.MultiIndex.from_tuples(zip(['Lu0'] * len(wl), wl),
-                             names=['param', 'wl']), data=intLu0)
-
-        # merge sensor data on time
-        df = pd.merge_asof(newLu0, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
-                           direction="nearest")
-
-        # add solar angle data and idpr
-        # compute solar zenith angle at each acquisition time
-        df['sza', ''] = np.nan
-        for index, row in df.iterrows():
-            # print index
-            sza = sunpos(index, lat, lon, alt)[1]
-            df.at[index, 'sza'] = sza
-
-        df['idpr', ''] = self.idpr
-
-        return df, wl
-
-
-class data:
-    def __init__(self, index_idx=[0]):
-        # first position should be datetime index
-        # followed by the other parameters used for indexing (e.g. azimuth, view angle)
-        self.index_idx = index_idx
-        pass
-
-    def load_csv(self, file):
-        print(file)
-        dateparse = lambda x: pd.datetime.strptime(x, '%Y-%m-%d %H:%M:%S')
-        if len(file) > 1:
-            print('Warning! Multiple files found but only one expected, taking the first file of the list:')
-            print(file)
-        file_ = file[0]
-        # df = pd.read_csv(file, date_parser=dateparse, sep=';', index_col=0, na_values=['-NAN'])
-        df = pd.read_csv(file_, sep=';', na_values=['-NAN'])
-
-        # get list of indexes
-        col = df.columns.values[self.index_idx]
-        df[col[0]] = pd.to_datetime(df[col[0]])
-
-        df.set_index(col.tolist(), inplace=True)
-        df = df.dropna(axis=1, how='all').dropna(axis=0, how='all')
-        df.columns = df.columns.astype('float')  # str.extract('(\d+)',expand=False).astype('float')
-        # resort to get data in increasing time order
-        df.sort_index(inplace=True)
-        wl = df.columns
-        return df, wl
-
-
-class reshape:
-    def __init__(self):
-        pass
-
-    def ndarray2df(self, arr, grid, names):
-        arr = np.column_stack(list(map(np.ravel, np.meshgrid(*grid))) + [arr.ravel()])
-        df = pd.DataFrame(arr, columns=names)  # e.g., names=['wind','aot','wl','sza','azi','vza','rho','rho_g'])
-        return df
-
-    def df2ndarray(self, df, name):
-        shape = list(map(len, df.index.levels))
-        arr = np.full(shape, np.nan)
-        # fill it using Numpy's advanced indexing
-        arr[tuple(df.index.codes)] = df[name].values.flat  # MultiIndex.labels was renamed to .codes in recent pandas
-        return arr
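The reader methods above all follow the same two steps: resample each sensor's spectra onto the common band set with scipy's interp1d, then pair records from different sensors by nearest timestamp with pandas.merge_asof. A self-contained sketch of those two steps, with made-up wavelengths, times and values (the real wl_common comes from trios.config):

import numpy as np
import pandas as pd
from scipy.interpolate import interp1d

wl_common = np.arange(400, 701, 50, dtype=float)              # target band set
wl_ed = np.array([405., 455., 505., 555., 605., 655., 705.])  # native Ed bands

t_ed = pd.date_range('2018-06-21 10:00:00', periods=3, freq='10s')
t_lt = t_ed + pd.Timedelta('1s')                              # slightly offset sensor clock

ed = pd.DataFrame(np.random.rand(3, wl_ed.size), index=t_ed)
lt = pd.DataFrame(np.random.rand(3, wl_common.size), index=t_lt, columns=wl_common)

# resample Ed spectra onto the common bands (rows = time, columns = wavelength)
ed_common = pd.DataFrame(interp1d(wl_ed, ed.values, fill_value='extrapolate')(wl_common),
                         index=t_ed, columns=wl_common)

# pair each Lt record with the closest Ed record within 2 seconds
merged = pd.merge_asof(lt, ed_common, left_index=True, right_index=True,
                       tolerance=pd.Timedelta('2 seconds'), direction='nearest',
                       suffixes=('_Lt', '_Ed'))
print(merged.shape)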
diff --git a/build/lib/visu/__init__.py b/build/lib/visu/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/build/lib/visu/data_visu.py b/build/lib/visu/data_visu.py
deleted file mode 100644
index 539a7b04f45c5abe0ee5a5bcf6df07df81b4b959..0000000000000000000000000000000000000000
--- a/build/lib/visu/data_visu.py
+++ /dev/null
@@ -1,349 +0,0 @@
-import base64
-import io
-import re
-from textwrap import dedent as d
-
-import dash
-import dash_core_components as dcc
-import dash_html_components as html
-import matplotlib as mpl
-from matplotlib import cm
-import numpy as np
-import pandas as pd
-import plotly.graph_objs as go
-from dash.dependencies import Input, Output
-
-
-def main():
-    app = dash.Dash()
-    # app.css.append_css('data/aeronet_layout.css')
-    styles = {
-        'pre': {
-            'border': 'thin lightgrey solid',
-            'overflowX': 'scroll'
-        }
-    }
-
-    # ------------------------------------------------------
-    # layout section
-    # ------------------------------------------------------
-
-    app.layout = html.Div([
-        html.Div([
-            html.H1(
-                'Above-water data visualization',
-                className='eight columns',
-                style={'display': 'inline-block'}),
-
-            #   file selection box
-            dcc.Upload(
-                id='upload-data',
-                children=html.Div([
-                    'Drag and Drop or ',
-                    html.A('Select Files')
-                ]),
-                style={
-                    'width': '40%',
-                    'height': '60px',
-                    'lineHeight': '60px',
-                    'borderWidth': '1px',
-                    'borderStyle': 'dashed',
-                    'borderRadius': '5px',
-                    'textAlign': 'center',
-                    'margin': '10px',
-                    'float': 'right'
-                },
-                # Allow multiple files to be uploaded
-                multiple=False)],
-            style={'margin-top': '0',
-                   'width': '100%', 'display': 'inline-block',
-                   }),
-        html.Div([
-            html.H4('File...', id='filename', style={'float': 'left', 'width': '60%'}),
-            html.Div([
-                html.H4('Color variable:', style={'margin-bottom': '0', 'width': '50%'}),
-                html.Div([
-                    dcc.Dropdown(
-                        id='color-column',
-                        value='sza',
-
-                    ),
-                    # dcc.RadioItems(
-                    #     id='color-type',
-                    #     options=[{'label': i, 'value': i} for i in ['Linear', 'Log']],
-                    #     value='Linear',
-                    #     labelStyle={'width': '25%','display': 'inline-block'})],
-                ],
-                    style={'width': '50%', 'display': 'inline-block'})],
-                style={'width': '40%',
-                       'margin-top': '0',
-                       'display': 'inline-block',
-                       'margin-left': '0%',
-                       'float': 'right'})],
-            style={'margin-block-end': '7%'}),
-
-        # Spectrum graphs
-        html.Div([
-            html.Div([
-
-                html.H4('Spectral parameter 1'),
-                dcc.Dropdown(
-                    id='spectrum1',
-                    value='Ed'),
-            ],
-                style={'width': '48.9%',
-                       'float': 'left', }),
-
-            html.Div([
-
-                html.H4('Spectral parameter 2'),
-                dcc.Dropdown(
-                    id='spectrum2',
-                    value='Lt'),
-            ],
-                style={'width': '48.9%',
-                       'float': 'right', }), ],
-            style={'width': '100%', 'margin-block-start': '1%'}),
-
-        html.Div([
-            html.Div([
-                dcc.Graph(id='graph1')],
-                # className='eight columns',
-                style={'width': '49.9%',
-                       'margin-top': '0',
-                       'display': 'inline-block',
-
-                       }),
-            html.Div([
-                dcc.Graph(id='graph2')],
-                # className='eight columns',
-                style={'width': '49.9%',
-                       'float': 'right',
-                       'display': 'inline-block',
-
-                       }),
-        ],
-
-            style={'height': '20%'},
-            # className='row'
-        ),
-
-        # Spectrum graphs
-        html.Div([
-            html.Div([
-                dcc.Graph(id='graph3')],
-                # className='eight columns',
-                style={'width': '49.9%',
-                       'margin-top': '0',
-                       'display': 'inline-block',
-
-                       }),
-            html.Div([
-                dcc.Graph(id='graph4')],
-                # className='eight columns',
-                style={'width': '49.9%',
-                       'float': 'right',
-                       'display': 'inline-block',
-
-                       }),
-        ],
-
-            # style={'display': 'inline-block'},
-            # className='row'
-        ),
-
-        html.Div([
-
-            html.H4('Spectral parameter'),
-            dcc.Dropdown(
-                id='spectrum3',
-                value='Rrs(awr)'),
-        ],
-            style={'width': '48.9%',
-                   'float': 'left', }),
-
-        html.Div([
-
-            html.H4('Spectral parameter'),
-            dcc.Dropdown(
-                id='spectrum4',
-                value='Rrs(swr)'),
-        ],
-            style={'width': '48.9%',
-                   'float': 'right', }),
-
-        html.Div([
-            dcc.Markdown(d("""
-                    **Selection Data**
-    
-                    Choose the lasso or rectangle tool in the graph's menu
-                    bar and then select points in the graph.
-                """)),
-            # html.Pre(id='selected-data', style=styles['pre']),
-        ], className='three columns'),
-
-        # hidden signal value
-        html.Div(id='dataset', style={'display': 'none'}),
-    ],
-
-        style={
-            'width': '90%',
-            'fontFamily': 'Sans-Serif',
-            'margin-left': 'auto',
-            'margin-right': 'auto'})
-
-    def figure_spectrum(df, column_name, color_column_name):
-        dff = df
-        layout = go.Layout(xaxis={'title': 'Wavelength (nm)'},
-                           yaxis={'title': column_name},
-                           margin={'l': 50, 'b': 40, 't': 20, 'r': 50},
-                           hovermode='closest',
-                           height=300,
-                           font=dict(color='#CCCCCC'),
-                           titlefont=dict(color='#CCCCCC', size=14),
-                           plot_bgcolor="#191A1A",
-                           paper_bgcolor="#020202")
-
-        if not (column_name in dff.columns.get_level_values(0)):
-            return {'data': [],
-                    'layout': layout}
-
-        parameters = df.loc[:, (color_column_name)].values
-        dff = dff.loc[:, ([column_name])]
-        wl = dff.columns.droplevel().values
-        #dff = dff.stack(level=['wl'])
-        norm = mpl.colors.Normalize(vmin=np.nanmin(parameters), vmax=np.nanmax(parameters))
-        # create a ScalarMappable and initialize a data structure
-        cmap = cm.Spectral
-        s_m = cm.ScalarMappable(cmap=cmap, norm=norm)
-        s_m.set_array([])
-        i = 0
-        trace = []
-        #for date, x_ in dff.groupby(level=0):
-        for idx, x in dff.iterrows():
-            date = str(x.name)
-            #print(idx,x)
-            trace.append(go.Scattergl(
-                x=wl,  # spectrum,
-                y=x[column_name],
-                text=date,#str(parameters[i]),#dff.index.get_level_values(0),
-                name=str(parameters[i]),
-                mode='lines',
-                marker={
-                    'size': 7,
-                    'opacity': 0.5,
-                    # 'color': 'rgba({}, {}, {}, {})'.format(*s_m.to_rgba(parameters[i]).flatten()),
-                    # x.unique(),#color': df.index.get_level_values(0),
-                    'line': {'width': 0.5, 'color': 'white'},
-                },
-                line=go.Line(color='rgba({}, {}, {}, {})'.format(*s_m.to_rgba(parameters[i]).flatten()), width=2),
-                showlegend=False
-            ))
-            i = i + 1
-
-        # spectrum = df[label['aod']].stack()
-        return {
-            'data': trace,
-            'layout': layout
-        }
-
-    def list_data(contents, level=0):
-        # content_type, content_string = contents.split(',')
-        # decoded = base64.b64decode(content_string)
-        # df = pd.read_csv(io.StringIO(decoded.decode('utf-8')), header=[0, 1, 2], index_col=0, nrows=0, parse_dates=True)
-        c = df.columns.levels[level]
-        # remove useless variables
-        c = c.drop(filter(lambda s: re.match('.*(Wave|Tri|[sS]ite|Dat)', s), c))
-        return [{'label': i, 'value': i} for i in c]
-
-    #
-    def parse_contents(contents):
-        global df
-        content_type, content_string = contents.split(',')
-        decoded = base64.b64decode(content_string)
-
-        df = pd.read_csv(io.StringIO(decoded.decode('utf-8')), header=[0, 1], index_col=0, parse_dates=True)
-
-    # ------------------------------------------------------
-    # callback section
-    # ------------------------------------------------------
-    # ---------------------------
-    # update uploaded data
-
-    @app.callback(Output('dataset', 'children'),
-                  [Input('upload-data', 'contents'),
-                   Input('upload-data', 'filename')])
-    def update_output(contents, filename):
-        print(filename)
-        parse_contents(contents)
-        return contents
-
-    @app.callback(Output('filename', 'children'),
-                  [Input('upload-data', 'filename')])
-    def show_filename(filename):
-        return 'File: ' + str(filename)
-
-    # ---------------------------
-    # update dropdown menus
-
-    @app.callback(Output('color-column', 'options'),
-                  [Input('dataset', 'children')])
-    def update_dropdown(contents):
-        return list_data(contents)
-
-    @app.callback(Output('spectrum1', 'options'),
-                  [Input('dataset', 'children')])
-    def update_dropdown(contents):
-        return list_data(contents, level=0)
-
-    @app.callback(Output('spectrum2', 'options'),
-                  [Input('dataset', 'children')])
-    def update_dropdown(contents):
-        return list_data(contents, level=0)
-
-    @app.callback(Output('spectrum3', 'options'),
-                  [Input('dataset', 'children')])
-    def update_dropdown(contents):
-        return list_data(contents, level=0)
-
-    @app.callback(Output('spectrum4', 'options'),
-                  [Input('dataset', 'children')])
-    def update_dropdown(contents):
-        return list_data(contents, level=0)
-
-    # selection from time series graph -> spectrum graph
-    @app.callback(Output('graph1', 'figure'),
-                  [Input('spectrum1', 'value'),
-                   Input('color-column', 'value'),
-                   Input('dataset', 'children')])
-    def spectrum_figure(column_name, color_column_name, void):
-        return figure_spectrum(df, column_name, color_column_name)
-
-    # selection from time series graph -> spectrum graph
-    @app.callback(Output('graph2', 'figure'),
-                  [Input('spectrum2', 'value'),
-                   Input('color-column', 'value'),
-                   Input('dataset', 'children')])
-    def spectrum_figure(column_name, color_column_name, void):
-        return figure_spectrum(df, column_name, color_column_name)
-
-    @app.callback(Output('graph3', 'figure'),
-                  [Input('spectrum3', 'value'),
-                   Input('color-column', 'value'),
-                   Input('dataset', 'children')])
-    def spectrum_figure(column_name, color_column_name, void):
-        return figure_spectrum(df, column_name, color_column_name)
-
-    # selection from time series graph -> spectrum graph
-    @app.callback(Output('graph4', 'figure'),
-                  [Input('spectrum4', 'value'),
-                   Input('color-column', 'value'),
-                   Input('dataset', 'children')])
-    def spectrum_figure(column_name, color_column_name, void):
-        return figure_spectrum(df, column_name, color_column_name)
-
-    app.run_server(port=8060)
-
-
-if __name__ == "__main__":
-    main()
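For reference, parse_contents() above receives the uploaded file as a base64 data-URL string and rebuilds the two-level (parameter, wavelength) frame that the dropdowns and figure callbacks expect. A standalone sketch of that decoding step, using a small made-up CSV payload:

import base64
import io
import pandas as pd

csv_text = (
    "param,Ed,Ed,Lt,Lt,sza\n"
    "wl,440,550,440,550,\n"
    "2018-06-21 10:00:00,1.0,1.1,0.10,0.12,35.2\n"
    "2018-06-21 10:00:10,1.0,1.2,0.11,0.13,35.1\n"
)
contents = "data:text/csv;base64," + base64.b64encode(csv_text.encode()).decode()

# same decoding as the upload callback: strip the data-URL header, decode, parse
content_type, content_string = contents.split(',')
decoded = base64.b64decode(content_string)
df = pd.read_csv(io.StringIO(decoded.decode('utf-8')), header=[0, 1], index_col=0, parse_dates=True)
print(df.columns.levels[0])   # parameters offered in the dropdown menus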
diff --git a/build/lib/visu/visu.py b/build/lib/visu/visu.py
deleted file mode 100644
index 6eb96b4c5fec6eb4a82ace3acb8810fa80374e78..0000000000000000000000000000000000000000
--- a/build/lib/visu/visu.py
+++ /dev/null
@@ -1,492 +0,0 @@
-from plotly.graph_objs import *
-import matplotlib as mpl
-import numpy as np
-
-import os
-import pandas as pd
-import flask
-import json
-import base64
-import datetime
-import io
-import plotly
-import plotly.plotly as py
-import plotly.graph_objs as go
-from textwrap import dedent as d
-
-import re
-
-import dash
-from dash.dependencies import Input, Output, State
-import dash_core_components as dcc
-import dash_html_components as html
-import dash_table_experiments as dte
-from datetime import datetime
-
-app = dash.Dash()
-# app.css.append_css('data/aeronet_layout.css')
-styles = {
-    'pre': {
-        'border': 'thin lightgrey solid',
-        'overflowX': 'scroll'
-    }
-}
-
-# ------------------------------------------------------
-# layout section
-# ------------------------------------------------------
-
-app.layout = html.Div([
-    html.Div([
-        html.H1(
-            'Above-water data visualization',
-            className='eight columns',
-            style={'display': 'inline-block'}),
-
-        #   file selection box
-        dcc.Upload(
-            id='upload-data',
-            children=html.Div([
-                'Drag and Drop or ',
-                html.A('Select Files')
-            ]),
-            style={
-                'width': '40%',
-                'height': '60px',
-                'lineHeight': '60px',
-                'borderWidth': '1px',
-                'borderStyle': 'dashed',
-                'borderRadius': '5px',
-                'textAlign': 'center',
-                'margin': '10px',
-                'float': 'right'
-            },
-            # Allow multiple files to be uploaded
-            multiple=False)],
-        style={'margin-top': '0',
-               'width': '100%', 'display': 'inline-block',
-               }),
-    html.Div([
-        html.H4('File...', id='filename',style={'float': 'left','width': '60%'}),
-        html.Div([
-            html.H4('Color variable:',style={'margin-bottom': '0','width':'50%'}),
-            html.Div([
-                dcc.Dropdown(
-                    id='color-column',
-                    value='sza',
-
-                ),
-                # dcc.RadioItems(
-                #     id='color-type',
-                #     options=[{'label': i, 'value': i} for i in ['Linear', 'Log']],
-                #     value='Linear',
-                #     labelStyle={'width': '25%','display': 'inline-block'})],
-            ],
-            style={'width': '50%','display': 'inline-block'})],
-            style={'width': '40%',
-                   'margin-top': '0',
-                   'display': 'inline-block',
-                   'margin-left': '0%',
-                   'float': 'right'})],
-        style={'margin-block-end': '7%'}),
-
-    # Spectrum graphs
-    html.Div([
-        html.Div([
-
-            html.H4('Spectral parameter 1'),
-            dcc.Dropdown(
-                id='spectrum1',
-                value='Ed'),
-        ],
-            style={'width': '48.9%',
-                   'float': 'left', }),
-
-        html.Div([
-
-            html.H4('Spectral parameter 2'),
-            dcc.Dropdown(
-                id='spectrum2',
-                value='Lsky'),
-        ],
-            style={'width': '48.9%',
-                   'float': 'right', }),],
-        style={'width':'100%','margin-block-start': '1%'}),
-
-
-        html.Div([
-            dcc.Graph(id='graph1')],
-            # className='eight columns',
-            style={'width': '59.9%',
-                   'margin-top': '0',
-                   'display': 'inline-block',
-
-                   }),
-        html.Div([
-            dcc.Graph(id='graph2')],
-            # className='eight columns',
-            style={'width': '59.9%',
-                   'float': 'left',
-                   'margin-top': '1%',
-                   'display': 'inline-block',
-
-                   }),
-         # className='row'
-
-
-    # Spectrum graphs
-    html.Div([
-        html.Div([
-            dcc.Graph(id='graph3')],
-            # className='eight columns',
-            style={'width': '59.9%',
-                   'margin-top': '1%',
-                   'display': 'inline-block',
-
-                   }),
-
-    ],
-
-        # style={'display': 'inline-block'},
-        # className='row'
-    ),
-
-    html.Div([
-
-        html.H4('Spectral parameter'),
-        dcc.Dropdown(
-            id='spectrum3',
-            value='Lt'),
-    ],
-        style={'width': '48.9%',
-               'float': 'left', }),
-
-
-    html.Div([
-        dcc.Markdown(d("""
-                **Selection Data**
-
-                Choose the lasso or rectangle tool in the graph's menu
-                bar and then select points in the graph.
-            """)),
-        # html.Pre(id='selected-data', style=styles['pre']),
-    ], className='three columns'),
-
-    # hidden signal value
-    html.Div(id='dataset', style={'display': 'none'}),
-],
-
-    style={
-        'width': '90%',
-        'fontFamily': 'Sans-Serif',
-        'margin-left': 'auto',
-        'margin-right': 'auto'})
-
-
-
-
-def figure_spectrum_v1(df, column_name, color_column_name):
-    dff = df
-    parameters = df.loc[:, (color_column_name)].values
-    dff = dff.loc[:, ([column_name])]
-    wl = dff.columns.droplevel().values
-    dff = dff.stack(level=['wl'])
-    norm = mpl.colors.Normalize(vmin=np.nanmin(parameters), vmax=np.nanmax(parameters))
-    # create a ScalarMappable and initialize a data structure
-    cmap = mpl.cm.Spectral
-    s_m = mpl.cm.ScalarMappable(cmap=cmap, norm=norm)
-    s_m.set_array([])
-    i = 0
-    trace = []
-    for date, x in dff.groupby(level=0):
-        trace.append(Scattergl(
-            x=wl,  # spectrum,
-            y=x[column_name].values,
-            text="depth="+str(float(parameters[i])/100)+" m",#x.index.get_level_values(0),
-            mode='lines',
-            marker={
-                'size': 7,
-                'opacity': 0.5,
-                # 'color': 'rgba({}, {}, {}, {})'.format(*s_m.to_rgba(parameters[i]).flatten()),
-                # x.unique(),#color': df.index.get_level_values(0),
-                'line': {'width': 0.5, 'color': 'white'},
-            },
-            line=Line(color='rgba({}, {}, {}, {})'.format(*s_m.to_rgba(parameters[i])), width=2),
-            showlegend=False
-        ))
-        i = i + 1
-
-    # spectrum = df[label['aod']].stack()
-    return {
-        'data': trace,
-        'layout': Layout(
-            xaxis={
-                'title': 'Wavelength (nm)',
-
-            },
-            yaxis={
-                'title': column_name,
-
-            },
-            margin={'l': 50, 'b': 40, 't': 20, 'r': 50},
-            hovermode='closest',
-
-            height=400,
-            font=dict(color='#CCCCCC'),
-            titlefont=dict(color='#CCCCCC', size=14),
-
-            plot_bgcolor="#191A1A",
-            paper_bgcolor="#020202",
-        )
-    }
-
-
-def figure_spectrum(df, column_name, color_column_name='depth'):
-
-
-    dff = df
-    if color_column_name=='depth':
-        parameters = df.loc[:, (color_column_name)].values
-        dff = dff.loc[:, ([column_name,color_column_name])]
-        dff.set_index('depth', append=True, inplace=True)
-
-    wl = dff.columns.droplevel().values
-    dff = dff.stack(level=['wl'])
-    # norm = mpl.colors.Normalize(vmin=np.nanmin(parameters), vmax=np.nanmax(parameters))
-    # # create a ScalarMappable and initialize a data structure
-    # cmap = mpl.cm.Spectral
-    # s_m = mpl.cm.ScalarMappable(cmap=cmap, norm=norm)
-    # s_m.set_array([])
-    i = 0
-    trace = []
-    colors = ['blue',  'green','grey', 'yellow', 'orange', 'red', 'purple']
-
-
-    group = np.array(dff.index.get_level_values(color_column_name),dtype=float)/100
-
-    opts = []
-    for i in range(0, len(colors)):
-        opt = {'target': np.unique(group)[i], 'value': dict(marker=dict(color=colors[i]))}
-        opts.append(opt)
-
-    aggs = ["all","avg","median","mode","rms","stddev","min","max"]
-    agg_func = []
-    for i in range(0, len(aggs)):
-        if i == 0:
-            agg = dict(
-                args=['transforms[0].aggregations[0].func',0],
-                label=aggs[i],
-                method='restyle'
-            )
-        else:
-            agg = dict(
-                args=[ 'transforms[0].aggregations[0].func',aggs[i]],
-                label=aggs[i],
-                method='restyle'
-            )
-
-        agg_func.append(agg)
-
-    trace=[dict(
-        type = 'scatter',
-        x=dff.index.get_level_values('wl'),  # spectrum,
-        y=dff[column_name].values,
-        text=dff.index.get_level_values(0),#group,#x.index.get_level_values(0),
-        hoverinfo = 'text',
-        name=dff.index.get_level_values(0),
-        mode='lines',
-        marker={
-            'size': 7,
-            'opacity': 0.5,
-            # 'color': 'rgba({}, {}, {}, {})'.format(*s_m.to_rgba(parameters[i]).flatten()),
-            # x.unique(),#color': df.index.get_level_values(0),
-            'line': {'width': 0.5, 'color': 'white'},
-        },
-        #line=Line(color='rgba({}, {}, {}, {})'.format(*s_m.to_rgba(parameters[i])), width=2),
-        #showlegend=False,
-
-        transforms = [
-
-
-#            {'type': 'groupby', 'groups': dff.index.get_level_values(0),'showlegend': False},
-
-            {'type': 'aggregate', 'groups': dff.index.get_level_values('wl'), 'aggregations': [
-             dict(target='y', func='avg', enabled = True),]},
-            {'type': 'groupby', 'groups': group, 'styles': opts},
-         ]
-    )]
-
-    # spectrum = df[label['aod']].stack()
-    return {
-        'data': trace,
-        'layout':  dict(
-            xaxis={
-                'title': 'Wavelength (nm)',
-
-            },
-            yaxis={
-                'title': column_name,
-
-            },
-            margin={'l': 50, 'b': 40, 't': 20, 'r': 50},
-            hovermode='closest',
-
-            height=400,
-            font=dict(color='#CCCCCC'),
-            titlefont=dict(color='#CCCCCC', size=14),
-
-            plot_bgcolor="#191A1A",
-            paper_bgcolor="#020202",
-            updatemenus = [dict(
-                x = 0.85,
-                y = 1.15,
-                xref = 'paper',
-                yref = 'paper',
-                yanchor = 'top',
-                active = 0,
-                showactive = True,
-                buttons = agg_func
-  )]
-        )
-    }
-
-
-def figure_profile(df, var_col='Luz', depth_col='depth_Luz'):
-
-
-    dff = df
-
-    dff = dff.loc[:, ([var_col,depth_col])]
-    dff.set_index(depth_col, append=True, inplace=True)
-
-    wl = dff.columns.droplevel().values
-    dff = dff.stack(level=['wl'])
-    norm = mpl.colors.Normalize(vmin=np.nanmin(wl), vmax=np.nanmax(wl))
-    # create a ScalarMappable and initialize a data structure
-    cmap = mpl.cm.Spectral
-    s_m = mpl.cm.ScalarMappable(cmap=cmap, norm=norm)
-    s_m.set_array([])
-    i = 0
-    trace = []
-    colors = ['blue',  'green','grey', 'yellow', 'orange', 'red', 'purple']
-
-
-    group = np.array(dff.index.get_level_values('wl'),dtype=int)
-
-    opts = []
-    for i in range(0, len(colors)):
-        opt = {'target': np.unique(group)[i], 'value': dict(marker=dict(color=colors[i]))}
-        opts.append(opt)
-
-    aggs = ["all","avg","median","mode","rms","stddev","min","max"]
-    agg_func = []
-    for i in range(0, len(aggs)):
-        if i == 0:
-            agg = dict(
-                args=['transforms[0].aggregations[0].func',0],
-                label=aggs[i],
-                method='restyle'
-            )
-        else:
-            agg = dict(
-                args=[ 'transforms[0].aggregations[0].func',aggs[i]],
-                label=aggs[i],
-                method='restyle'
-            )
-
-        agg_func.append(agg)
-
-    trace=[dict(
-        type = 'scattergl',
-        x=dff[var_col].values,  # spectrum,
-        y=dff.index.get_level_values(1),
-        text=dff.index.get_level_values(2),#group,#x.index.get_level_values(0),
-        hoverinfo = 'text',
-        name=dff.index.get_level_values(0),
-        mode='markers',
-        marker=dict(color=group,
-                    showscale=True,size= 7,
-                    colorscale='Jet',
-                    opacity= 0.5),
-        # marker={
-        #     'size': 7,
-        #     'opacity': 0.5,
-        #     'color': 'rgba({}, {}, {}, {})'.format(*s_m.to_rgba(wl).flatten()),
-        #     # x.unique(),#color': df.index.get_level_values(0),
-        #     'line': {'width': 0.5, 'color': 'white'},
-        # },
-        #line=Line(color='rgba({}, {}, {}, {})'.format(*s_m.to_rgba(wl)), width=2),
-        #showlegend=False,
-
-#         transforms = [
-#
-#
-# #            {'type': 'groupby', 'groups': dff.index.get_level_values(0),'showlegend': False},
-#
-#             # {'type': 'aggregate', 'groups': dff.index.get_level_values('wl'), 'aggregations': [
-#             #  dict(target='y', func='avg', enabled = True),]},
-#             {'type': 'groupby', 'groups': group, 'styles': opts},
-#          ]
-    )]
-
-    # spectrum = df[label['aod']].stack()
-    return {
-        'data': trace,
-        'layout':  dict(
-            xaxis={
-                'title': var_col,
-
-            },
-            yaxis={
-                'title': depth_col,
-
-            },
-            margin={'l': 50, 'b': 40, 't': 20, 'r': 50},
-            hovermode='closest',
-
-            height=400,
-            font=dict(color='#CCCCCC'),
-            titlefont=dict(color='#CCCCCC', size=14),
-
-            plot_bgcolor="#191A1A",
-            paper_bgcolor="#020202",
-            updatemenus = [dict(
-                x = 0.85,
-                y = 1.15,
-                xref = 'paper',
-                yref = 'paper',
-                yanchor = 'top',
-                active = 0,
-                showactive = True,
-                buttons = agg_func
-  )]
-        )
-    }
-
-@app.callback(Output('graph1', 'figure'),
-              [Input('spectrum1', 'value'),
-               Input('color-column', 'value'),
-               Input('dataset', 'children')])
-def spectrum_figure(column_name, color_column_name,void):
-
-    return figure_spectrum(df, 'Edz','depth')
-
-@app.callback(Output('graph2', 'figure'),
-              [Input('spectrum2', 'value'),
-               Input('color-column', 'value'),
-               Input('dataset', 'children')])
-def spectrum_figure(column_name, color_column_name,void):
-
-    return figure_spectrum(df, 'Luz','depth')
-
-@app.callback(Output('graph3', 'figure'),
-              [Input('spectrum3', 'value'),
-               Input('color-column', 'value'),
-               Input('dataset', 'children')])
-def spectrum_figure(column_name, color_column_name,void):
-
-    return figure_spectrum(df, 'Ed','depth')
-
-app.run_server()
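Both figure builders in the file above colour each trace by mapping a scalar (depth, solar zenith angle, ...) through matplotlib's ScalarMappable and handing plotly a CSS 'rgba(...)' string. A standalone sketch of that colour mapping with made-up depth values; note that to_rgba() returns channels in [0, 1], which are scaled to 0-255 here to produce a conventional CSS rgba string:

import matplotlib as mpl
import numpy as np
from matplotlib import cm

depths = np.array([0.5, 1.0, 2.5, 4.0])   # made-up depths in metres
norm = mpl.colors.Normalize(vmin=np.nanmin(depths), vmax=np.nanmax(depths))
s_m = cm.ScalarMappable(cmap=cm.Spectral, norm=norm)
s_m.set_array([])

for v in depths:
    r, g, b, a = s_m.to_rgba(v)            # floats in [0, 1]
    print('rgba({:.0f}, {:.0f}, {:.0f}, {:.2f})'.format(r * 255, g * 255, b * 255, a))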
diff --git a/trios.egg-info/top_level.txt b/trios.egg-info/top_level.txt
index 44d99fe34214daa471205cf88fe7da594f57c215..4bef0dd465ae1e6518a9200db1f18586c40f52a4 100644
--- a/trios.egg-info/top_level.txt
+++ b/trios.egg-info/top_level.txt
@@ -1,5 +1,4 @@
 aux
-build
 simulation
 trios
 utils