
Commit c9a02a96 authored by Harmel Tristan

reorganize package trios and reproject all data onto common wavelengths (set in config.py)

parent 6b4880fc
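The key functional change is that the Ed, Lsky, Lt (and in-water Edz/Luz) spectra are no longer interpolated onto one sensor's native wavelengths but onto a shared grid, wl_common, defined in trios/config.py. A minimal, self-contained sketch of that reprojection step, using the same pandas/scipy calls as the hunks below (the synthetic Ed spectra, timestamps and values are illustrative only):

import numpy as np
import pandas as pd
from scipy.interpolate import interp1d

# common wavelength grid as set in the new trios/config.py: 320-950 nm, every 3 nm
wl_step = 3
wl_common = np.arange(320, 950 + wl_step, wl_step)

# synthetic Ed spectra on the sensor's native wavelengths
wl_Ed = np.linspace(305, 1050, 190)
Ed = pd.DataFrame(np.random.rand(5, wl_Ed.size),
                  index=pd.to_datetime("2018-06-01 10:00:00") + pd.to_timedelta(np.arange(5), unit="s"))

# reproject every spectrum onto wl_common (edges are extrapolated, as in the package code)
intEd = interp1d(wl_Ed, Ed.values, fill_value='extrapolate')(wl_common)
newEd = pd.DataFrame(index=Ed.index,
                     columns=pd.MultiIndex.from_tuples(zip(['Ed'] * len(wl_common), wl_common),
                                                       names=['param', 'wl']),
                     data=intEd)
print(newEd.shape)  # (5, 211)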
......@@ -9,7 +9,7 @@ from plotly.graph_objs import *
from utils.utils import reshape as r
import utils.auxdata as ua
from config import *
from ..config import *
class awr_process:
......
......@@ -14,7 +14,7 @@ from scipy.interpolate import interp1d
from utils.sunposition import sunpos
import utils.utils as u
import utils.auxdata as ua
from process.process import *
from trios.process import *
coordf = glob.glob("/DATA/OBS2CO/data/info/mesures_in_situ.csv")[0]
coords = pd.read_csv(coordf, sep=';')
......
......@@ -13,7 +13,7 @@ from scipy.interpolate import interp1d
from utils.sunposition import sunpos
import utils.utils as u
from process.process import *
from trios.process import *
coordf = glob.glob("/DATA/OBS2CO/data/info/mesures_in_situ.csv")[0]
......
......@@ -3,7 +3,7 @@ import numpy as np
from scipy.interpolate import interp1d
from utils.sunposition import sunpos
from trios.config import *
class awr_data:
'''
......@@ -46,18 +46,23 @@ class awr_data:
Lsky, wl_Lsky = d.load_csv(self.Lskyf)
Lt, wl_Lt = d.load_csv(self.Ltf)
# ''' interpolate Ed and Lsky data upon Lt wavelength'''
wl = wl_Lt
# ''' interpolate Ed, Lt and Lsky data upon common wavelength'''
wl = wl_common
Lt.columns = pd.MultiIndex.from_tuples(zip(['Lt'] * len(wl), wl), names=['param', 'wl'])
intEd = interp1d(wl_Ed, Ed.values, fill_value='extrapolate')(wl)
newEd = pd.DataFrame(index=Ed.index,
columns=pd.MultiIndex.from_tuples(zip(['Ed'] * len(wl), wl), names=['param', 'wl']),
data=intEd)
newEd = pd.DataFrame(index=Ed.index,columns=pd.MultiIndex.from_tuples(zip(['Ed'] * len(wl), wl),
names=['param', 'wl']),data=intEd)
intLt = interp1d(wl_Lt, Lt.values, fill_value='extrapolate')(wl)
newLt = pd.DataFrame(index=Lt.index,columns=pd.MultiIndex.from_tuples(zip(['Lt'] * len(wl), wl),
names=['param', 'wl']),data=intLt)
intLsky = interp1d(wl_Lsky, Lsky.values, fill_value='extrapolate')(wl)
newLsky = pd.DataFrame(index=Lsky.index, columns=pd.MultiIndex.from_tuples(zip(['Lsky'] * len(wl), wl),
names=['param', 'wl']), data=intLsky)
names=['param', 'wl']), data=intLsky)
# merge sensor data on time
df = pd.merge_asof(Lt, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
df = pd.merge_asof(newLt, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
direction="nearest")
df = pd.merge_asof(df, newLsky, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
direction="nearest")
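After reprojection, the sensors are aligned in time with pd.merge_asof rather than an exact index join, which tolerates small clock offsets between instruments. A self-contained sketch of that alignment (timestamps and values are synthetic):

import numpy as np
import pandas as pd

# two sensors sampled at slightly different instants
t_Lt = pd.to_datetime("2018-06-01 10:00:00") + pd.to_timedelta(np.arange(4) * 10, unit="s")
t_Ed = t_Lt + pd.Timedelta(milliseconds=1200)
Lt = pd.DataFrame({"Lt": [1.2, 1.1, 1.3, 1.0]}, index=t_Lt)
Ed = pd.DataFrame({"Ed": [95.0, 96.0, 94.0, 97.0]}, index=t_Ed)

# keep, for each Lt record, the nearest Ed record within 2 s, as in the reader above
df = pd.merge_asof(Lt, Ed, left_index=True, right_index=True,
                   tolerance=pd.Timedelta("2 seconds"), direction="nearest")
print(df)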
......@@ -126,32 +131,36 @@ class iwr_data:
# Edz.mask(Edz<0,inplace=True)
# Luz.mask(Luz<0,inplace=True)
# copy depth data to Ed frame on date index
# Ed.index = Ed.index.droplevel(level=1)
#''' interpolate Ed and Lsky data upon Lt wavelength'''
wl = wl_Luz
#''' interpolate Ed, Edz and Luz data upon common wavelength'''
wl = wl_common
Luz.columns = pd.MultiIndex.from_tuples(list(zip(['Luz'] * len(wl), wl)), names=['param', 'wl'])
intEd = interp1d(wl_Ed, Ed.values, fill_value='extrapolate')(wl)
newEd = pd.DataFrame(index=Ed.index.get_level_values(0),
columns=pd.MultiIndex.from_tuples(list(zip(['Ed'] * len(wl), wl)), names=['param', 'wl']),
data=intEd)
intEdz = interp1d(wl_Edz, Edz.values, fill_value='extrapolate')(wl)
newEdz = pd.DataFrame(index=Edz.index, columns=pd.MultiIndex.from_tuples(list(zip(['Edz'] * len(wl), wl)),
names=['param', 'wl']), data=intEdz)
names=['param', 'wl']), data=intEdz)
intLuz = interp1d(wl_Luz, Luz.values, fill_value='extrapolate')(wl)
newLuz = pd.DataFrame(index=Luz.index, columns=pd.MultiIndex.from_tuples(list(zip(['Luz'] * len(wl), wl)),
names=['param', 'wl']), data=intLuz)
# correct depth data for sensor to sensor distance
Luz.reset_index(level=1, inplace=True)
Luz.iloc[:, 0] = Luz.iloc[:, 0] + delta_Lu_depth
newLuz.reset_index(level=1, inplace=True)
newLuz.iloc[:, 0] = Luz.iloc[:, 0] + delta_Lu_depth
# newEd.reset_index(level=1,inplace=True)
newEdz.reset_index(level=1, inplace=True)
newEdz.iloc[:, 0] = newEdz.iloc[:, 0] + delta_Edz_depth
# merge sensor data on time
df = pd.merge_asof(Luz, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
df = pd.merge_asof(newLuz, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
direction="nearest")
df = pd.merge_asof(df, newEdz, left_index=True, right_index=True, suffixes=('_Luz', '_Edz'),
tolerance=pd.Timedelta("2 seconds"),
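In the in-water reader, the depth level of the (time, depth) index is pulled back out as a column and shifted by the sensor-to-sensor distances before the frames are merged on time. A small sketch of that step (the offset value and the data are illustrative, not the instrument's real geometry):

import pandas as pd

# frame indexed by (time, depth), as the Edz data are after loading
idx = pd.MultiIndex.from_tuples(
    [(pd.Timestamp("2018-06-01 10:00:00"), 0.5),
     (pd.Timestamp("2018-06-01 10:00:10"), 1.0)],
    names=['time', 'depth'])
newEdz = pd.DataFrame({'Edz': [1.8, 1.1]}, index=idx)

# bring depth back as the first column, then apply the assumed head-to-depth-sensor offset
delta_Edz_depth = -0.28  # m, illustrative value only
newEdz.reset_index(level=1, inplace=True)
newEdz.iloc[:, 0] = newEdz.iloc[:, 0] + delta_Edz_depth
print(newEdz)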
......@@ -174,7 +183,7 @@ class iwr_data:
#
# dateparse = lambda x: pd.datetime.strptime(x, '%Y-%m-%d %H:%M:%S')
# if len(file) > 1:
# print('Warning! Multiple files found but only one expected, process first file of the list:')
# print('Warning! Multiple files found but only one expected, trios first file of the list:')
# print(file)
# file = file[0]
# df = pd.read_csv(file, sep=';', index_col=[1, 0], na_values=['-NAN'])
......@@ -225,16 +234,19 @@ class swr_data:
Ed, wl_Ed = data().load_csv(self.Edf)
Lu0, wl_Lu0 = data().load_csv(self.Lu0f)
# ''' interpolate Ed and Lsky data upon Lt wavelength'''
wl = wl_Lu0
# ''' interpolate Ed and Lsky data upon common wavelengths'''
wl = wl_common
Lu0.columns = pd.MultiIndex.from_tuples(zip(['Lu0+'] * len(wl), wl), names=['param', 'wl'])
intEd = interp1d(wl_Ed, Ed.values, fill_value='extrapolate')(wl)
newEd = pd.DataFrame(index=Ed.index,
columns=pd.MultiIndex.from_tuples(zip(['Ed'] * len(wl), wl), names=['param', 'wl']),
data=intEd)
intLu0 = interp1d(wl_Lu0, Lu0.values, fill_value='extrapolate')(wl)
newLu0 = pd.DataFrame(index=Lu0.index, columns=pd.MultiIndex.from_tuples(zip(['Lu0'] * len(wl), wl),
names=['param', 'wl']), data=intLu0)
# merge sensor data on time
df = pd.merge_asof(Lu0, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
df = pd.merge_asof(newLu0, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
direction="nearest")
# add solar angle data and idpr
......@@ -261,7 +273,7 @@ class data:
print(file)
dateparse = lambda x: pd.datetime.strptime(x, '%Y-%m-%d %H:%M:%S')
if len(file) > 1:
print('Warning! Multiple files found but only one expected, process first file of the list:')
print('Warning! Multiple files found but only one expected, trios first file of the list:')
print(file)
file_ = file[0]
# df = pd.read_csv(file, date_parser=dateparse, sep=';', index_col=0, na_values=['-NAN'])
......
'''where you can set absolute and relative path used in the package'''
import os
root = os.path.dirname(os.path.abspath(__file__))
M2015_file = os.path.join(root, 'aux/rhoTable_Mobley2015.csv')
M1999_file = os.path.join(root, 'aux/rhoTable_Mobley1999.csv')
rhosoaa_fine_file = os.path.join(root, 'aux/surface_reflectance_factor_rho_fine_aerosol_rg0.06_sig0.46.csv')
rhosoaa_coarse_file = os.path.join(root, 'aux/surface_reflectance_factor_rho_coarse_aerosol_rg0.60_sig0.60.csv')
iopw_file = os.path.join(root, 'aux/water_coef.txt')
F0_file = os.path.join(root, 'aux/Thuillier_2003_0.3nm.dat')
......@@ -11,7 +11,7 @@ import plotly
import plotly.graph_objs as go
import utils.utils as u
from process.process import *
from trios.process import *
# import aeronet
# from config import *
......@@ -83,7 +83,7 @@ for idpr in idprs:
# iwr = u.iwr_data(idpr, iwrfiles)
# if iwr.file:
# df, wl = iwr.reader(c[1], c[2], c[3])
# Rrs = iwr_process(df, wl).process()
# Rrs = iwr_process(df, wl).trios()
# dff = pd.concat([dff, Rrs], axis=1)
# writing output file
......
......@@ -4,7 +4,7 @@ from grs import __version__
setup(
name='trios',
version=__version__,
packages=find_packages(),
packages=find_packages(exclude=['build']),
package_data={'': ['*.so']},
# # If any package contains *.txt files, include them:
# '': ['*.txt'],
......@@ -17,7 +17,7 @@ setup(
license='MIT',
author='T. Harmel',
author_email='tristan.harmel@ntymail.com',
description='Package to help process TriOS radiometer data for various above-water or in-water setups',
description='Package to help trios TriOS radiometer data for various above-water or in-water setups',
# TODO update Dependent packages (distributions)
install_requires=['dash','dash_core_components','dash_html_components','pandas', 'scipy', 'numpy', 'netCDF4', 'matplotlib', 'docopt', 'GDAL', 'python-dateutil'],
......
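The packaging change replaces find_packages() with find_packages(exclude=['build']) so that the build/ directory now present at the repository root (see the top-level listing further down) is not installed as a package. A cut-down, hedged sketch of the relevant setup() call (version handling simplified, metadata and dependencies abbreviated):

from setuptools import setup, find_packages

setup(
    name='trios',
    version='1.1.1',                               # illustrative; the real file imports __version__
    packages=find_packages(exclude=['build']),     # skip the stray build/ directory
    package_data={'': ['*.so']},
    install_requires=['pandas', 'scipy', 'numpy'], # abbreviated; see the full list above
)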
......@@ -4,7 +4,7 @@ import numpy as np
import cmocean
import matplotlib.pyplot as plt
from process.process import *
from trios.process import *
dirfig = os.path.abspath('/DATA/projet/gilerson/fig')
awr = awr_process()
......
Metadata-Version: 1.0
Name: trios
Version: 1.1.1
Summary: Package to help process TriOS radiometer data for various above-water or in-water setups
Summary: Package to help trios TriOS radiometer data for various above-water or in-water setups
Home-page: https://gitlab.irstea.fr/ETL-TELQUEL/etl/tree/dev/preprocessing/trios
Author: T. Harmel
Author-email: tristan.harmel@ntymail.com
......
......@@ -2,14 +2,17 @@ README.md
setup.py
aux/__init__.py
aux/sensors_spec.py
process/__init__.py
process/process.py
process/process_compar_awr.py
process/process_sabine.py
process/process_test_setup.py
process/run_iwr.py
simulation/__init__.py
simulation/rho_snapshot.py
trios/__init__.py
trios/config.py
trios/process.py
trios/process_compar_awr.py
trios/process_sabine.py
trios/process_test_setup.py
trios/run_awr.py
trios/run_iwr.py
trios/run_swr.py
trios.egg-info/PKG-INFO
trios.egg-info/SOURCES.txt
trios.egg-info/dependency_links.txt
......
aux
build
process
simulation
trios
utils
visu
'''where you can set absolute and relative path used in the package'''
import os
import numpy as np
root = os.path.dirname(os.path.abspath(__file__))
M2015_file = os.path.join(root, '../aux/rhoTable_Mobley2015.csv')
M1999_file = os.path.join(root, '../aux/rhoTable_Mobley1999.csv')
rhosoaa_fine_file = os.path.join(root, '../aux/surface_reflectance_factor_rho_fine_aerosol_rg0.06_sig0.46.csv')
rhosoaa_coarse_file = os.path.join(root, '../aux/surface_reflectance_factor_rho_coarse_aerosol_rg0.60_sig0.60.csv')
iopw_file = os.path.join(root, '../aux/water_coef.txt')
F0_file = os.path.join(root, '../aux/Thuillier_2003_0.3nm.dat')
# set common wavelengths on which spectra are reprojected
# default 320nm to 950 nm each wl_step nm
wl_step = 3
wl_common = np.arange(320,950+wl_step,wl_step)
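Since config.py now sits inside the trios/ package while the auxiliary tables stay at the repository root, the aux paths gained a '../' prefix. A short sketch of how such a path resolves (the repository location below is an assumption):

import os

root = '/home/user/trios_repo/trios'   # assumed location of trios/config.py
M2015_file = os.path.join(root, '../aux/rhoTable_Mobley2015.csv')
print(os.path.normpath(M2015_file))    # /home/user/trios_repo/aux/rhoTable_Mobley2015.csv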
import numpy as np
import pandas as pd
from scipy import interpolate, integrate
from scipy.optimize import curve_fit
import plotly.plotly as py
# import plotly.graph_objs as go
from plotly.graph_objs import *
from utils.utils import reshape as r
import utils.auxdata as ua
from config import *
from trios.config import *
class awr_process:
......
......@@ -14,7 +14,7 @@ from scipy.interpolate import interp1d
from utils.sunposition import sunpos
import utils.utils as u
import utils.auxdata as ua
from process.process import *
from trios.process import *
coordf = glob.glob("/DATA/OBS2CO/data/info/mesures_in_situ.csv")[0]
coords = pd.read_csv(coordf, sep=';')
......
......@@ -12,7 +12,7 @@ import plotly.graph_objs as go
import cmocean
import utils.utils as u
from process.process import *
from trios.process import *
from utils.sunposition import sunpos
dir = '/DATA/OBS2CO/data/sabine/data/raw'
......
......@@ -13,7 +13,7 @@ from scipy.interpolate import interp1d
from utils.sunposition import sunpos
import utils.utils as u
from process.process import *
from trios.process import *
coordf = glob.glob("/DATA/OBS2CO/data/info/mesures_in_situ.csv")[0]
......
import os
import pandas as pd
import numpy as np
import glob
import re
import datetime
from scipy.interpolate import interp1d
from utils.sunposition import sunpos
import utils.utils as u
import utils.auxdata as ua
from trios.process import *
odir = os.path.abspath('/DATA/OBS2CO/data/trios/above_water')
dirfig = os.path.abspath('/DATA/OBS2CO/data/trios/fig')
awrfiles = glob.glob("/DATA/OBS2CO/data/trios/raw/aw*idpr*.csv")
coordf = glob.glob("/DATA/OBS2CO/data/info/mesures_in_situ_test.csv")[0]
coords = pd.read_csv(coordf, sep=';')
coords['Date_prel']=pd.to_datetime(coords['Date_prel'])
coords['h_debut']=coords['Date_prel'] + pd.to_timedelta(coords['h_debut'])
coords['h_fin']=coords['Date_prel'] + pd.to_timedelta(coords['h_fin'])
# TODO warning: time is set as start_time + 15 minutes (can be more accurate)
coords['Date_prel']= coords['h_debut']+datetime.timedelta(minutes = 15)
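# --- illustrative sketch, not part of the original script: the station datetime columns above
# --- combine the sampling date (Date_prel) with start/end times (h_debut/h_fin), and the
# --- acquisition time is approximated as start + 15 min; sample values below are made up
demo_coords = pd.DataFrame({'Date_prel': ['2018-06-01'], 'h_debut': ['10:05:00'], 'h_fin': ['10:35:00']})
demo_coords['Date_prel'] = pd.to_datetime(demo_coords['Date_prel'])
demo_coords['h_debut'] = demo_coords['Date_prel'] + pd.to_timedelta(demo_coords['h_debut'])
demo_coords['h_fin'] = demo_coords['Date_prel'] + pd.to_timedelta(demo_coords['h_fin'])
demo_coords['Date_prel'] = demo_coords['h_debut'] + datetime.timedelta(minutes=15)
print(demo_coords)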
iopw = ua.iopw()
iopw.load_iopw()
def add_curve(ax, x, mean, std, c='red', label=''):
ax.plot(x, mean, linestyle='solid', c=c, lw=2.5,
alpha=0.8, label=label)
ax.fill_between(x,
mean - std,
mean + std, alpha=0.35, color=c)
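# --- illustrative usage of the add_curve helper defined just above (not part of the original
# --- script); spectrum, figure size and output path are assumptions
import numpy as np
import matplotlib.pyplot as plt

wl_demo = np.arange(320, 953, 3)
rrs_mean = 0.01 * np.exp(-((wl_demo - 560.0) / 80.0) ** 2)   # synthetic Rrs-like spectrum
rrs_std = 0.1 * rrs_mean

fig_demo, ax_demo = plt.subplots(figsize=(7, 5))
add_curve(ax_demo, wl_demo, rrs_mean, rrs_std, c='black', label='awr demo')
ax_demo.set_xlabel('Wavelength (nm)')
ax_demo.set_ylabel('Rrs (sr-1)')
ax_demo.legend()
fig_demo.savefig('/tmp/add_curve_demo.png')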
idpr = '167'
# get idpr numbers
idprs = np.unique([re.findall(r'idpr(\d+)', x)[0] for x in awrfiles])
#idprs = np.array(['170'])
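# --- illustrative, not part of the original script: how idpr station numbers are extracted
# --- from the raw file names (the sample names below are made up)
import re
import numpy as np
demo_files = ['raw/awr_idpr167_Lt.csv', 'raw/awr_idpr167_Ed.csv', 'raw/awr_idpr170_Lt.csv']
demo_idprs = np.unique([re.findall(r'idpr(\d+)', f)[0] for f in demo_files])
print(demo_idprs)   # ['167' '170']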
# loop over idpr
for idpr in idprs:
c = coords[coords.ID_prel == int(idpr)] # .values[0]
date=c['Date_prel'].dt.strftime('%Y%m%d')
lat = c['Lat'].values[0]
lon = c['Lon'].values[0]
alt = 0 # c['Altitude']
name = c['ID_lac'].values[0]
ofile=os.path.join(odir,'Rrs_awr_'+date.values[0]+'_idpr'+idpr+'_'+name+'.csv')
header = c.stack(dropna=False)
header.index = header.index.droplevel()
header.to_csv(ofile, header=None)
# -----------------------------------------------
# AWR processing
# -----------------------------------------------
azi = 135
vza = 40
awr = u.awr_data(idpr, awrfiles)
if awr.Edf:
index_idx = [0]
d = u.data(index_idx)
Ed, wl_Ed = d.load_csv(awr.Edf)
Lsky, wl_Lsky = d.load_csv(awr.Lskyf)
Lt, wl_Lt = d.load_csv(awr.Ltf)
# ''' interpolate Ed and Lsky data upon Lt wavelength'''
wl = wl_Lt
Lt.columns = pd.MultiIndex.from_tuples(zip(['Lt'] * len(wl), wl), names=['param', 'wl'])
intEd = interp1d(wl_Ed, Ed.values, fill_value='extrapolate')(wl)
newEd = pd.DataFrame(index=Ed.index,
columns=pd.MultiIndex.from_tuples(zip(['Ed'] * len(wl), wl), names=['param', 'wl']),
data=intEd)
intLsky = interp1d(wl_Lsky, Lsky.values, fill_value='extrapolate')(wl)
newLsky = pd.DataFrame(index=Lsky.index, columns=pd.MultiIndex.from_tuples(zip(['Lsky'] * len(wl), wl),
names=['param', 'wl']), data=intLsky)
awr = awr_process()
ws = [2]
print(azi, vza)
Lsky = newLsky # .loc[(newLsky.index.get_level_values(1) == vza) & (newLsky.index.get_level_values(2) == azi)]
Ed = newEd # .loc[(newEd.index.get_level_values(1) == vza) & (newEd.index.get_level_values(2) == azi)]
# merge sensor data on time
df = pd.merge_asof(Lt, Ed, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
direction="nearest")
df = pd.merge_asof(df, Lsky, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
direction="nearest")
# add solar angle data and idpr
# compute solar angle (mean between first and last acquisition time)
df['sza', ''] = np.nan
for index, row in df.iterrows():
# print index
sza = sunpos(index, lat, lon, alt)[1]
df.at[index, 'sza'] = sza
rho_h = awr.get_rho_values([df.sza.mean()], [vza], [azi], wl=wl)
Rrs_h = (df.loc[:, 'Lt'] - rho_h * df.loc[:, 'Lsky']) / df.loc[:, 'Ed']
Rrs_stat = Rrs_h.describe()
#Rrs_stat.columns=Rrs_stat.columns.droplevel()
Rrs_stat = Rrs_stat.T
Rrs_stat.to_csv(ofile,mode='a')
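# --- illustrative sketch, not part of the original script: the reflectance step above computes
# --- Rrs = (Lt - rho * Lsky) / Ed column-wise on the merged frame; toy numbers, assumed scalar rho
import pandas as pd
demo_wl = [440, 550, 660]
demo_Lt = pd.DataFrame([[1.20, 1.10, 0.60]], columns=demo_wl)
demo_Lsky = pd.DataFrame([[5.0, 4.0, 2.0]], columns=demo_wl)
demo_Ed = pd.DataFrame([[90.0, 100.0, 80.0]], columns=demo_wl)
demo_rho = 0.028                                          # the script interpolates rho from lookup tables
demo_Rrs = (demo_Lt - demo_rho * demo_Lsky) / demo_Ed     # sr-1
print(demo_Rrs.round(4))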
#-------------------------------
# for Mobley values :
# rho15 = awr.get_rho_mobley(awr.rhoM2015, [df.sza.mean()], [vza], [azi], [ws])[0]
# rho99 = awr.get_rho_mobley(awr.rhoM1999, [df.sza.mean()], [vza], [azi], [ws])[0]
#
# Rrs15 = (df.loc[:, 'Lt'] - rho15 * df.loc[:, 'Lsky']) / df.loc[:, 'Ed']
# Rrs99 = (df.loc[:, 'Lt'] - rho99 * df.loc[:, 'Lsky']) / df.loc[:, 'Ed']
......@@ -15,7 +15,7 @@ import plotly.graph_objs as go
import utils.utils as u
import utils.auxdata as ua
from process.process import *
from trios.process import *
......@@ -32,13 +32,13 @@ class fit:
# ------------------------------------------------
# above-water data files
dirfig = os.path.abspath('/DATA/OBS2CO/data/trios/fig')
dirout = os.path.abspath('/DATA/OBS2CO/data/trios/in_water')
odir = os.path.abspath('/DATA/OBS2CO/data/trios/in_water')
iwrfiles = glob.glob("/DATA/OBS2CO/data/trios/raw/uw*idpr*.csv")
coordf = glob.glob("/DATA/OBS2CO/data/info/mesures_in_situ.csv")[0]
coords = pd.read_csv(coordf, sep=';')
coords
coords['Date_prel']=pd.to_datetime(coords['Date_prel'])
# get idpr numbers
idprs = np.unique([re.findall(r'idpr(\d+)', x)[0] for x in iwrfiles])
......@@ -65,11 +65,17 @@ for idpr in idprs:#[-1:]:
print(idpr)
try:
c = coords[coords.ID_prel == int(idpr)] # .values[0]
date=c['Date_prel'].dt.strftime('%Y%m%d')
lat = c['Lat'].values[0]
lon = c['Lon'].values[0]
alt = 0 # c['Altitude']
name = c['ID_lac'].values[0]
ofile=os.path.join(odir,'Rrs_swr_'+date.values[0]+'_idpr'+idpr+'_'+name+'.csv')
header = c.stack()
header.index = header.index.droplevel()
header.to_csv(ofile, header=None)
dff = pd.DataFrame()
# -----------------------------------------------
......@@ -81,7 +87,7 @@ for idpr in idprs:#[-1:]:
reflectance = iwr_process(df, wl_).process()
df = pd.concat([df, reflectance], axis=1)
df.to_csv(os.path.join(dirout, 'trios_iwr_' + name + '_idpr' + idpr + '.csv'))
#df.to_csv(os.path.join(odir, 'trios_iwr_' + name + '_idpr' + idpr + '.csv'))
mean = df.groupby('rounded_depth').mean()
median = df.groupby('rounded_depth').median()
......
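The in-water products are then binned by rounded depth before averaging; a minimal illustration with made-up profile data:

import pandas as pd

df_demo = pd.DataFrame({'rounded_depth': [0.0, 0.0, 0.5, 0.5, 1.0],
                        'Edz': [102.0, 98.0, 71.0, 69.0, 50.0]})
mean_demo = df_demo.groupby('rounded_depth').mean()
median_demo = df_demo.groupby('rounded_depth').median()
print(mean_demo)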
import base64
import os
import pandas as pd
import numpy as np
import glob
import io
import os
from textwrap import dedent as d
import re
import matplotlib.pyplot as plt
import plotly
import plotly.graph_objs as go
import datetime
from scipy.interpolate import interp1d
from utils.sunposition import sunpos
import utils.utils as u
import utils.auxdata as ua
from process.process import *
from trios.process import *
plot=False
odir = os.path.abspath('/DATA/OBS2CO/data/trios/surface_water')
coordf = glob.glob("/DATA/OBS2CO/data/info/mesures_in_situ.csv")[0]
coords = pd.read_csv(coordf, sep=';')
coords['Date_prel']=pd.to_datetime(coords['Date_prel'])
dirfig = os.path.abspath('/DATA/OBS2CO/data/trios/fig')
awrfiles = glob.glob("/DATA/OBS2CO/data/trios/raw/aw*idpr*.csv")
# awrfiles = glob.glob("/DATA/OBS2CO/data/trios/test_setup/raw/aw*idpr*.csv")
swrfiles = glob.glob("/DATA/OBS2CO/data/trios/raw/Lu0*idpr*.csv")
coordf = glob.glob("/DATA/OBS2CO/data/info/mesures_in_situ_test.csv")[0]
coords = pd.read_csv(coordf, sep=';')
coords['Date_prel']=pd.to_datetime(coords['Date_prel'])
coords['h_debut']=coords['Date_prel'] + pd.to_timedelta(coords['h_debut'])
coords['h_fin']=coords['Date_prel'] + pd.to_timedelta(coords['h_fin'])
# TODO warning: time is set as start_time + 15 minutes (can be more accurate)
coords['Date_prel']= coords['h_debut']+datetime.timedelta(minutes = 15)
iopw = ua.iopw()
iopw.load_iopw()
......@@ -53,13 +51,11 @@ for idpr in idprs:
alt = 0 # c['Altitude']
name = c['ID_lac'].values[0]
ofile=os.path.join(odir,'Rrs_swr_idpr'+idpr+'_'+name+'_'+date.values[0]+'.csv')
header = c.stack()
ofile=os.path.join(odir,'Rrs_swr_'+date.values[0]+'_idpr'+idpr+'_'+name+'.csv')
header = c.stack(dropna=False)
header.index = header.index.droplevel()
header.to_csv(ofile, header=None)
# -----------------------------------------------
# SWR processing
# -----------------------------------------------
......@@ -79,6 +75,8 @@ for idpr in idprs:
Rrs_stat = Rrs_stat.T
Rrs_stat.to_csv(ofile,mode='a')
if plot:
import matplotlib.pyplot as plt
fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(10, 8))
fig.subplots_adjust(left=0.1, right=0.9, hspace=.5, wspace=0.65)
add_curve(ax, wl_swr, Rrs_swr.transpose().mean(axis=1), Rrs_swr.transpose().std(axis=1), label='swr', c='black')
......