
Commit d84a498f authored by Harmel Tristan

fix compatibility of scalar, vector, and matrix rho values according to the chosen method

parent fa2140c0
@@ -13,6 +13,9 @@ from statsmodels.sandbox.regression.predstd import wls_prediction_std
from scipy import odr as odr
from scipy import stats as scistats
import RTxploitation as rt
iopw = rt.auxdata.iopw().get_iopw
opj= os.path.join
idir = os.path.abspath('/DATA/OBS2CO/data/rogerio')
figdir = opj(idir,'fig')
@@ -39,10 +42,24 @@ doc_sd = data.iloc[:, 16]
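# Rrs2rrs converts above-surface remote-sensing reflectance Rrs into its
# below-surface counterpart rrs = Rrs / (0.52 + 1.7 * Rrs) (Lee et al., 2002)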
def Rrs2rrs(Rrs):
return Rrs / (0.52 + 1.7 * Rrs)
def black_water_model(B, x):
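# inv_gordon88 inverts the quadratic reflectance model rrs = g0*u + g1*u**2
# (with u = bb/(a+bb)) by taking the positive root of the quadratic in u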
def inv_gordon88(rrs,g0=0.089,g1=0.125):
deltas = g0**2 + 4 * g1 * rrs
return (-g0 + np.sqrt(deltas)) / (2*g1)
def black_water_model_old(B, x , wl=550):
aw,bbw = iopw(wl)
rrs = Rrs2rrs(x)
u = inv_gordon88(rrs)
return (B[0])**2 * (u - np.abs(B[1]) )
def black_water_model(B, x, wl=550):
rrs = Rrs2rrs(x)
return B[2] * (-B[0]+np.sqrt(B[0]**2+4*B[1]*rrs)) / (2*B[1])
u = inv_gordon88(rrs)
aw,bbw = iopw(wl)
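# note (one possible reading): with bb = bbw + N*B[1] and a = aw + B[0] + N*B[2],
# solving u = bb/(a+bb) for N gives the expression below; B[3] scales N to the
# fitted concentration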
N = (u*(aw+bbw+B[0])-bbw)/ (B[1]- u *(B[1]+B[2]))
return B[3] * N
def linear(B, x):
'''Linear function y = m*x + b'''
@@ -87,7 +104,8 @@ if param == 'SPM':
# gordon values
g0 = 0.089
g1 = 0.125
model,beta0=black_water_model,[g0,g1,150]
model,beta0=black_water_model_old,[150,10]
#model,beta0=black_water_model,[1,1,1,10]
else:
y=doc
y_sd=doc_sd
@@ -101,12 +119,13 @@ stats = pd.DataFrame(columns=['band','algo','b0','b1','b2','sig_b0','sig_b1','si
# plot Rrs = f(SPM)
wls = [560, 665, 704,740, 782, 864]
fig, axs = plt.subplots(nrows=2, ncols=3, figsize=(20, 12))
for i, ax in enumerate(axs.ravel()):
print(i)
wl=wls[i]
Rrs = data.iloc[:, 2 * i + 2]
print(Rrs.name)
print(Rrs.name,wl)
Rrs_sd = data.iloc[:, 2 * i + 3]
x,x_sd = Rrs, Rrs_sd
@@ -119,7 +138,7 @@ for i, ax in enumerate(axs.ravel()):
ax.set_ylabel(ylabel)
testdata = odr.RealData(x, y, sx=x_sd, sy=y_sd)
_odr = odr.ODR(testdata, odr.Model(model), beta0=beta0)
_odr = odr.ODR(testdata, odr.Model(model, extra_args=[wl]), beta0=beta0) #
# fit with ordinary least square (OLS, fit_type=2 )
_odr.set_job(fit_type=2)
@@ -130,12 +149,12 @@ for i, ax in enumerate(axs.ravel()):
yn = model(res.beta, xn)
fit_up, fit_dw = confidence_interval(xn, model, res)
stats.loc[2*i]=np.concatenate([[Rrs.name, 'OLS'], res.beta, res.sd_beta])
#stats.loc[2*i]=np.concatenate([[Rrs.name, 'OLS'], res.beta, res.sd_beta])
ax.plot(xn, yn, 'b--', label='OLS',#; '+fit_eq.format(*res.beta),
linewidth=2)
#ax.fill_between(xn, fit_up, fit_dw, alpha=.25, facecolor="b") # ,label="1-sigma interval")
ax.fill_between(xn, fit_up, fit_dw, alpha=.25, facecolor="b") # ,label="1-sigma interval")
# fit with Orthogonal distance regression (ODR, fit_type = 0)
_odr.set_job(fit_type=0)
@@ -144,17 +163,18 @@ for i, ax in enumerate(axs.ravel()):
xn = np.linspace(0, np.max(x) * 1.25, 500)
yn = model(res.beta, xn)
fit_up, fit_dw = confidence_interval(xn, model, res)
stats.loc[2*i+1]=np.concatenate([[Rrs.name, 'ODR'], res.beta, res.sd_beta])
print()
#stats.loc[2*i+1]=np.concatenate([[Rrs.name, 'ODR'], res.beta, res.sd_beta])
ax.plot(xn, yn, 'r-', label='ODR',#; '+fit_eq.format(*res.beta),
linewidth=2)
#ax.fill_between(xn, fit_up, fit_dw, alpha=.25, facecolor="r") # ,label="1-sigma interval")
ax.fill_between(xn, fit_up, fit_dw, alpha=.25, facecolor="r") # ,label="1-sigma interval")
ax.legend()
ax.set_ylim([0,40])
plt.suptitle('Fit based on modified Gordon model')
stats.to_csv(opj(idir,'stats_SPM_Rrs.csv'), float_format='%.2f',index=False)
plt.tight_layout(rect=[0.0, 0.0, 0.99, 0.94])
fig.savefig(opj(figdir,'blackwater_'+param+'_vs_Rrs.png'), dpi=200)
fig.savefig(opj(figdir,'blackwater_'+param+'_vs_Rrs.png'), dpi=300)
plt.show()
plt.close()
......
@@ -27,7 +27,7 @@ acdom_sd = data.iloc[:, 18]
#remove sparse data of CDOM
data = data.iloc[:,:-2]
params=['SPM','DOC']
param=params[0]
param=params[1]
if param == 'DOC':
data.dropna(inplace=True)
month = data.index.get_level_values(1).month
@@ -277,7 +277,7 @@ ax.annotate('y = {:.1f}x + {:.1f}\n$r$ = {:.3f}\nNb = {:d}'.format(
a, b, np.corrcoef(x, y)[0,1],len(x)),xy=(0.55,0.75), xycoords='axes fraction',)
plt.tight_layout(rect=[0.0, 0.0, 0.99, 0.985])
fig.savefig(opj(figdir,'DOC_vs_SPM.png'), dpi=200)
fig.savefig(opj(figdir,'DOC_vs_SPM_v2.png'), dpi=600)
plt.show()
@@ -321,7 +321,7 @@ for i,SPM in enumerate(y.values):
#------------------------------------------------
# multiparametric fit (and 3D plotting)
#------------------------------------------------
plt.ioff()
from matplotlib import cm
import scipy.linalg
@@ -370,8 +370,9 @@ for i, ax in enumerate(axs.ravel()):
plt.xlabel('DOC')
plt.ylabel('SPM')
ax.set_zlabel('Rrs')
fig.savefig(opj(figdir, 'Rrs_DOC_SPM.png'), dpi=300)
plt.show()
plt.show()
# functions
......
@@ -4,6 +4,7 @@ import pandas as pd
import matplotlib as mpl
import matplotlib.pyplot as plt
plt.ioff()
mpl.rcParams.update({'font.size': 18})
import statsmodels.api as sm
from sklearn.linear_model import LinearRegression
@@ -53,21 +54,80 @@ def confidence_interval(xn, model, res, nstd=1):
return model(popt_up, xn), model(popt_dw, xn)
models = [(linear, 2)] # (poly,3),,(exponential,2)
# compute ratio B4/B8a and attached uncertainty
data['B4/B8a'] = data['Rrs_sim(B8a)'] / data['Rrs_sim(B4)']
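# first-order uncertainty propagation for a ratio r = a/b:
# (sd_r/r)**2 = (sd_a/a)**2 + (sd_b/b)**2 - 2*cov(a,b)/(a*b)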
data['SD_B4/B8a'] = (((data.SD_B8a / data['Rrs_sim(B8a)']) ** 2 + (data.SD_B4/ data['Rrs_sim(B4)']) ** 2 -
2*1/(data['Rrs_sim(B8a)']*data['Rrs_sim(B4)'])) * (
data['Rrs_sim(B8a)'] / data['Rrs_sim(B4)']) ** 2) ** 0.5
titles = ['Band 3 (560 nm)', 'Band 4 (665 nm)', 'Band 5 (705 nm)', 'Band 6 (740 nm)', 'Band 7 (783 nm)',
'Band 8a (865 nm)', 'Band 8a / Band 4']
models = [(linear, 2)] # (poly,3),,(exponential,2)0
for model, N in models:
fig, ax = plt.subplots( figsize=(13, 9))
i=6
x = data.iloc[:, 2 * i + 2]
print(x.name)
x_sd = data.iloc[:, 2 * i + 3]
ax.errorbar(x, spm, xerr=x_sd, yerr=spm_sd, fmt='o', color='grey', ecolor='black', alpha=0.8)
ax.set_title(titles[i])
ax.set_xlabel(r'ratio $R_{rs}$')
ax.set_ylabel('SSC (mg/L)')
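# odr.RealData carries the measurement standard deviations (sx, sy) used to
# weight the fit; odr.Model expects a callable of the form f(beta, x)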
testdata = odr.RealData(x, spm, sx=x_sd, sy=spm_sd)
_odr = odr.ODR(testdata, odr.Model(model), beta0=[1.] * N)
# fit with ordinary least square (OLS, fit_type=2 )
_odr.set_job(fit_type=2)
res = _odr.run()
res.pprint()
xn = np.linspace(0, np.max(x) * 1.25, 50)
yn = model(res.beta, xn)
fit_up, fit_dw = confidence_interval(xn, model, res)
a, b = res.beta[1], res.beta[0]
ax.plot(xn, yn, 'b--', label='OLS; y = {:.1f}x + {:.1f}\n res_var = {:.1f}'.format(a, b, res.res_var),
linewidth=2)
ax.fill_between(xn, fit_up, fit_dw, alpha=.25, facecolor="b") # ,label="1-sigma interval")
# fit with Orthogonal distance regression (ODR, fit_type = 0)
_odr.set_job(fit_type=0)
res = _odr.run()
res.pprint()
xn = np.linspace(0, np.max(x) * 1.25, 50)
yn = model(res.beta, xn)
fit_up, fit_dw = confidence_interval(xn, model, res)
a, b = res.beta[1], res.beta[0]
ax.plot(xn, yn, 'r-', label='ODR; y = {:.1f}x + {:.1f}\n res_var = {:.1f}'.format(a, b, res.res_var),
linewidth=2)
ax.fill_between(xn, fit_up, fit_dw, alpha=.25, facecolor="r") # ,label="1-sigma interval")
ax.legend()
ax.set_ylim([-20, 40])
#ax.set_xlim([0, 1])
# ax.xaxis.set_tick_params(rotation=45)
ax.xaxis.set_major_formatter(plt.FormatStrFormatter('%.3f'))
plt.tight_layout(rect=[0.0, 0.0, 0.99, 0.985])
fig.savefig('/DATA/ARTICLES/rogerio/SPM_vs_ratio_Rrs_B8A_B4_v2021jan.png', dpi=300)
# plot Rrs = f(SPM)
fig, axs = plt.subplots(nrows=2, ncols=3, figsize=(25, 12))
fig, axs = plt.subplots(nrows=2, ncols=3, figsize=(20, 13))
for i, ax in enumerate(axs.ravel()):
print(i)
if i > 6:
ax.set_visible(False)
continue
x = data.iloc[:, 2 * i + 2]
print(x.name)
x_sd = data.iloc[:, 2 * i + 3]
ax.errorbar(x, spm, xerr=x_sd, yerr=spm_sd, fmt='o', color='grey', ecolor='black', alpha=0.8)
ax.set_title(x.name)
ax.set_title(titles[i])
ax.set_xlabel(r'$R_{rs}\ (sr^{-1})$')
ax.set_ylabel('SPM (mg/L)')
ax.set_ylabel('SSC (mg/L)')
testdata = odr.RealData(x, spm, sx=x_sd, sy=spm_sd)
_odr = odr.ODR(testdata, odr.Model(model), beta0=[1.] * N)
@@ -95,9 +155,12 @@ for model, N in models:
ax.plot(xn, yn, 'r-', label='ODR; y = {:.1f}x + {:.1f}\n res_var = {:.1f}'.format(a, b, res.res_var),
linewidth=2)
ax.fill_between(xn, fit_up, fit_dw, alpha=.25, facecolor="r") # ,label="1-sigma interval")
ax.legend()
ax.legend(loc='upper left')
ax.set_ylim([0, 40])
# ax.xaxis.set_tick_params(rotation=45)
ax.xaxis.set_major_formatter(plt.FormatStrFormatter('%.3f'))
plt.tight_layout(rect=[0.0, 0.0, 0.99, 0.985])
fig.savefig('/DATA/ARTICLES/rogerio/SPM_vs_Rrs.png', dpi=200)
fig.savefig('/DATA/ARTICLES/rogerio/SPM_vs_Rrs_v2021jan.png', dpi=300)
# plot Rrs = f(SPM)
fig, axs = plt.subplots(nrows=2, ncols=3, figsize=(30, 12))
@@ -138,7 +201,7 @@ for model, N in models:
ax.plot(xn, yn, 'r-', label='ODR; y = {:.1f}x + {:.1f}'.format(a, b), linewidth=2)
ax.fill_between(xn, fit_up, fit_dw, alpha=.25, facecolor="r") # ,label="1-sigma interval")
ax.legend()
mpl.ticker.MaxNLocator(5)
# test ratio
x = data.iloc[:, 2 * 5 + 2] / data.iloc[:, 2 * 1 + 2]
import os
import numpy as np
import pandas as pd
import matplotlib as mpl
import matplotlib.pyplot as plt
plt.ioff()
mpl.rcParams.update({'font.size': 18})
import statsmodels.api as sm
from sklearn.linear_model import LinearRegression
from statsmodels.sandbox.regression.predstd import wls_prediction_std
from scipy import odr as odr
from scipy import stats as scistats
import RTxploitation as rt
iopw = rt.auxdata.iopw().get_iopw
opj= os.path.join
idir = os.path.abspath('/DATA/OBS2CO/data/rogerio')
figdir = opj(idir,'fig')
file = opj(idir,'data/Simulation_Rrs_OSOAA_TSS_COD_aCDOM.xlsx')
data = pd.read_excel(file)
data.sort_values(by="TSS (mg L)", inplace=True)
#data = data.set_index(['Station', 'Date'])
idir = '/DATA/OBS2CO/data/rogerio/data/L2'
stat_file= os.path.join(idir, 'all/rho_osoaa_stat.csv')
df = pd.read_csv(stat_file, index_col=[0], parse_dates=True)
df.sort_values(['date','ID','method','wl'],inplace=True)
df_ave=df.groupby("wl").mean()
df_q05 = df[["wl","0.5"]].groupby("wl").quantile(q=0.05)
df_q95 = df[["wl","0.5"]].groupby("wl").quantile(0.95)
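# per-wavelength mean of the median rho values, plus a 5th-95th percentile
# envelope across all stations and dates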
plt.figure(figsize=(10,6))
plt.plot(df_ave.index, df_ave["0.5"], '--k', lw=1.5)
#plt.scatter(df.wl,df['0.5'])
plt.fill_between(df_q05.index, df_q05["0.5"], df_q95["0.5"], alpha=.25, facecolor="r")
plt.ylabel('Reflectance factor')
plt.xlabel('Wavelength (nm)')
plt.tight_layout()
plt.savefig(opj(figdir,'rho_values.png'),dpi=300)
\ No newline at end of file
@@ -235,6 +235,27 @@ def post_process(create_stat_file=False):
df = pd.read_csv(stat_file, index_col=[0], parse_dates=True)
df.sort_values(['date','ID','method','wl'],inplace=True)
#save rho values
odf =pd.DataFrame()
method='osoaa'
files = glob.glob(os.path.join(idir, method)+'/2*[0-9]/Rrs*csv')
for file in files:
info=file.split('_')
ID = info[-2].replace('idpr','')
if method == 'temp_opt':
ID = info[-3].replace('idpr','')
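# keep the median (q=0.5) rho spectrum of each file and index it by
# (date, ID, method, wl) so that all stations can be concatenated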
df = pd.read_csv(file, header=[0, 1], index_col=0, parse_dates=True)
date = df.index.mean()
rho = df.rho.quantile([0.5]).T
wl=rho.index.values
rho.index = pd.MultiIndex.from_product([[date],[ID], [method],wl],names=['date','ID','method','wl'])
odf = pd.concat([odf,rho])
# save data file
odf.to_csv(os.path.join(idir, 'all/rho_osoaa_stat.csv'))
import matplotlib.pyplot as plt
@@ -267,7 +288,7 @@ def post_process(create_stat_file=False):
# plot_map(coords)
# generate_database_info(idirs,infofile)
method = 'M99'
method = 'osoaa'
process(infofile, method=method, ncore=10)
for method in ['M99','osoaa','temp_opt']:
......
import os, sys
import pandas as pd
import numpy as np
import xarray as xr
import glob
import io
import matplotlib as mpl
import matplotlib.pyplot as plt
import plotly
import plotly.graph_objs as go
import cmocean
import scipy.optimize as so
from scipy.interpolate import interp1d
from trios.process import *
from trios.utils.sunposition import sunpos
opj = os.path.join
dir = '/DATA/projet/bagre'
dirdata = opj(dir, 'data')
aerosols = ['fine', 'coarse']
aerosol = aerosols[1]
method = 'osoaa_' + aerosol
odir = opj(dirdata, 'L2', method)
files = glob.glob(opj(dirdata, 'raw/trios/csv_QC', 'trios*.csv'))
if not os.path.exists(odir):
os.makedirs(odir)
# (for raw data: 83b0 = Lu, 853e = Ed, 83ae = Ld40va, 855d = Ld50va).
# sensors are pointing to a 240° azimuth (clockwise from north).
# TODO: redo computation for wl < 400 nm
# project spectral data on common wavelength set (i.e., wl_common defined in trios.config)
# here we change wl_common to stay within interpolation range (i.e., 410-1025 nm)
# wl_common = wl_common[wl_common>=410]
def format_Rrs(Rrs, df, name='Rrs'):
Rrs = Rrs.to_pandas()
Rrs.index = df.index
Rrs.columns = pd.MultiIndex.from_product([[name], df.Lt.columns, ])
return Rrs
awr = awr_process(aerosol=aerosol)
vza = 40
ws = 2
aot550 = 0.1
rho = awr.rho.rho.to_xarray()
rho_ = rho.interp(vza=vza, wind=ws, aot=aot550, wl=wl_common)
rho_M99 = awr.rhoM1999.to_xarray().interp(vza=vza, wind=ws)
rho_M99 = rho_M99.interp(sza=np.linspace(0, 80, 81), method='cubic').interp(azi=np.linspace(0, 180, 181),
method='cubic')
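# the spectral rho LUT is pre-interpolated to the fixed vza, wind and aot550,
# and the rho_M99 table (presumably Mobley 1999) onto 1-degree sza/azi grids;
# per-measurement sza/azi interpolation happens inside the loop below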
for file in files:
basename = os.path.basename(file)
df = pd.read_csv(file, header=[0, 1], index_col=0, parse_dates=True)
print(df.shape)
lat = df.lat.values[0][0]
lon = df.lon.values[0][0]
alt = 0
df['sza', ''] = np.nan
df['azi', ''] = np.nan
geom = sunpos(df.index.to_pydatetime(), lat, lon, alt)[:, 0:2]
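# sunpos is assumed to return solar (azimuth, zenith) in degrees, so column 1
# of geom is the solar zenith angle used below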
df.at[:, 'sza'] = geom[:, 1]
relazi = 135 # ( geom[:,0] - azi_sensor)% 360
# to get values between 0 and 180°
# since rho values are symmetrical with the principal plane
# relazi[relazi>180] = 360-relazi[relazi>180]
df.at[:, 'azi'] = relazi
print(basename)
for name, raw in df.resample('1d'):
print(name)
# try:
suff = name.__str__().replace(' ', '_')
N = len(raw.index)
if not N:
print('No data for {}'.format(file))
continue
# ------------------
# filtering
# ------------------
# daylight data
ind = (raw.sza < 80) & (raw.Lt.mean(axis=1) > 1) & (raw.Lsky.mean(axis=1) < 300 )
if not any(ind):
print('No QA-checked data for', file)
continue
# TODO add time rolling filters
# ind = awr.filtering(raw.Lt, raw.Lsky, raw.Ed)
clean = raw[ind].__deepcopy__()
Lt, Lsky, Ed, sza, azi = clean.Lt.values, clean.Lsky.values, clean.Ed.values, clean.sza.values, clean.azi.values
sza_ = xr.DataArray(sza, dims='geom')
azi_ = xr.DataArray(azi, dims='geom')
# -----------------------------
# data processing
# -----------------------------
rho_v = rho_.interp(sza=sza_, azi=azi_).T
rho_M99_ = rho_M99.interp(sza=sza_, azi=azi_).to_array().T.values
clean['rho', ''] = rho_v.mean(axis=1)
clean['rho_min', ''] = rho_v.min(axis=1)
clean['rho_max', ''] = rho_v.max(axis=1)
clean['rho_M99', ''] = rho_M99_
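# sky-glint removal: subtract the sky radiance reflected by the surface and
# normalize by Ed, i.e. Rrs = (Lt - rho * Lsky) / Ed (per-wavelength rho for
# the LUT method, a single rho_M99 value per spectrum)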
Lsurf = (rho_v * Lsky)
Lsurf_M99 = rho_M99_ * Lsky
Rrs = format_Rrs((Lt - Lsurf) / clean.Ed, clean, 'Rrs_' + method)
Rrs_M99 = (Lt - Lsurf_M99) / clean.Ed
Rrs_M99.columns = pd.MultiIndex.from_product([['Rrs_M99'], Rrs_M99.columns, ])
clean = pd.concat([Rrs, Rrs_M99, clean], axis=1)
clean.to_csv(opj(odir, basename.split('.')[0]+'_L2_' + method + '_' + suff + '.csv'))
# Rrs, rho = awr.process_wrapper(wl_common, raw, raw.sza, ws=ws, vza = [vza] * N, azi=raw.azi)
#
# awr.get_rho_values(raw.sza-90,[vza]*N, raw.azi.values, ws=[2])
#
# rho = awr.get_rho_mobley(awr.rhoM1999,raw.sza-90,[vza]*N, azi=raw.azi, ws=[2])[0,:,0]
import os, sys
import pandas as pd
import numpy as np
import xarray as xr
import glob
import io
import matplotlib as mpl
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable
plt.ioff()
plt.rcParams.update({'font.size': 16})
plot = False
opj = os.path.join
dir = '/DATA/projet/bagre'
dirdata = opj(dir, 'data')
aerosols = ['fine', 'coarse']
aerosol = aerosols[1]
method = 'osoaa_' + aerosol
figdir = opj(dir, 'fig/trios/')
files = glob.glob(opj(dirdata, 'L2', method, 'trios*.csv'))
def scat_angle(sza, vza, azi):
'''
sza, vza, azi: angles in degrees; convention azi=0 when sun and sensor are in opposition
:return: scattering angle in deg
'''
print(sza, vza, azi)
sza = np.pi / 180 * sza
vza = np.pi / 180 * vza
azi = np.pi / 180 * azi
ang = -np.cos(sza) * np.cos(vza) + np.sin(sza) * np.sin(vza) * np.cos(azi)
# ang = np.cos(np.pi - sza) * np.cos(vza) - np.sin(np.pi - sza) * np.sin(vza) * np.cos(azi)
ang = np.arccos(ang) * 180 / np.pi
return ang
rho = pd.DataFrame()
for file in files:
# test
df = pd.read_csv(file, header=[0, 1], index_col=0, parse_dates=True)
rho = pd.concat([rho, df.loc[:, ['rho', 'rho_M99', 'sza', 'azi']]])
rho = rho.sort_index()
rho.columns = rho.columns.droplevel(1)
rho['doy'] = rho.index.dayofyear
rho['hour'] = rho.index.hour + rho.index.minute / 60
if plot:
import seaborn as sns
colors = rho.doy.unique()
palette = dict(zip(colors,
sns.color_palette("rocket_r", len(colors))))
fig, axs = plt.subplots(ncols=2, figsize=(16, 6))
sns.scatterplot('hour', 'rho', hue='doy', data=rho, alpha=0.5, edgecolor="none", ax=axs[0], palette=palette)
sns.scatterplot('hour', 'rho_M99', hue='doy', data=rho, marker="+", alpha=0.5, ax=axs[0], legend=None)
axs[0].set_xlabel('Time of day (UTC)')
axs[0].set_ylabel('Reflection factor')
plt.legend(fontsize=11)
sns.scatterplot('hour', 'sza', hue='doy', data=rho, edgecolor="none", palette=palette, alpha=0.5, ax=axs[1],
legend=None)
# sns.scatterplot('hour', 'azi', hue='doy', data=rho, edgecolor="none", palette=palette, alpha=0.5, ax=axs[1], legend=None)
axs[1].set_xlabel('Time of day (UTC)')
axs[1].set_ylabel('Solar zenith angle (deg)')
plt.savefig(opj(dir, 'fig', method + '_rho_values.png'))
# Rrs_basic = (df.Lt-0.028*df.Lsky)/df.Ed
# wl = Rrs_basic.columns
# Rrs_basic.columns=pd.MultiIndex.from_product([['Rrs_basic'],wl])
#
# rel_diff = (Rrs_basic -df.Rrs)/df.Rrs
# rel_diff.columns=pd.MultiIndex.from_product([['rel_diff'],wl])
# rel_diff[np.abs(rel_diff)> 10]=np.nan
# df_tot = df.join(Rrs_basic).join(rel_diff)
# df_tot.to_csv(file.replace('.csv','_test.csv'))
fig1, ax1 = plt.subplots(figsize=(8, 5))
# test on BRDF vs Rrs
norm1 = mpl.colors.Normalize(vmin=0, vmax=90)
cmap1 = plt.cm.nipy_spectral
sm1 = mpl.cm.ScalarMappable(norm=norm1, cmap=cmap1)
var = 'Rrs_osoaa_coarse'
outputs = pd.DataFrame()
for file in files:
# test
basename = os.path.basename(file)
ID = basename.split('_')[2]
df = pd.read_csv(file, header=[0, 1], index_col=0, parse_dates=True)
df.reset_index(inplace=True)
var_median = df[var].median()
qmax = df[var].quantile(.68)
qmin = df[var].quantile(0.05)
_mean = df[var].mean(axis=1)
_qmax = df[var].mean(axis=1).quantile(.68)
_qmin = df[var].mean(axis=1).quantile(0.05)
#(df[var] > qmin) & (df[var] < qmax)
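# keep only spectra whose band-averaged value lies between the 5th and 68th
# percentiles before computing the station mean and standard deviation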
dffiltered = df[(_mean > _qmin) & (_mean < _qmax)]
var_std = dffiltered[var].std()
var_mean = dffiltered[var].mean()
_df = dffiltered.mean()
_df['date']=dffiltered.date.mean()
_df['ID'] = ID