ext / insitu / trios — Commits

Commit c9a02a96
authored Mar 12, 2019 by Harmel Tristan

    reorganize package trios and reproject all data on common wavelengths (set in config.py)

parent 6b4880fc
Changes: 23 files
build/lib/process/process.py
...
@@ -9,7 +9,7 @@ from plotly.graph_objs import *
 from utils.utils import reshape as r
 import utils.auxdata as ua
-from config import *
+from ..config import *
 class awr_process:
...
build/lib/process/process_compar_awr.py
...
@@ -14,7 +14,7 @@ from scipy.interpolate import interp1d
 from utils.sunposition import sunpos
 import utils.utils as u
 import utils.auxdata as ua
-from process.process import *
+from trios.process import *
 coordf = glob.glob("/DATA/OBS2CO/data/info/mesures_in_situ.csv")[0]
 coords = pd.read_csv(coordf, sep=';')
...
build/lib/process/process_test_setup.py
...
@@ -13,7 +13,7 @@ from scipy.interpolate import interp1d
 from utils.sunposition import sunpos
 import utils.utils as u
-from process.process import *
+from trios.process import *
 coordf = glob.glob("/DATA/OBS2CO/data/info/mesures_in_situ.csv")[0]
...
build/lib/utils/utils.py
...
@@ -3,7 +3,7 @@ import numpy as np
 from scipy.interpolate import interp1d
 from utils.sunposition import sunpos
+from trios.config import *

 class awr_data:
     '''
...
@@ -46,18 +46,23 @@ class awr_data:
         Lsky, wl_Lsky = d.load_csv(self.Lskyf)
         Lt, wl_Lt = d.load_csv(self.Ltf)

-        # ''' interpolate Ed and Lsky data upon Lt wavelength'''
-        wl = wl_Lt
+        # ''' interpolate Ed, Lt and Lsky data upon common wavelength'''
+        wl = wl_common
         Lt.columns = pd.MultiIndex.from_tuples(zip(['Lt'] * len(wl), wl), names=['param', 'wl'])
         intEd = interp1d(wl_Ed, Ed.values, fill_value='extrapolate')(wl)
         newEd = pd.DataFrame(index=Ed.index,
                              columns=pd.MultiIndex.from_tuples(zip(['Ed'] * len(wl), wl),
                                                                names=['param', 'wl']),
                              data=intEd)
+        intLt = interp1d(wl_Lt, Lt.values, fill_value='extrapolate')(wl)
+        newLt = pd.DataFrame(index=Lt.index,
+                             columns=pd.MultiIndex.from_tuples(zip(['Lt'] * len(wl), wl),
+                                                               names=['param', 'wl']),
+                             data=intLt)
         intLsky = interp1d(wl_Lsky, Lsky.values, fill_value='extrapolate')(wl)
         newLsky = pd.DataFrame(index=Lsky.index,
                                columns=pd.MultiIndex.from_tuples(zip(['Lsky'] * len(wl), wl),
                                                                  names=['param', 'wl']),
                                data=intLsky)

         # merge sensor data on time
-        df = pd.merge_asof(Lt, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
+        df = pd.merge_asof(newLt, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
                            direction="nearest")
         df = pd.merge_asof(df, newLsky, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
                            direction="nearest")
...
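The hunk above is the core of the commit: instead of keeping each sensor on its native wavelength grid, every spectrum is re-interpolated onto the shared wl_common grid (defined in the new trios/config.py shown further down) before the frames are merged. A minimal, self-contained sketch of that reprojection step, using a made-up wavelength grid and random data rather than the project's loader output:

import numpy as np
import pandas as pd
from scipy.interpolate import interp1d

wl_step = 3
wl_common = np.arange(320, 950 + wl_step, wl_step)  # as set in trios/config.py

# hypothetical raw sensor output: 5 spectra measured on a native 400-900 nm grid
wl_native = np.linspace(400, 900, 256)
times = pd.date_range("2019-03-12 10:00", periods=5, freq="1s")
Ed = pd.DataFrame(np.random.rand(5, wl_native.size), index=times)

# resample onto wl_common, extrapolating outside the native range as utils.py does
intEd = interp1d(wl_native, Ed.values, fill_value='extrapolate')(wl_common)

# rebuild a DataFrame whose columns form a ('param', 'wl') MultiIndex
newEd = pd.DataFrame(index=Ed.index,
                     columns=pd.MultiIndex.from_tuples(list(zip(['Ed'] * len(wl_common), wl_common)),
                                                       names=['param', 'wl']),
                     data=intEd)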
@@ -126,32 +131,36 @@ class iwr_data:
         # Edz.mask(Edz<0,inplace=True)
         # Luz.mask(Luz<0,inplace=True)

         # copy depth data to Ed frame on date index
         # Ed.index = Ed.index.droplevel(level=1)

-        #''' interpolate Ed and Lsky data upon Lt wavelength'''
-        wl = wl_Luz
+        #''' interpolate Ed, Edz and Luz data upon common wavelength'''
+        wl = wl_common
         Luz.columns = pd.MultiIndex.from_tuples(list(zip(['Luz'] * len(wl), wl)), names=['param', 'wl'])
         intEd = interp1d(wl_Ed, Ed.values, fill_value='extrapolate')(wl)
         newEd = pd.DataFrame(index=Ed.index.get_level_values(0),
                              columns=pd.MultiIndex.from_tuples(list(zip(['Ed'] * len(wl), wl)), names=['param', 'wl']),
                              data=intEd)
         intEdz = interp1d(wl_Edz, Edz.values, fill_value='extrapolate')(wl)
         newEdz = pd.DataFrame(index=Edz.index, columns=pd.MultiIndex.from_tuples(list(zip(['Edz'] * len(wl), wl)),
                               names=['param', 'wl']), data=intEdz)
+        intLuz = interp1d(wl_Luz, Luz.values, fill_value='extrapolate')(wl)
+        newLuz = pd.DataFrame(index=Luz.index, columns=pd.MultiIndex.from_tuples(list(zip(['Luz'] * len(wl), wl)),
+                              names=['param', 'wl']), data=intLuz)

         # correct depth data for sensor to sensor distance
-        Luz.reset_index(level=1, inplace=True)
-        Luz.iloc[:, 0] = Luz.iloc[:, 0] + delta_Lu_depth
+        newLuz.reset_index(level=1, inplace=True)
+        newLuz.iloc[:, 0] = Luz.iloc[:, 0] + delta_Lu_depth
         # newEd.reset_index(level=1,inplace=True)
         newEdz.reset_index(level=1, inplace=True)
         newEdz.iloc[:, 0] = newEdz.iloc[:, 0] + delta_Edz_depth

         # merge sensor data on time
-        df = pd.merge_asof(Luz, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
+        df = pd.merge_asof(newLuz, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
                            direction="nearest")
         df = pd.merge_asof(df, newEdz, left_index=True, right_index=True, suffixes=('_Luz', '_Edz'),
                            tolerance=pd.Timedelta("2 seconds"),
...
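Once every sensor sits on the same wavelength grid, the frames are aligned in time with pd.merge_asof: each record is paired with the nearest record of the other frame within a 2-second tolerance, and records with no partner inside that window come back as NaN. A small sketch with hypothetical timestamps (not project data):

import pandas as pd

# two hypothetical sensors sampling slightly out of phase
left = pd.DataFrame({'Luz': [1.0, 2.0, 3.0]},
                    index=pd.to_datetime(['2019-03-12 10:00:00.2',
                                          '2019-03-12 10:00:01.1',
                                          '2019-03-12 10:00:05.0']))
right = pd.DataFrame({'Ed': [10.0, 20.0, 30.0]},
                     index=pd.to_datetime(['2019-03-12 10:00:00.0',
                                           '2019-03-12 10:00:01.0',
                                           '2019-03-12 10:00:02.0']))

# nearest match within 2 s; the 10:00:05 record finds no partner and gets NaN
df = pd.merge_asof(left, right, left_index=True, right_index=True,
                   tolerance=pd.Timedelta("2 seconds"), direction="nearest")
print(df)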
@@ -174,7 +183,7 @@ class iwr_data:
         #
         # dateparse = lambda x: pd.datetime.strptime(x, '%Y-%m-%d %H:%M:%S')
         # if len(file) > 1:
-        #     print('Warning! Multiple files found but only one expected, process first file of the list:')
+        #     print('Warning! Multiple files found but only one expected, trios first file of the list:')
         #     print(file)
         #     file = file[0]
         # df = pd.read_csv(file, sep=';', index_col=[1, 0], na_values=['-NAN'])
...
@@ -225,16 +234,19 @@ class swr_data:
         Ed, wl_Ed = data().load_csv(self.Edf)
         Lu0, wl_Lu0 = data().load_csv(self.Lu0f)

-        # ''' interpolate Ed and Lsky data upon Lt wavelength'''
-        wl = wl_Lu0
+        # ''' interpolate Ed and Lsky data upon common wavelengths'''
+        wl = wl_common
         Lu0.columns = pd.MultiIndex.from_tuples(zip(['Lu0+'] * len(wl), wl), names=['param', 'wl'])
         intEd = interp1d(wl_Ed, Ed.values, fill_value='extrapolate')(wl)
         newEd = pd.DataFrame(index=Ed.index,
                              columns=pd.MultiIndex.from_tuples(zip(['Ed'] * len(wl), wl), names=['param', 'wl']),
                              data=intEd)
+        intLu0 = interp1d(wl_Lu0, Lu0.values, fill_value='extrapolate')(wl)
+        newLu0 = pd.DataFrame(index=Lu0.index,
+                              columns=pd.MultiIndex.from_tuples(zip(['Lu0'] * len(wl), wl), names=['param', 'wl']),
+                              data=intLu0)

         # merge sensor data on time
-        df = pd.merge_asof(Lu0, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
+        df = pd.merge_asof(newLu0, newEd, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
                            direction="nearest")

         # add solar angle data and idpr
...
@@ -261,7 +273,7 @@ class data:
             print(file)
             dateparse = lambda x: pd.datetime.strptime(x, '%Y-%m-%d %H:%M:%S')
             if len(file) > 1:
-                print('Warning! Multiple files found but only one expected, process first file of the list:')
+                print('Warning! Multiple files found but only one expected, trios first file of the list:')
                 print(file)
                 file_ = file[0]
         # df = pd.read_csv(file, date_parser=dateparse, sep=';', index_col=0, na_values=['-NAN'])
...
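All of the reprojected frames in utils.py carry ('param', 'wl') MultiIndex columns, so a whole parameter block can be sliced out of the merged frame with a single .loc call — which is how the processing scripts later form quantities such as (Lt - rho * Lsky) / Ed. A toy example with hypothetical values:

import numpy as np
import pandas as pd

wl = [440, 550, 660]
cols = pd.MultiIndex.from_tuples(
    [(p, w) for p in ('Lt', 'Lsky', 'Ed') for w in wl],
    names=['param', 'wl'])
df = pd.DataFrame(np.arange(9, dtype=float).reshape(1, 9), columns=cols)

# selecting on the first level returns one sub-frame per parameter,
# each indexed by wavelength only
Lt = df.loc[:, 'Lt']      # columns: 440, 550, 660
Ed = df.loc[:, 'Ed']
ratio = Lt / Ed           # element-wise, aligned on wavelength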
config.py (deleted, 100644 → 0)
'''where you can set absolute and relative path used in the package'''
import os

root = os.path.dirname(os.path.abspath(__file__))
M2015_file = os.path.join(root, 'aux/rhoTable_Mobley2015.csv')
M1999_file = os.path.join(root, 'aux/rhoTable_Mobley1999.csv')
rhosoaa_fine_file = os.path.join(root, 'aux/surface_reflectance_factor_rho_fine_aerosol_rg0.06_sig0.46.csv')
rhosoaa_coarse_file = os.path.join(root, 'aux/surface_reflectance_factor_rho_coarse_aerosol_rg0.60_sig0.60.csv')
iopw_file = os.path.join(root, 'aux/water_coef.txt')
F0_file = os.path.join(root, 'aux/Thuillier_2003_0.3nm.dat')
main.py
...
@@ -11,7 +11,7 @@ import plotly
 import plotly.graph_objs as go
 import utils.utils as u
-from process.process import *
+from trios.process import *
 # import aeronet
 # from config import *
...
@@ -83,7 +83,7 @@ for idpr in idprs:
     # iwr = u.iwr_data(idpr, iwrfiles)
     # if iwr.file:
     #     df, wl = iwr.reader(c[1], c[2], c[3])
-    #     Rrs = iwr_process(df, wl).process()
+    #     Rrs = iwr_process(df, wl).trios()
     #     dff = pd.concat([dff, Rrs], axis=1)
 # writing output file
...
setup.py
...
@@ -4,7 +4,7 @@ from grs import __version__
 setup(
     name='trios',
     version=__version__,
-    packages=find_packages(),
+    packages=find_packages(exclude=['build']),
     package_data={'': ['*.so']},
     # # If any package contains *.txt files, include them:
     # '': ['*.txt'],
...
@@ -17,7 +17,7 @@ setup(
     license='MIT',
     author='T. Harmel',
     author_email='tristan.harmel@ntymail.com',
-    description='Package to help process TriOS radiometer data for various above-water or in-water setups',
+    description='Package to help trios TriOS radiometer data for various above-water or in-water setups',
     # TODO update Dependent packages (distributions)
     install_requires=['dash', 'dash_core_components', 'dash_html_components', 'pandas', 'scipy', 'numpy',
                       'netCDF4', 'matplotlib', 'docopt', 'GDAL', 'python-dateutil'],
...
simulation/rho_snapshot.py
...
@@ -4,7 +4,7 @@ import numpy as np
 import cmocean
 import matplotlib.pyplot as plt
-from process.process import *
+from trios.process import *
 dirfig = os.path.abspath('/DATA/projet/gilerson/fig')
 awr = awr_process()
...
trios.egg-info/PKG-INFO
 Metadata-Version: 1.0
 Name: trios
 Version: 1.1.1
-Summary: Package to help process TriOS radiometer data for various above-water or in-water setups
+Summary: Package to help trios TriOS radiometer data for various above-water or in-water setups
 Home-page: https://gitlab.irstea.fr/ETL-TELQUEL/etl/tree/dev/preprocessing/trios
 Author: T. Harmel
 Author-email: tristan.harmel@ntymail.com
...
trios.egg-info/SOURCES.txt
...
@@ -2,14 +2,17 @@ README.md
 setup.py
 aux/__init__.py
 aux/sensors_spec.py
-process/__init__.py
-process/process.py
-process/process_compar_awr.py
-process/process_sabine.py
-process/process_test_setup.py
-process/run_iwr.py
 simulation/__init__.py
 simulation/rho_snapshot.py
+trios/__init__.py
+trios/config.py
+trios/process.py
+trios/process_compar_awr.py
+trios/process_sabine.py
+trios/process_test_setup.py
+trios/run_awr.py
+trios/run_iwr.py
+trios/run_swr.py
 trios.egg-info/PKG-INFO
 trios.egg-info/SOURCES.txt
 trios.egg-info/dependency_links.txt
...
trios.egg-info/top_level.txt
 aux
 build
-process
 simulation
+trios
 utils
 visu
process/__init__.py → trios/__init__.py (file moved)
trios/config.py (new file, 0 → 100644)
'''where you can set absolute and relative path used in the package'''
import os
import numpy as np

root = os.path.dirname(os.path.abspath(__file__))
M2015_file = os.path.join(root, '../aux/rhoTable_Mobley2015.csv')
M1999_file = os.path.join(root, '../aux/rhoTable_Mobley1999.csv')
rhosoaa_fine_file = os.path.join(root, '../aux/surface_reflectance_factor_rho_fine_aerosol_rg0.06_sig0.46.csv')
rhosoaa_coarse_file = os.path.join(root, '../aux/surface_reflectance_factor_rho_coarse_aerosol_rg0.60_sig0.60.csv')
iopw_file = os.path.join(root, '../aux/water_coef.txt')
F0_file = os.path.join(root, '../aux/Thuillier_2003_0.3nm.dat')

# set common wavelengths on which spectra are reprojected
# default 320nm to 950 nm each wl_step nm
wl_step = 3
wl_common = np.arange(320, 950 + wl_step, wl_step)
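The grid defined above runs from 320 nm to 950 nm in wl_step = 3 nm increments, i.e. 211 target wavelengths; adding wl_step to the stop value keeps 950 nm in the grid, since np.arange excludes its endpoint. A quick, purely illustrative check:

import numpy as np

wl_step = 3
wl_common = np.arange(320, 950 + wl_step, wl_step)
print(wl_common[0], wl_common[-1], len(wl_common))   # 320 950 211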
process/process.py → trios/process.py
 import numpy as np
 import pandas as pd
 from scipy import interpolate, integrate
 from scipy.optimize import curve_fit
 import plotly.plotly as py
 # import plotly.graph_objs as go
 from plotly.graph_objs import *
 from utils.utils import reshape as r
 import utils.auxdata as ua
-from config import *
+from trios.config import *

 class awr_process:
...
process/process_compar_awr.py → trios/process_compar_awr.py
...
@@ -14,7 +14,7 @@ from scipy.interpolate import interp1d
 from utils.sunposition import sunpos
 import utils.utils as u
 import utils.auxdata as ua
-from process.process import *
+from trios.process import *
 coordf = glob.glob("/DATA/OBS2CO/data/info/mesures_in_situ.csv")[0]
 coords = pd.read_csv(coordf, sep=';')
...
process/process_sabine.py → trios/process_sabine.py
...
@@ -12,7 +12,7 @@ import plotly.graph_objs as go
 import cmocean
 import utils.utils as u
-from process.process import *
+from trios.process import *
 from utils.sunposition import sunpos
 dir = '/DATA/OBS2CO/data/sabine/data/raw'
...
process/process_test_setup.py → trios/process_test_setup.py
...
@@ -13,7 +13,7 @@ from scipy.interpolate import interp1d
 from utils.sunposition import sunpos
 import utils.utils as u
-from process.process import *
+from trios.process import *
 coordf = glob.glob("/DATA/OBS2CO/data/info/mesures_in_situ.csv")[0]
...
trios/run_awr.py (new file, 0 → 100644)
import os
import pandas as pd
import numpy as np
import glob
import re
import datetime

from scipy.interpolate import interp1d

from utils.sunposition import sunpos
import utils.utils as u
import utils.auxdata as ua
from trios.process import *

odir = os.path.abspath('/DATA/OBS2CO/data/trios/above_water')
dirfig = os.path.abspath('/DATA/OBS2CO/data/trios/fig')

awrfiles = glob.glob("/DATA/OBS2CO/data/trios/raw/aw*idpr*.csv")

coordf = glob.glob("/DATA/OBS2CO/data/info/mesures_in_situ_test.csv")[0]
coords = pd.read_csv(coordf, sep=';')
coords['Date_prel'] = pd.to_datetime(coords['Date_prel'])
coords['h_debut'] = coords['Date_prel'] + pd.to_timedelta(coords['h_debut'])
coords['h_fin'] = coords['Date_prel'] + pd.to_timedelta(coords['h_fin'])
# TODO warning: time is set as start_time + 15 minutes (can be more accurate)
coords['Date_prel'] = coords['h_debut'] + datetime.timedelta(minutes=15)

iopw = ua.iopw()
iopw.load_iopw()


def add_curve(ax, x, mean, std, c='red', label=''):
    ax.plot(x, mean, linestyle='solid', c=c, lw=2.5, alpha=0.8, label=label)
    ax.fill_between(x, mean - std, mean + std, alpha=0.35, color=c)


idpr = '167'

# get idpr numbers
idprs = np.unique([re.findall(r'idpr(\d+)', x)[0] for x in awrfiles])
#idprs = np.array(['170'])
# loop over idpr
for idpr in idprs:
    c = coords[coords.ID_prel == int(idpr)]  # .values[0]
    date = c['Date_prel'].dt.strftime('%Y%m%d')
    lat = c['Lat'].values[0]
    lon = c['Lon'].values[0]
    alt = 0  # c['Altitude']
    name = c['ID_lac'].values[0]

    ofile = os.path.join(odir, 'Rrs_awr_' + date.values[0] + '_idpr' + idpr + '_' + name + '.csv')
    header = c.stack(dropna=False)
    header.index = header.index.droplevel()
    header.to_csv(ofile, header=None)

    # -----------------------------------------------
    #      AWR processing
    # -----------------------------------------------
    azi = 135
    vza = 40
    awr = u.awr_data(idpr, awrfiles)
    if awr.Edf:

        index_idx = [0]

        d = u.data(index_idx)
        Ed, wl_Ed = d.load_csv(awr.Edf)
        Lsky, wl_Lsky = d.load_csv(awr.Lskyf)
        Lt, wl_Lt = d.load_csv(awr.Ltf)

        # ''' interpolate Ed and Lsky data upon Lt wavelength'''
        wl = wl_Lt
        Lt.columns = pd.MultiIndex.from_tuples(zip(['Lt'] * len(wl), wl), names=['param', 'wl'])
        intEd = interp1d(wl_Ed, Ed.values, fill_value='extrapolate')(wl)
        newEd = pd.DataFrame(index=Ed.index,
                             columns=pd.MultiIndex.from_tuples(zip(['Ed'] * len(wl), wl), names=['param', 'wl']),
                             data=intEd)
        intLsky = interp1d(wl_Lsky, Lsky.values, fill_value='extrapolate')(wl)
        newLsky = pd.DataFrame(index=Lsky.index,
                               columns=pd.MultiIndex.from_tuples(zip(['Lsky'] * len(wl), wl), names=['param', 'wl']),
                               data=intLsky)

        awr = awr_process()
        ws = [2]

        print(azi, vza)

        Lsky = newLsky  # .loc[(newLsky.index.get_level_values(1) == vza) & (newLsky.index.get_level_values(2) == azi)]
        Ed = newEd  # .loc[(newEd.index.get_level_values(1) == vza) & (newEd.index.get_level_values(2) == azi)]

        # merge sensor data on time
        df = pd.merge_asof(Lt, Ed, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
                           direction="nearest")
        df = pd.merge_asof(df, Lsky, left_index=True, right_index=True, tolerance=pd.Timedelta("2 seconds"),
                           direction="nearest")

        # add solar angle data and idpr
        # compute solar angle (mean between fisrt and last aqcuisition time
        df['sza', ''] = np.nan
        for index, row in df.iterrows():
            # print index
            sza = sunpos(index, lat, lon, alt)[1]
            df.at[index, 'sza'] = sza

        rho_h = awr.get_rho_values([df.sza.mean()], [vza], [azi], wl=wl)
        Rrs_h = (df.loc[:, 'Lt'] - rho_h * df.loc[:, 'Lsky']) / df.loc[:, 'Ed']

        Rrs_stat = Rrs_h.describe()
        #Rrs_stat.columns=Rrs_stat.columns.droplevel()
        Rrs_stat = Rrs_stat.T
        Rrs_stat.to_csv(ofile, mode='a')

        #-------------------------------
        # for Mobley values :
        # rho15 = awr.get_rho_mobley(awr.rhoM2015, [df.sza.mean()], [vza], [azi], [ws])[0]
        # rho99 = awr.get_rho_mobley(awr.rhoM1999, [df.sza.mean()], [vza], [azi], [ws])[0]
        #
        # Rrs15 = (df.loc[:, 'Lt'] - rho15 * df.loc[:, 'Lsky']) / df.loc[:, 'Ed']
        # Rrs99 = (df.loc[:, 'Lt'] - rho99 * df.loc[:, 'Lsky']) / df.loc[:, 'Ed']
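The central step of this new script is the sky-glint correction Rrs = (Lt - rho * Lsky) / Ed, with a viewing-geometry-dependent rho taken from get_rho_values. A stripped-down sketch of just that arithmetic, using a scalar rho and hypothetical toy spectra rather than real measurements:

import numpy as np

def rrs_above_water(Lt, Lsky, Ed, rho):
    """Remote-sensing reflectance from above-water radiometry: subtract the
    reflected sky contribution (rho * Lsky) from the total radiance Lt measured
    above the surface, then normalise by downwelling irradiance Ed."""
    return (Lt - rho * Lsky) / Ed

# toy spectra on three wavelengths (hypothetical values, arbitrary units)
Lt   = np.array([0.020, 0.030, 0.025])
Lsky = np.array([0.100, 0.080, 0.060])
Ed   = np.array([1.20, 1.40, 1.30])
rho  = 0.028  # illustrative surface reflectance factor

print(rrs_above_water(Lt, Lsky, Ed, rho))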
process/run_iwr.py → trios/run_iwr.py
...
@@ -15,7 +15,7 @@ import plotly.graph_objs as go
 import utils.utils as u
 import utils.auxdata as ua
-from process.process import *
+from trios.process import *
...
@@ -32,13 +32,13 @@ class fit:
 # ------------------------------------------------
 # above-water data files
 dirfig = os.path.abspath('/DATA/OBS2CO/data/trios/fig')
-dirout = os.path.abspath('/DATA/OBS2CO/data/trios/in_water')
+odir = os.path.abspath('/DATA/OBS2CO/data/trios/in_water')
 iwrfiles = glob.glob("/DATA/OBS2CO/data/trios/raw/uw*idpr*.csv")