diff --git a/.idea/.gitignore b/.idea/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..26d33521af10bcc7fd8cea344038eaaeb78d0ef5 --- /dev/null +++ b/.idea/.gitignore @@ -0,0 +1,3 @@ +# Default ignored files +/shelf/ +/workspace.xml diff --git a/.idea/.name b/.idea/.name new file mode 100644 index 0000000000000000000000000000000000000000..19bf0fff7d4391e29d6165f460d5295933447bef --- /dev/null +++ b/.idea/.name @@ -0,0 +1 @@ +hrudelin_parms_J2000.py \ No newline at end of file diff --git a/.idea/hru-delin-master.iml b/.idea/hru-delin-master.iml new file mode 100644 index 0000000000000000000000000000000000000000..8b8c395472a5a6b3598af42086e590417ace9933 --- /dev/null +++ b/.idea/hru-delin-master.iml @@ -0,0 +1,12 @@ +<?xml version="1.0" encoding="UTF-8"?> +<module type="PYTHON_MODULE" version="4"> + <component name="NewModuleRootManager"> + <content url="file://$MODULE_DIR$" /> + <orderEntry type="inheritedJdk" /> + <orderEntry type="sourceFolder" forTests="false" /> + </component> + <component name="PyDocumentationSettings"> + <option name="format" value="PLAIN" /> + <option name="myDocStringFormat" value="Plain" /> + </component> +</module> \ No newline at end of file diff --git a/.idea/inspectionProfiles/Project_Default.xml b/.idea/inspectionProfiles/Project_Default.xml new file mode 100644 index 0000000000000000000000000000000000000000..5e186e72259dfb678ee76a989c4b0baa74ac0036 --- /dev/null +++ b/.idea/inspectionProfiles/Project_Default.xml @@ -0,0 +1,30 @@ +<component name="InspectionProjectProfileManager"> + <profile version="1.0"> + <option name="myName" value="Project Default" /> + <inspection_tool class="PyPep8NamingInspection" enabled="true" level="WEAK WARNING" enabled_by_default="true"> + <option name="ignoredErrors"> + <list> + <option value="N803" /> + <option value="N802" /> + <option value="N806" /> + </list> + </option> + <option name="ignoredBaseClasses"> + <list> + <option value="unittest.TestCase" /> + <option value="unittest.case.TestCase" /> + <option value="pythonGate.Argument.Argument" /> + </list> + </option> + </inspection_tool> + <inspection_tool class="PyUnresolvedReferencesInspection" enabled="true" level="WARNING" enabled_by_default="true"> + <option name="ignoredIdentifiers"> + <list> + <option value="set.__getitem__" /> + <option value="bytes.encode" /> + <option value="dict.__add__" /> + </list> + </option> + </inspection_tool> + </profile> +</component> \ No newline at end of file diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml new file mode 100644 index 0000000000000000000000000000000000000000..105ce2da2d6447d11dfe32bfb846c3d5b199fc99 --- /dev/null +++ b/.idea/inspectionProfiles/profiles_settings.xml @@ -0,0 +1,6 @@ +<component name="InspectionProjectProfileManager"> + <settings> + <option name="USE_PROJECT_PROFILE" value="false" /> + <version value="1.0" /> + </settings> +</component> \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml new file mode 100644 index 0000000000000000000000000000000000000000..2a75f480b6975b3cbf14d878de0a951b981ca52a --- /dev/null +++ b/.idea/misc.xml @@ -0,0 +1,7 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project version="4"> + <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.8" project-jdk-type="Python SDK" /> + <component name="PythonCompatibilityInspectionAdvertiser"> + <option name="version" value="3" /> + </component> +</project> \ No newline at end of file diff --git 
a/.idea/modules.xml b/.idea/modules.xml new file mode 100644 index 0000000000000000000000000000000000000000..6f0674e3f4aef75dc26a447da0fad0a8f76fc927 --- /dev/null +++ b/.idea/modules.xml @@ -0,0 +1,8 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project version="4"> + <component name="ProjectModuleManager"> + <modules> + <module fileurl="file://$PROJECT_DIR$/.idea/hru-delin-master.iml" filepath="$PROJECT_DIR$/.idea/hru-delin-master.iml" /> + </modules> + </component> +</project> \ No newline at end of file diff --git a/.idea/rGraphicsSettings.xml b/.idea/rGraphicsSettings.xml new file mode 100644 index 0000000000000000000000000000000000000000..a32f011ab7e3cf715867ce6b18d7c3f869ec3282 --- /dev/null +++ b/.idea/rGraphicsSettings.xml @@ -0,0 +1,9 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project version="4"> + <component name="RGraphicsSettings"> + <option name="height" value="450" /> + <option name="resolution" value="75" /> + <option name="version" value="2" /> + <option name="width" value="800" /> + </component> +</project> \ No newline at end of file diff --git a/.idea/rSettings.xml b/.idea/rSettings.xml new file mode 100644 index 0000000000000000000000000000000000000000..97e6f1f160db88405e9db01b0ae6ba45adbed3f4 --- /dev/null +++ b/.idea/rSettings.xml @@ -0,0 +1,6 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project version="4"> + <component name="RSettings"> + <option name="interpreterPath" value="/usr/bin/R" /> + </component> +</project> \ No newline at end of file diff --git a/bin/create_config_file.py b/bin/create_config_file.py index b3c99d31fa26599e17497af06f0e26c369b232b7..5dd0b9df45d8bf2c39a349add295d08726249dca 100644 --- a/bin/create_config_file.py +++ b/bin/create_config_file.py @@ -180,6 +180,10 @@ file.write("OF_domain_export:\n") file.write("[hru_param]\n") file.write("hru_cat:\n") file.write("hru_landuse:\n") +file.write("[irrigation_analysis]\n") +file.write("HRU_file:\n") +file.write("cantons_file:\n") + diff --git a/bin/hru-delin_step2.sh b/bin/hru-delin_step2.sh index a3c01ab276fd4804da3d3a0f1b0650dc3ed2a850..1797ef99c9b8389c889d5ffedbd8c26fe85ddc86 100755 --- a/bin/hru-delin_step2.sh +++ b/bin/hru-delin_step2.sh @@ -113,49 +113,58 @@ fi # ---------------------- # clean work environment # ---------------------- -clean_files=`grep "files\s*:" $CONFIGFILE | cut -d ':' -f2 | sed -e 's/\s*$//' | sed -e 's/^\s*//'` -if [ -z "$clean_files" ]; then - clean_files=`grep "files\s*=" $CONFIGFILE | cut -d '=' -f2 | sed -e 's/\s*$//' | sed -e 's/^\s*//'` -fi +#clean_files=`grep "files\s*:" $CONFIGFILE | cut -d ':' -f2 | sed -e 's/\s*$//' | sed -e 's/^\s*//'` +#if [ -z "$clean_files" ]; then +# clean_files=`grep "files\s*=" $CONFIGFILE | cut -d '=' -f2 | sed -e 's/\s*$//' | sed -e 's/^\s*//'` +#fi -if [ -z "$clean_files" ]; then - echo "------------> ERROR : Output FILE Directory not provided !" - exit 1 -fi +#if [ -z "$clean_files" ]; then +# echo "------------> ERROR : Output FILE Directory not provided !" +# exit 1 +#fi # is the path absolute? -if [[ "$clean_files" = /* ]]; then - rm -f $clean_files/step2* $clean_files/step3* -else - rm -f $CONFIGFILEPATH/$clean_files/step2* $CONFIGFILEPATH/$clean_files/step3* -fi - -clean_results=`grep "results\s*:" $CONFIGFILE | cut -d ':' -f2 | sed -e 's/\s*$//' | sed -e 's/^\s*//'` -if [ -z "$clean_results" ]; then - clean_results=`grep "results\s*=" $CONFIGFILE | cut -d '=' -f2 | sed -e 's/\s*$//' | sed -e 's/^\s*//'` -fi - -if [ -z "$clean_results" ]; then - echo "------------> ERROR : Output RESULTS Directory not provided !" 
- exit 1 -fi +#if [[ "$clean_files" = /* ]]; then +# rm -f $clean_files/step2* $clean_files/step3* +#else +# rm -f $CONFIGFILEPATH/$clean_files/step2* $CONFIGFILEPATH/$clean_files/step3* +#fi + +#clean_results=`grep "results\s*:" $CONFIGFILE | cut -d ':' -f2 | sed -e 's/\s*$//' | sed -e 's/^\s*//'` +#if [ -z "$clean_results" ]; then +# clean_results=`grep "results\s*=" $CONFIGFILE | cut -d '=' -f2 | sed -e 's/\s*$//' | sed -e 's/^\s*//'` +#fi + +#if [ -z "$clean_results" ]; then +# echo "------------> ERROR : Output RESULTS Directory not provided !" +# exit 1 +#fi # is the path absolute? -if [[ "$clean_results" = /* ]]; then - rm -rf $clean_results - mkdir $clean_results -else - rm -rf $CONFIGFILEPATH/$clean_results - mkdir $CONFIGFILEPATH/$clean_results -fi +#if [[ "$clean_results" = /* ]]; then +# rm -rf $clean_results +# mkdir $clean_results +#else +# rm -rf $CONFIGFILEPATH/$clean_results +# mkdir $CONFIGFILEPATH/$clean_results +#fi # test if mkdir works -if [ $? -ne 0 ] ; then - echo "------------> ERROR : Impossible to create Output directory !" - exit 1 -fi +#if [ $? -ne 0 ] ; then +# echo "------------> ERROR : Impossible to create Output directory !" +# exit 1 +#fi # ----------------------------------- # exec second step of HRU-DELIN batch # ----------------------------------- -python3 $MYDIR/../modules/hrudelin_2_basins.py $CONFIGFILE $NBPROCESS +#python3 $MYDIR/../modules/hrudelin_2_basins.py $CONFIGFILE $NBPROCESS + +python3 $MYDIR/../modules/hrudelin_2_1_env_relocate.py $CONFIGFILE $NBPROCESS +python3 $MYDIR/../modules/hrudelin_2_2_derivation_watershed.py $CONFIGFILE $NBPROCESS +python3 $MYDIR/../modules/hrudelin_2_3_watershed_reclassification.py $CONFIGFILE $NBPROCESS +python3 $MYDIR/../modules/hrudelin_2_4_mask.py $CONFIGFILE $NBPROCESS +python3 $MYDIR/../modules/hrudelin_2_5_cut_stream_subbassins.py $CONFIGFILE $NBPROCESS +python3 $MYDIR/../modules/hrudelin_2_6_parrallele.py $CONFIGFILE $NBPROCESS +python3 $MYDIR/../modules/hrudelin_2_7_isolate_pixel.py $CONFIGFILE $NBPROCESS + diff --git a/data_exemple_les_collieres/cantons_irrigues.cpg b/data_exemple_les_collieres/cantons_irrigues.cpg new file mode 100644 index 0000000000000000000000000000000000000000..3ad133c048f2189041151425a73485649e6c32c0 --- /dev/null +++ b/data_exemple_les_collieres/cantons_irrigues.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/data_exemple_les_collieres/cantons_irrigues.dbf b/data_exemple_les_collieres/cantons_irrigues.dbf new file mode 100644 index 0000000000000000000000000000000000000000..52ac9ebaf959622ef0c642a4200263ea8da8d76b Binary files /dev/null and b/data_exemple_les_collieres/cantons_irrigues.dbf differ diff --git a/data_exemple_les_collieres/cantons_irrigues.prj b/data_exemple_les_collieres/cantons_irrigues.prj new file mode 100644 index 0000000000000000000000000000000000000000..ae0206b68de2ed81139b89a08ddd36a6b0ed7e35 --- /dev/null +++ b/data_exemple_les_collieres/cantons_irrigues.prj @@ -0,0 +1 @@ +PROJCS["RGF_1993_Lambert_93",GEOGCS["GCS_RGF_1993",DATUM["D_RGF_1993",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",700000.0],PARAMETER["False_Northing",6600000.0],PARAMETER["Central_Meridian",3.0],PARAMETER["Standard_Parallel_1",49.0],PARAMETER["Standard_Parallel_2",44.0],PARAMETER["Latitude_Of_Origin",46.5],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/data_exemple_les_collieres/cantons_irrigues.shp 
b/data_exemple_les_collieres/cantons_irrigues.shp new file mode 100644 index 0000000000000000000000000000000000000000..d42a54b78b48b6774e2a6433a7d2e708313e68b0 Binary files /dev/null and b/data_exemple_les_collieres/cantons_irrigues.shp differ diff --git a/data_exemple_les_collieres/cantons_irrigues.shx b/data_exemple_les_collieres/cantons_irrigues.shx new file mode 100644 index 0000000000000000000000000000000000000000..7481fd7e68cd10a547c152d310517edeebc1cd8d Binary files /dev/null and b/data_exemple_les_collieres/cantons_irrigues.shx differ diff --git a/data_exemple_les_collieres/gauges_selected.cpg b/data_exemple_les_collieres/gauges_selected.cpg new file mode 100644 index 0000000000000000000000000000000000000000..3ad133c048f2189041151425a73485649e6c32c0 --- /dev/null +++ b/data_exemple_les_collieres/gauges_selected.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/data_exemple_les_collieres/gauges_selected.dbf b/data_exemple_les_collieres/gauges_selected.dbf new file mode 100644 index 0000000000000000000000000000000000000000..4b1f0fd17ac90775a72d731e1d57d186e3d001c9 Binary files /dev/null and b/data_exemple_les_collieres/gauges_selected.dbf differ diff --git a/data_exemple_les_collieres/gauges_selected.prj b/data_exemple_les_collieres/gauges_selected.prj new file mode 100644 index 0000000000000000000000000000000000000000..f904335fe5023555790abfa69e50304e1cf3fa1f --- /dev/null +++ b/data_exemple_les_collieres/gauges_selected.prj @@ -0,0 +1 @@ +PROJCS["RGF93_Lambert_93",GEOGCS["GCS_RGF93_geographiques_dms",DATUM["D_RGF_1993",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",700000.0],PARAMETER["False_Northing",6600000.0],PARAMETER["Central_Meridian",3.0],PARAMETER["Standard_Parallel_1",44.0],PARAMETER["Standard_Parallel_2",49.0],PARAMETER["Latitude_Of_Origin",46.5],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/data_exemple_les_collieres/gauges_selected.shp b/data_exemple_les_collieres/gauges_selected.shp new file mode 100644 index 0000000000000000000000000000000000000000..9d38007cc242dc5ff9504d4289cdeb484d58e942 Binary files /dev/null and b/data_exemple_les_collieres/gauges_selected.shp differ diff --git a/data_exemple_les_collieres/gauges_selected.shx b/data_exemple_les_collieres/gauges_selected.shx new file mode 100644 index 0000000000000000000000000000000000000000..c8d89a65076f6770d08137990ccbe0856149bf49 Binary files /dev/null and b/data_exemple_les_collieres/gauges_selected.shx differ diff --git a/data_exemple_les_collieres/gauges_selected_RFG93.cpg b/data_exemple_les_collieres/gauges_selected_RFG93.cpg new file mode 100644 index 0000000000000000000000000000000000000000..3ad133c048f2189041151425a73485649e6c32c0 --- /dev/null +++ b/data_exemple_les_collieres/gauges_selected_RFG93.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/data_exemple_les_collieres/gauges_selected_RFG93.dbf b/data_exemple_les_collieres/gauges_selected_RFG93.dbf new file mode 100644 index 0000000000000000000000000000000000000000..149f73ede3e19879796761868620a19547f21324 Binary files /dev/null and b/data_exemple_les_collieres/gauges_selected_RFG93.dbf differ diff --git a/data_exemple_les_collieres/gauges_selected_RFG93.prj b/data_exemple_les_collieres/gauges_selected_RFG93.prj new file mode 100644 index 0000000000000000000000000000000000000000..ae0206b68de2ed81139b89a08ddd36a6b0ed7e35 --- /dev/null +++ 
b/data_exemple_les_collieres/gauges_selected_RFG93.prj @@ -0,0 +1 @@ +PROJCS["RGF_1993_Lambert_93",GEOGCS["GCS_RGF_1993",DATUM["D_RGF_1993",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",700000.0],PARAMETER["False_Northing",6600000.0],PARAMETER["Central_Meridian",3.0],PARAMETER["Standard_Parallel_1",49.0],PARAMETER["Standard_Parallel_2",44.0],PARAMETER["Latitude_Of_Origin",46.5],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/data_exemple_les_collieres/gauges_selected_RFG93.shp b/data_exemple_les_collieres/gauges_selected_RFG93.shp new file mode 100644 index 0000000000000000000000000000000000000000..0aeecdd20c0f23ca9418a7d9a26d912e3e989ae6 Binary files /dev/null and b/data_exemple_les_collieres/gauges_selected_RFG93.shp differ diff --git a/data_exemple_les_collieres/gauges_selected_RFG93.shx b/data_exemple_les_collieres/gauges_selected_RFG93.shx new file mode 100644 index 0000000000000000000000000000000000000000..73817abd284acca58745094af228aedd154d088c Binary files /dev/null and b/data_exemple_les_collieres/gauges_selected_RFG93.shx differ diff --git a/data_exemple_les_collieres/gauges_selected_bad.cpg b/data_exemple_les_collieres/gauges_selected_bad.cpg new file mode 100644 index 0000000000000000000000000000000000000000..3ad133c048f2189041151425a73485649e6c32c0 --- /dev/null +++ b/data_exemple_les_collieres/gauges_selected_bad.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/data_exemple_les_collieres/gauges_selected_bad.dbf b/data_exemple_les_collieres/gauges_selected_bad.dbf new file mode 100644 index 0000000000000000000000000000000000000000..4b1f0fd17ac90775a72d731e1d57d186e3d001c9 Binary files /dev/null and b/data_exemple_les_collieres/gauges_selected_bad.dbf differ diff --git a/data_exemple_les_collieres/gauges_selected_bad.prj b/data_exemple_les_collieres/gauges_selected_bad.prj new file mode 100644 index 0000000000000000000000000000000000000000..ae0206b68de2ed81139b89a08ddd36a6b0ed7e35 --- /dev/null +++ b/data_exemple_les_collieres/gauges_selected_bad.prj @@ -0,0 +1 @@ +PROJCS["RGF_1993_Lambert_93",GEOGCS["GCS_RGF_1993",DATUM["D_RGF_1993",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",700000.0],PARAMETER["False_Northing",6600000.0],PARAMETER["Central_Meridian",3.0],PARAMETER["Standard_Parallel_1",49.0],PARAMETER["Standard_Parallel_2",44.0],PARAMETER["Latitude_Of_Origin",46.5],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/data_exemple_les_collieres/gauges_selected_bad.shp b/data_exemple_les_collieres/gauges_selected_bad.shp new file mode 100644 index 0000000000000000000000000000000000000000..b8e462a8a120c9d51b4d03e7d61636d052aea82e Binary files /dev/null and b/data_exemple_les_collieres/gauges_selected_bad.shp differ diff --git a/data_exemple_les_collieres/gauges_selected_bad.shx b/data_exemple_les_collieres/gauges_selected_bad.shx new file mode 100644 index 0000000000000000000000000000000000000000..cc362f0c988706dff24673d53a8d73a39168feb7 Binary files /dev/null and b/data_exemple_les_collieres/gauges_selected_bad.shx differ diff --git a/data_exemple_les_collieres/geology_RGF93.tif b/data_exemple_les_collieres/geology_RGF93.tif new file mode 100644 index 0000000000000000000000000000000000000000..7f667b38856dc7156eaf4f4b6622f7fcda09c879 Binary files /dev/null and 
b/data_exemple_les_collieres/geology_RGF93.tif differ diff --git a/data_exemple_les_collieres/hrudelin_config_collieres.cfg b/data_exemple_les_collieres/hrudelin_config_collieres.cfg new file mode 100644 index 0000000000000000000000000000000000000000..f3ff13ffd3e7a3a62c36057d0c3d20c881891d11 --- /dev/null +++ b/data_exemple_les_collieres/hrudelin_config_collieres.cfg @@ -0,0 +1,162 @@ +# ----------- +# environment +# ----------- + +[dir_in] +dir:/home/michael.rabotin/1_HYBV/HRU_DELIN/hru-delin-dev/data_exemple_les_collieres + +[dem] +dem:mnt_RGF93.tif + + +[data] +hgeo:geology_RGF93.tif +landuse:landuse_RGF93.tif +soil:soil_RGF93.tif + + +[gauges] +gauges:gauges_selected_bad.shp +#for watershed ID, used for identification of watersheds +gauges_col_name=ID +# drained surface +gauges_area_col_name=RASTERVALU +relocated_gauges= + + +[irrigation] +# yes or no +to_do:no +irrigation: +irrig_col_name= +#for irrig_col_type, 2 for groundwater and 3 for surfaceirrig_col_type= + +# you can indicate a minimum surface value for an HRU to be a GU: +#minimum surface can be null, global (irrig_surf_min_GU) or spatialized (irrig_col_min_GU) +irrig_surf_min_GU= +irrig_col_min_GU= +#you can specify a maximum distance search for GU (default is 5000 m +irrig_distance_GU=5000 + +irrigation_sector: +irrig_sector_col_name= +irrig_col_sau_irr= +irrig_col_dom_sau_irr= +irrigation_table: +relocated_irrigation= + +[dams] +# yes or no +to_do:no +dams= +dams_col_name= +dams_smax= +dams_s0= +#drained surface +dams_area_col_name= +relocated_dams= + +[dir_out] +files:/home/michael.rabotin/temporaires/collieres/files +results:/home/michael.rabotin/temporaires/collieres/OUT +# ------------------------- +# 1st step : hru-delin_init +# ------------------------- + +[surface] +#selection: total -> full dem +# polygon -> polygon: name of the shapefile +# coords -> give the coords upper left (west and north) and lower right (east and south) +selection:polygon +polygon:watershed.shp +west: +north: +east: +south: + + +[demfill] +# +# if demfill = yes : depressionless DEM will be generated +# no : no action on input DEM +# +demfill:yes + +# +# if rules_auto_* = yes : rules will be calculated by the module +# if no : fill the corresponding file (reclass_default_rules_*) +# +[reclass_dem] +rules_auto_dem:yes +step_dem:90 + +[reclass_slope] +rules_auto_slope:yes + +[reclass_aspect] +rules_auto_aspect:yes + +[basin_min_size] +# minimum size of calculated watersheds (r.watershed) +# number of pixels +# size = N = SURFACE_km2 / ( RES_km2^2 ) +# ex: S = 10km2, RES = 200m = 0.2 km ==> N = 250 pixels +# S = 20km2, RES = 90m = 0.09 km ==> N = 2469 pixels +size=2469 + +# --------------------------- +# 2nd step : hru-delin_basins +# --------------------------- +# So it's possible to specify a variable using : or = ??? +[auto_relocation] +# yes or no +to_do: +# -------- first rule +# surface is in percent! 
+# distance is in pixels: N = D / RES +# example: for 3km distance with a 50m DEM, the number of pixel is: 3000/50 = 33 pixels +surface_tolerance_1=10 +distance_tolerance_1=33 +# -------- second rule +# second rule with a distance tolerance of 6km and a surface tolerance of 30% +surface_tolerance_2=30 +distance_tolerance_2=67 + +# unit = 1 : m , = 2 : km +area_unit=2 + + + +# --------------------------- +# 3rd step : hru-delin_hrugen +# --------------------------- + +[hrus_min_surface] +# there, this is in pixel so pay attention to the DEM resolution +# same as for bassin_min_size: N = SURFACE_km2 / ( RES_km2^2 ) # see 'size' parameter in step 1 for examples +# 2 km2 = 247 pixels +surface=247 + +# +# MNT-derived layers to be integrated in the overlay operation +# +[layer_overlay] +# yes or no +dem:yes +slope:yes +aspect:yes + +# -------------------------------- +# 4th step : hru-delin_parms_J2000 +# -------------------------------- +# yes or no +[topology] +dissolve_cycle:yes +hru_no_topology_log:yes +OF_domain_export:yes +[hru_param] +hru_cat:no +hru_landuse:no +[irrigation_analysis] +HRU_file:/home/michael.rabotin/temporaires/hru.shp +cantons_file:/home/michael.rabotin/temporaires/cantons_irrigues.shp diff --git a/data_exemple_les_collieres/landuse_RGF93.tif b/data_exemple_les_collieres/landuse_RGF93.tif new file mode 100644 index 0000000000000000000000000000000000000000..3c40f732d9323200d4651a458c97b5e2df940ed7 Binary files /dev/null and b/data_exemple_les_collieres/landuse_RGF93.tif differ diff --git a/data_exemple_les_collieres/mnt_RGF93.tif b/data_exemple_les_collieres/mnt_RGF93.tif new file mode 100644 index 0000000000000000000000000000000000000000..6f3cd5d244ac0795fd87d027969c650e4ca5eff6 Binary files /dev/null and b/data_exemple_les_collieres/mnt_RGF93.tif differ diff --git a/data_exemple_les_collieres/soil_RGF93.tif b/data_exemple_les_collieres/soil_RGF93.tif new file mode 100644 index 0000000000000000000000000000000000000000..aaf2919717080c7ab346c325b4344acb0be4b9e5 Binary files /dev/null and b/data_exemple_les_collieres/soil_RGF93.tif differ diff --git a/data_exemple_les_collieres/watershed.cpg b/data_exemple_les_collieres/watershed.cpg new file mode 100644 index 0000000000000000000000000000000000000000..3ad133c048f2189041151425a73485649e6c32c0 --- /dev/null +++ b/data_exemple_les_collieres/watershed.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/data_exemple_les_collieres/watershed.dbf b/data_exemple_les_collieres/watershed.dbf new file mode 100644 index 0000000000000000000000000000000000000000..f84470200db41aecc0cbc9587fa456d4fb4cc8b4 Binary files /dev/null and b/data_exemple_les_collieres/watershed.dbf differ diff --git a/data_exemple_les_collieres/watershed.prj b/data_exemple_les_collieres/watershed.prj new file mode 100644 index 0000000000000000000000000000000000000000..ae0206b68de2ed81139b89a08ddd36a6b0ed7e35 --- /dev/null +++ b/data_exemple_les_collieres/watershed.prj @@ -0,0 +1 @@ +PROJCS["RGF_1993_Lambert_93",GEOGCS["GCS_RGF_1993",DATUM["D_RGF_1993",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",700000.0],PARAMETER["False_Northing",6600000.0],PARAMETER["Central_Meridian",3.0],PARAMETER["Standard_Parallel_1",49.0],PARAMETER["Standard_Parallel_2",44.0],PARAMETER["Latitude_Of_Origin",46.5],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/data_exemple_les_collieres/watershed.shp b/data_exemple_les_collieres/watershed.shp 
new file mode 100644 index 0000000000000000000000000000000000000000..d8956b2e8dbe7ff0c1205a9bcde80bae3a55af7c Binary files /dev/null and b/data_exemple_les_collieres/watershed.shp differ diff --git a/data_exemple_les_collieres/watershed.shx b/data_exemple_les_collieres/watershed.shx new file mode 100644 index 0000000000000000000000000000000000000000..a4001dd1c6a5e3bad8834f80a9e2ec10a2373879 Binary files /dev/null and b/data_exemple_les_collieres/watershed.shx differ diff --git a/gateway-main/README.md b/gateway-main/README.md new file mode 100644 index 0000000000000000000000000000000000000000..6b7ee62f551619a9c410b003dfde9aec23381007 --- /dev/null +++ b/gateway-main/README.md @@ -0,0 +1,283 @@
+# 1- Using the gateway
+## 1-1- The gateway
+
+The gateway uses the RPC (Remote Procedure Call) communication protocol; in our case this means that one program calls another through command-line invocations.
+
+For the user of the gateway, this is abstracted away by a set of libraries written in different languages.
+
+The purpose of the gateway is to make it easy for a program A to communicate with a program B, where A and B may be written in two different programming languages.
+
+To do so, the user relies on a library written in the same language as their program. Whether they are writing the calling program or the called one makes no difference to how the library is added to the project; it only changes what they do with it and how they use it.
+
+The gateway libraries therefore act as an interface between the gateway itself and the target programs, making the programming of inter-program communication feel like a series of API calls.
+
+
+## 1-2- Sender (program A)
+This section is meant to help you when you write the sender program, that is, the program that launches a second program.
+
+
+### 1-2-1- What is possible
+
+The sender program is the one that asks the gateway to launch the second program.
+You can use the SenderStub class to prepare the data to send to the second program, to set up an output file for possible outputs and, of course, to launch the second program with the run function.
+
+### 1-2-2- What you will want to do
+
+After importing the library into your project, one of the first things to do is to instantiate the SenderStub class; it is through this object that you perform every operation related to running the second program.
+To instantiate it, you need an ExecFile (which represents the program to execute) and an OutputFile (a file that will hold possible outputs from the gateway and from the ExecFile).
+
+You can add these files to the SenderStub later, but it is recommended to do it at initialization so you do not forget it afterwards.
+You can also leave the output file unset if you have no use for it.
+
+Once that is done, you can add the set of values you want to pass to the second program.
+For example, if the second program needs a file to work, this is the moment to pass the path to that file.
+
+To do this, you pass a series of Argument objects, in the form of Parameter objects and groupings of Parameter objects: the Dictionary objects.
+A Dictionary can contain several Parameter objects, but also several Dictionary objects, as illustrated by the short sketch below.
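+
+A minimal sender-side sketch in Python, using the pythonGate library presented in the installation section, could look like the following. It is indicative only: the exact constructor arguments of ExecFile, OutputFile and SenderStub, and the way a Dictionary is registered on the stub, may differ in your version of the library, so check the pythonGate sources.
+
+```python
+import pythonGate as pyGate
+
+# File objects: the program to launch and an (optional) output file.
+# Constructor arguments shown here (path, name, command) are indicative.
+exec_file = pyGate.ExecFile("/home/user/project/", "receiver.py", "python3")
+out_file = pyGate.OutputFile("/home/user/project/", "outputs.json")
+
+sndStub = pyGate.SenderStub(exec_file, out_file)
+
+# Group the values to send inside a Dictionary
+params = pyGate.Dictionary("inputs")
+params.addParameter("dem_path", "/home/user/data/mnt_RGF93.tif")
+
+# A Dictionary can itself contain another Dictionary
+options = pyGate.Dictionary("options")
+options.addParameter("verbose", "1")
+params.addArgument(options)
+
+# Register the Dictionary on the stub (method name indicative) and launch
+sndStub.addArgument(params)
+sndStub.run("/home/user/gateway/build/gate.exe")  # path to the compiled gateway
+```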
+A Parameter represents a single value to pass, with a name and a value.
+
+You will therefore want to create a first Dictionary of values, instantiating it with a name, and then fill it with Argument objects:
+either with addArgument() (which can add either a Dictionary or a Parameter) or with addParameter(), which directly adds a parameter to the Dictionary.
+
+Finally, you can call the run() function of SenderStub, passing it the path to the gateway executable.
+This launches the gateway, which in turn launches your program.
+
+On the standard output (normally the terminal/console) you will see all the usual output (a print or a cout behaves exactly as usual, even when used in the program launched by the gateway), and the same goes for the error output.
+
+
+## 1-3- Receiver (program B)
+This section is meant to help you when you write the receiver program, that is, the program that gets launched by a first program.
+
+
+### 1-3-1- What is possible
+You can perfectly well do nothing at all. The program will be launched normally, as if you had started it yourself by hand.
+
+However, if you want to retrieve data from the gateway or send results (outputs) back to the first program, you will have to write a few lines of code.
+
+
+### 1-3-2- What you will want to do
+If you do not want or do not need to retrieve data from the gateway, you can do nothing at all and leave your program unchanged.
+However, if you do want to interact with the gateway, the first thing to do after importing the library into your project is to instantiate ReceiverStub.
+
+From the ReceiverStub, you can use the various display functions (also available in SenderStub) to see which data you have access to.
+You can then use functions such as findDictionaryWithName or findArgumentWithName, or more directly getArgument (which also works on the Dictionary objects you retrieve), to fetch the variables and other values coming from the sender program.
+
+Once you have retrieved what you want, you can use it easily in the rest of your code.
+
+Finally, one last possibility offered by ReceiverStub is to use an outputFile and send output data through it (only if it was set beforehand by the sender); this way, you can write dictionaries to the output file so they can easily be retrieved by the sender program.
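+
+As an indicative sketch, a receiver written in Python could retrieve its inputs as shown below. The function names follow the ones described in the documentation sections further down; the exact way ReceiverStub is constructed may differ in your version of the library.
+
+```python
+import pythonGate as pyGate
+
+rcvStub = pyGate.ReceiverStub()
+
+# Look up the data sent by the sender program
+inputs = rcvStub.findDictionaryWithName("inputs")   # the Dictionary created on the sender side
+dem_path = inputs.getArgument("dem_path")           # returns a Parameter, or nothing if absent
+
+if dem_path is not None:
+    print("DEM to process:", dem_path.value)
+
+# If the sender defined an output file, the ReceiverStub can write a Dictionary
+# of results into it so the sender can read them back (see the OutputFile section below).
+results = pyGate.Dictionary("results")
+results.addParameter("status", "done")
+```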
+
+
+## 2- Installation help
+
+To use the gateway, you need to download the libraries and the C++ gateway, then import the required libraries into your projects and compile the C++ code in order to use it.
+
+To do so, go to the GitLab link below and clone the project or download the zip (which you will then have to unzip):
+https://gitlab.com/theolabt/gateway
+
+In this project, the two folders of interest are:
+- gateway: which contains the gateway written in C++
+- libraries: which contains the various libraries to integrate (or not) into your projects
+
+
+## 2-1- Compiling the gateway
+
+To compile the gateway, you will need g++ installed.
+To check whether that is the case, open a terminal (for example PowerShell on Windows) and type:
+
+`g++ --version`
+
+
+If you get a message back saying the command was not found, you will have to install g++.
+If g++ is already installed but you still get the error message, you may need to add its installation path to your PATH.
+
+
+If g++ is indeed installed, go to the gateway folder that contains the C++ gateway and open a terminal.
+To compile, type:
+
+`g++ main.cpp GateStub/*.cpp -o build/gate.exe`
+
+
+If it works, you will get no error message in the terminal and you will find a new gate.exe file in the build folder.
+
+It is the path to this gate.exe file that you will have to pass to the SenderStub when you use the run() function that calls your second program.
+
+
+## 2-2- Importing the library into your project
+
+The libraries are located in the libraries folder.
+You need to grab the folder named after the language you use, followed by Gate.
+For Python, for example, it is pythonGate.
+
+The way to import the library into your project then depends on the language you use.
+
+
+### Python
+
+Grab the pythonGate package located in the libraries folder of the gateway project and copy-paste it at the root of the project you are working on.
+
+To then use the pythonGate library in your program, you can write:
+
+```python
+import pythonGate as pyGate
+
+sndStub = pyGate.SenderStub()
+```
+
+
+You can now use the pythonGate library in Python to communicate with the gateway.
+
+### R
+
+Grab the Rgate folder located in the libraries folder of the gateway project and copy-paste it next to the program you are working on.
+Rename the folder to `lib` (or put its contents in another lib folder).
+
+```R
+source("lib/rgate.R")
+
+sndStub = SenderStub$new()
+```
+
+You should now be able to use the rGate library in R to communicate with the gateway.
+
+/!\ The R classes use R6, so attributes and functions are accessed with the $ symbol.
+
+### C++
+
+Grab the cppGate folder located in the libraries folder of the gateway project and place it in your project.
+You simply need the relative path to the ReceiverStub.h or SenderStub.h file, depending on your program.
+
+```cpp
+#include "../lib/cppGate/SenderStub.h" // for example
+
+SenderStub* sndStub = new SenderStub();
+```
+
+You should now be able to use the cppGate library in C++ to communicate with the gateway.
+
+/!\ Objects are handled through pointers here.
+
+# 3- Documentation
+## 3-1- Class diagrams and explanations
+
+
+
+
+This class diagram shows the object architecture used in every library as well as in the gateway itself.
+It contains all the classes involved in the operation of the gateway, except for the stubs, which are covered later.
+
+
+Let us start with the File classes.
+The File class itself is abstract (and therefore cannot be instantiated), but it serves as a model for its child classes, ExecFile and OutputFile, which thus also have the name and path attributes.
+
+
+The Argument classes are organized following the Composite design pattern.
+The Dictionary and Parameter classes inherit from Argument.
+Dictionary contains an array of Argument objects.
+A Dictionary can therefore contain a series of Parameter objects, but also a series of Dictionary objects (which can themselves contain further Dictionary and Parameter objects).
+
+All the classes inherit from the Serializable interface, which forces them to define a serialize() function; the goal is to be able to retrieve the information of every class in an easily transportable format (here, a JSON string). Serializable therefore exists to simplify this implementation. As a user, you will not need to use this class or the serialize() functions, but knowing it exists can help if you are trying to understand how the gateway works.
+
+
+
+
+This second class diagram focuses on the stubs.
+Stubs are classes that act as interfaces: they are what the user directly manipulates to communicate with the gateway.
+
+There are 3 kinds of stubs:
+- SenderStub, used by the program that calls the gateway
+- ReceiverStub, used by the program that gets called
+- GateStub, which is the interface on the gateway side and which you, as a user, will not manipulate.
+
+This diagram shows that each stub contains a list of DataFile and Dictionary objects to pass the data, plus an OutputFile (even though it is possible not to set one).
+In addition, SenderStub and GateStub contain an ExecFile, which must be set in the SenderStub, otherwise the gateway cannot execute the second program.
+
+## 3-2- Classes and functions
+In this section you will find a description of all the classes and their functions.
+
+
+### 3-2-1- File
+File is an abstract class that holds the general file data.
+
+Attributes:
+    name - string
+    e.g. file.txt
+Name is the file name, given with its extension.
+
+    path - string
+    e.g. /home/files/
+Path is the absolute path to the file.
+
+
+
+### 3-2-2- ExecFile
+This class is used to locate the program that will be called by the gateway.
+
+Attributes:
+    cmd - string
+    e.g. python3
+Cmd is the command used to call the program; for Python it can be python3 or python, for example.
+
+
+
+### 3-2-3- OutputFile
+OutputFile is used to easily access a file that will contain the gateway's result outputs.
+The user can also add their own outputs (such as data to be reused in the first program). If you do not want to use this feature, simply leave the OutputFile field of the SenderStub empty.
+
+
+### 3-2-4- Argument
+Argument is the main class for the information passed from program A to program B. This class is abstract; it can only be instantiated through the Parameter and Dictionary classes.
+
+
+Attributes:
+    name - string
+    e.g. "values name"
+Gives the attribute a name. It both describes the value and makes it possible to find it again later.
+
+
+
+### 3-2-5- Parameter
+The Parameter class is the most basic one for storing and transporting information in a name-value format.
+
+Attributes:
+    value - string
+    e.g. "value"
+Represents the value passed through the gateway. It is stored as a string to ease its transport and serialization, but it can then be parsed back into the desired type.
+
+Functions:
+    string display ()
+Displays the content of the Parameter and returns the displayed string.
+
+    Argument getArgument (name - string)
+Checks that the parameter has the requested name and returns it if so, otherwise returns null.
+Exists so that it works with the recursive getArgument function of Dictionary.
+
+    int getValueAsInt ()
+Parses the value stored as a string in the Parameter and returns it as an int.
+
+    float getValueAsFloat ()
+Parses the value stored as a string in the Parameter and returns it as a float.
+
+    list getValueAsList ()
+Parses the value stored as a string in the Parameter and returns it as a list.
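+
+For example, an indicative fragment, assuming a Dictionary named inputs was retrieved from the stub and contains a Parameter "size" whose value was sent as the string "250":
+
+```python
+size_param = inputs.getArgument("size")  # returns the Parameter, or null/None if no name matches
+print(size_param.display())              # prints the Parameter content and returns that string
+size = size_param.getValueAsInt()        # the value travels as a string; parse it back to an int
+print(size * 10)                         # it can now be used as a regular integer
+```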
+
+
+### 3-2-6- Dictionary
+The Dictionary is also used to store data, but in a more general way, since it stores Argument objects instead of storing Parameter objects directly.
+
+Attributes:
+    value - list of Argument
+    i.e. a list of Parameter and Dictionary objects
+Represents the values passed through the gateway. They are stored as Argument objects so that they can be serialized for transport and parsed back into the desired types afterwards.
+
+Functions:
+    string display ()
+Displays the content of the Dictionary and returns the displayed string.
+
+    Argument getArgument (name - string)
+Searches the Dictionary's list recursively to find an Argument (another Dictionary or a Parameter) with the requested name. If two Arguments have the same name, only the first one found is returned.
+
+    Argument addParameter (name - string, value - Any)
+Takes the name and value arguments, creates a Parameter from them and adds it to the Dictionary's list.
+
+    Argument addArgument (argument - Argument)
+Takes an Argument (Parameter or Dictionary) as a parameter and adds it to the Dictionary's list of Arguments.
diff --git a/gateway-main/gateway/GateStub/ExecFile.cpp b/gateway-main/gateway/GateStub/ExecFile.cpp new file mode 100644 index 0000000000000000000000000000000000000000..38b506145fbee0130accbdb1814182989b99f9bd --- /dev/null +++ b/gateway-main/gateway/GateStub/ExecFile.cpp @@ -0,0 +1,49 @@ +// +// Created by tlabrosse on july 2022.
+// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#include <stdio.h> +#include "ExecFile.h" + +#ifdef _WIN32 +#define OS 1 +#endif + +#ifdef linux +#define OS 2 +#endif + +namespace gateway { + ExecFile::ExecFile(const std::string &path, const std::string &name, OutputFile *output, const std::string &cmd) + : File(path, name), output(output), cmd(cmd) {} + + ExecFile::ExecFile(const std::string &path, const std::string &name, OutputFile *output, const std::string &cmd, + const std::string &cmdLinux) + : File(path, name), output(output), cmd(cmd), cmdLinux(cmdLinux) {} + + std::string ExecFile::run(const std::string& jsonLine) const { + + std::string cmd; + if(OS == 1 || this->cmdLinux.empty()) + cmd = this->cmd; + else + cmd = this->cmdLinux; + + std::string command = cmd + " " + this->getPath() + this->getName() + " " + jsonLine; + FILE *file = popen(command.c_str(), "r"); + char buffer[100]; + std::string stringBuff; + + if (file == nullptr) perror ("Error opening file"); + else { + while ( !feof(file) ) { + if ( fgets (buffer , 100 , file) == nullptr ) break; + stringBuff += buffer; + } + fclose (file); + } + return stringBuff; + } +} // gateway \ No newline at end of file diff --git a/gateway-main/gateway/GateStub/ExecFile.h b/gateway-main/gateway/GateStub/ExecFile.h new file mode 100644 index 0000000000000000000000000000000000000000..4a2e25ce1a468ca46918565fc109db9a66449dfa --- /dev/null +++ b/gateway-main/gateway/GateStub/ExecFile.h @@ -0,0 +1,28 @@ +// +// Created by tlabrosse on july 2022. +// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#ifndef GATEWAY_EXECFILE_H +#define GATEWAY_EXECFILE_H + +#include "OutputFile.h" + +namespace gateway { + +class ExecFile: public File { +private: + OutputFile *output; + std::string cmd; + std::string cmdLinux = ""; + +public: + ExecFile(const std::string &path, const std::string &name, OutputFile *output, const std::string &cmd); + ExecFile(const std::string &path, const std::string &name, OutputFile *output, const std::string &cmd, const std::string &cmdLinux); + + std::string run(const std::string& jsonLine) const; +}; +} // gateway + +#endif //GATEWAY_EXECFILE_H diff --git a/gateway-main/gateway/GateStub/File.cpp b/gateway-main/gateway/GateStub/File.cpp new file mode 100644 index 0000000000000000000000000000000000000000..34433dd51cb773e0522ac0819561d7fe7bc0c56a --- /dev/null +++ b/gateway-main/gateway/GateStub/File.cpp @@ -0,0 +1,20 @@ +// +// Created by tlabrosse on july 2022. +// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#include "File.h" + +namespace gateway { + + File::File(const std::string &path, const std::string &name) : path(path), name(name) {} + File::~File() {} + + const std::string &File::getPath() const { + return path; + } + const std::string &File::getName() const { + return name; + } +} // gateway \ No newline at end of file diff --git a/gateway-main/gateway/GateStub/File.h b/gateway-main/gateway/GateStub/File.h new file mode 100644 index 0000000000000000000000000000000000000000..42a264af1476d989f6d61068bf62f1721df37e9b --- /dev/null +++ b/gateway-main/gateway/GateStub/File.h @@ -0,0 +1,30 @@ +// +// Created by tlabrosse on july 2022. 
+// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#ifndef GATEWAY_FILE_H +#define GATEWAY_FILE_H + +#include <string> + +namespace gateway { + +class File { +protected: + std::string path; + std::string name; + +public: + File(const std::string &path, const std::string &name); + + virtual ~File() =0; + + const std::string &getPath() const; + const std::string &getName() const; +}; + +} // gateway + +#endif //GATEWAY_FILE_H diff --git a/gateway-main/gateway/GateStub/GateStub.cpp b/gateway-main/gateway/GateStub/GateStub.cpp new file mode 100644 index 0000000000000000000000000000000000000000..f84b4da0056d69d208508943b5f45a892966c4dd --- /dev/null +++ b/gateway-main/gateway/GateStub/GateStub.cpp @@ -0,0 +1,45 @@ +// +// Created by tlabrosse on july 2022. +// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#include <iostream> +#include "GateStub.h" + +using json = nlohmann::json; + +namespace gateway { +//private: + int GateStub::fillFromJson(const std::string& jsonLine) { + + auto data = json::parse(jsonLine); + + auto outputFile = data["OutputFile"]; + auto execFile = data["ExecFile"]; + auto dataFiles = data["DataFiles"]; + + this->output = new OutputFile(outputFile["path"], outputFile["name"]); + this->execFile = new ExecFile(execFile["path"], execFile["name"], this->output, execFile["cmd"], + execFile["cmdAlt"]); + + return 0; + } + +//public: + GateStub::GateStub(const std::string &jsonLine) { + fillFromJson(jsonLine); + this->jsonLine = jsonLine; + output->initialize(); + } + + ExecFile *GateStub::getExecFile() const { + return execFile; + } + OutputFile *GateStub::getOutput() const { + return output; + } + const std::string &GateStub::getJsonLine() const { + return jsonLine; + } +} // gateway \ No newline at end of file diff --git a/gateway-main/gateway/GateStub/GateStub.h b/gateway-main/gateway/GateStub/GateStub.h new file mode 100644 index 0000000000000000000000000000000000000000..a545d83e1ac45a6f156ca92e7a5deb04a4f8077b --- /dev/null +++ b/gateway-main/gateway/GateStub/GateStub.h @@ -0,0 +1,32 @@ +// +// Created by tlabrosse on july 2022. +// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#ifndef GATEWAY_GATESTUB_H +#define GATEWAY_GATESTUB_H + +#include "../lib/json.hpp" +#include "ExecFile.h" + +namespace gateway { + +class GateStub { +private: + ExecFile *execFile; + OutputFile *output; + std::string jsonLine; + + int fillFromJson(const std::string& jsonLine); +public: + GateStub(const std::string& jsonLine); + + ExecFile *getExecFile() const; + OutputFile *getOutput() const; + + const std::string &getJsonLine() const; +}; +} // gateway + +#endif //GATEWAY_GATESTUB_H diff --git a/gateway-main/gateway/GateStub/OutputFile.cpp b/gateway-main/gateway/GateStub/OutputFile.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ef6db01061740f83ab8d17e5648995b77f934bf1 --- /dev/null +++ b/gateway-main/gateway/GateStub/OutputFile.cpp @@ -0,0 +1,52 @@ +// +// Created by tlabrosse on july 2022. 
+// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#include <fstream> +#include "OutputFile.h" +#include "../lib/json.hpp" + +using json = nlohmann::json; + +namespace gateway { +//private + std::string OutputFile::read() { + if(this->name != "" && this->path != "") { + std::ifstream file (this->path + this->name); + if(!file.is_open()) { + return "{}"; + } + + std::string line, output = ""; + while ( getline (file,line) ) + { + output += line; + } + file.close(); + + return output; + } + return "{}"; + } + +//public + OutputFile::OutputFile(const std::string &path, const std::string &name) : File(path, name) {} + + + int OutputFile::initialize() const { + if(this->name != "" && this->path != "") { + std::ofstream file(this->path + this->name); + + file << R"( {"Outputs":[]})" << std::endl; + + file.close(); + return 0; + } + return -1; + } + + + +} // gateway \ No newline at end of file diff --git a/gateway-main/gateway/GateStub/OutputFile.h b/gateway-main/gateway/GateStub/OutputFile.h new file mode 100644 index 0000000000000000000000000000000000000000..dfb9cd863ad93bdbd37ac99aff22268829dc7adc --- /dev/null +++ b/gateway-main/gateway/GateStub/OutputFile.h @@ -0,0 +1,27 @@ +// +// Created by tlabrosse on july 2022. +// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#ifndef GATEWAY_OUTPUTFILE_H +#define GATEWAY_OUTPUTFILE_H + +#include "File.h" + +namespace gateway { + +class OutputFile: public File { +private: + std::string read(); + +public: + OutputFile(const std::string &path, const std::string &name); + + int initialize() const; + +}; + +} // gateway + +#endif //GATEWAY_OUTPUTFILE_H diff --git a/gateway-main/gateway/build/.gitkeep b/gateway-main/gateway/build/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/gateway-main/gateway/lib/json.hpp b/gateway-main/gateway/lib/json.hpp new file mode 100644 index 0000000000000000000000000000000000000000..2837e74b9e5aa3685f1a6b8fd30a0c1637c3889f --- /dev/null +++ b/gateway-main/gateway/lib/json.hpp @@ -0,0 +1,23635 @@ +/* + __ _____ _____ _____ + __| | __| | | | JSON for Modern C++ +| | |__ | | | | | | version 3.10.5 +|_____|_____|_____|_|___| https://github.com/nlohmann/json + +Licensed under the MIT License <http://opensource.org/licenses/MIT>. +SPDX-License-Identifier: MIT +Copyright (c) 2013-2022 Niels Lohmann <http://nlohmann.me>. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+*/ + +/****************************************************************************\ + * Note on documentation: The source files contain links to the online * + * documentation of the public API at https://json.nlohmann.me. This URL * + * contains the most recent documentation and should also be applicable to * + * previous versions; documentation for deprecated functions is not * + * removed, but marked deprecated. See "Generate documentation" section in * + * file docs/README.md. * +\****************************************************************************/ + +#ifndef INCLUDE_NLOHMANN_JSON_HPP_ +#define INCLUDE_NLOHMANN_JSON_HPP_ + +#ifndef JSON_SKIP_LIBRARY_VERSION_CHECK + #if defined(NLOHMANN_JSON_VERSION_MAJOR) && defined(NLOHMANN_JSON_VERSION_MINOR) && defined(NLOHMANN_JSON_VERSION_PATCH) + #if NLOHMANN_JSON_VERSION_MAJOR != 3 || NLOHMANN_JSON_VERSION_MINOR != 10 || NLOHMANN_JSON_VERSION_PATCH != 5 + #warning "Already included a different version of the library!" + #endif + #endif +#endif + +#define NLOHMANN_JSON_VERSION_MAJOR 3 // NOLINT(modernize-macro-to-enum) +#define NLOHMANN_JSON_VERSION_MINOR 10 // NOLINT(modernize-macro-to-enum) +#define NLOHMANN_JSON_VERSION_PATCH 5 // NOLINT(modernize-macro-to-enum) + +#include <algorithm> // all_of, find, for_each +#include <cstddef> // nullptr_t, ptrdiff_t, size_t +#include <functional> // hash, less +#include <initializer_list> // initializer_list +#ifndef JSON_NO_IO + #include <iosfwd> // istream, ostream +#endif // JSON_NO_IO +#include <iterator> // random_access_iterator_tag +#include <memory> // unique_ptr +#include <numeric> // accumulate +#include <string> // string, stoi, to_string +#include <utility> // declval, forward, move, pair, swap +#include <vector> // vector + +// #include <nlohmann/adl_serializer.hpp> + + +#include <type_traits> +#include <utility> + +// #include <nlohmann/detail/conversions/from_json.hpp> + + +#include <algorithm> // transform +#include <array> // array +#include <forward_list> // forward_list +#include <iterator> // inserter, front_inserter, end +#include <map> // map +#include <string> // string +#include <tuple> // tuple, make_tuple +#include <type_traits> // is_arithmetic, is_same, is_enum, underlying_type, is_convertible +#include <unordered_map> // unordered_map +#include <utility> // pair, declval +#include <valarray> // valarray + +// #include <nlohmann/detail/exceptions.hpp> + + +#include <cstddef> // nullptr_t +#include <exception> // exception +#include <stdexcept> // runtime_error +#include <string> // to_string +#include <vector> // vector + +// #include <nlohmann/detail/value_t.hpp> + + +#include <array> // array +#include <cstddef> // size_t +#include <cstdint> // uint8_t +#include <string> // string + +// #include <nlohmann/detail/macro_scope.hpp> + + +#include <utility> // declval, pair +// #include <nlohmann/thirdparty/hedley/hedley.hpp> + + +/* Hedley - https://nemequ.github.io/hedley + * Created by Evan Nemerson <evan@nemerson.com> + * + * To the extent possible under law, the author(s) have dedicated all + * copyright and related and neighboring rights to this software to + * the public domain worldwide. This software is distributed without + * any warranty. + * + * For details, see <http://creativecommons.org/publicdomain/zero/1.0/>. 
+ * SPDX-License-Identifier: CC0-1.0 + */ + +#if !defined(JSON_HEDLEY_VERSION) || (JSON_HEDLEY_VERSION < 15) +#if defined(JSON_HEDLEY_VERSION) + #undef JSON_HEDLEY_VERSION +#endif +#define JSON_HEDLEY_VERSION 15 + +#if defined(JSON_HEDLEY_STRINGIFY_EX) + #undef JSON_HEDLEY_STRINGIFY_EX +#endif +#define JSON_HEDLEY_STRINGIFY_EX(x) #x + +#if defined(JSON_HEDLEY_STRINGIFY) + #undef JSON_HEDLEY_STRINGIFY +#endif +#define JSON_HEDLEY_STRINGIFY(x) JSON_HEDLEY_STRINGIFY_EX(x) + +#if defined(JSON_HEDLEY_CONCAT_EX) + #undef JSON_HEDLEY_CONCAT_EX +#endif +#define JSON_HEDLEY_CONCAT_EX(a,b) a##b + +#if defined(JSON_HEDLEY_CONCAT) + #undef JSON_HEDLEY_CONCAT +#endif +#define JSON_HEDLEY_CONCAT(a,b) JSON_HEDLEY_CONCAT_EX(a,b) + +#if defined(JSON_HEDLEY_CONCAT3_EX) + #undef JSON_HEDLEY_CONCAT3_EX +#endif +#define JSON_HEDLEY_CONCAT3_EX(a,b,c) a##b##c + +#if defined(JSON_HEDLEY_CONCAT3) + #undef JSON_HEDLEY_CONCAT3 +#endif +#define JSON_HEDLEY_CONCAT3(a,b,c) JSON_HEDLEY_CONCAT3_EX(a,b,c) + +#if defined(JSON_HEDLEY_VERSION_ENCODE) + #undef JSON_HEDLEY_VERSION_ENCODE +#endif +#define JSON_HEDLEY_VERSION_ENCODE(major,minor,revision) (((major) * 1000000) + ((minor) * 1000) + (revision)) + +#if defined(JSON_HEDLEY_VERSION_DECODE_MAJOR) + #undef JSON_HEDLEY_VERSION_DECODE_MAJOR +#endif +#define JSON_HEDLEY_VERSION_DECODE_MAJOR(version) ((version) / 1000000) + +#if defined(JSON_HEDLEY_VERSION_DECODE_MINOR) + #undef JSON_HEDLEY_VERSION_DECODE_MINOR +#endif +#define JSON_HEDLEY_VERSION_DECODE_MINOR(version) (((version) % 1000000) / 1000) + +#if defined(JSON_HEDLEY_VERSION_DECODE_REVISION) + #undef JSON_HEDLEY_VERSION_DECODE_REVISION +#endif +#define JSON_HEDLEY_VERSION_DECODE_REVISION(version) ((version) % 1000) + +#if defined(JSON_HEDLEY_GNUC_VERSION) + #undef JSON_HEDLEY_GNUC_VERSION +#endif +#if defined(__GNUC__) && defined(__GNUC_PATCHLEVEL__) + #define JSON_HEDLEY_GNUC_VERSION JSON_HEDLEY_VERSION_ENCODE(__GNUC__, __GNUC_MINOR__, __GNUC_PATCHLEVEL__) +#elif defined(__GNUC__) + #define JSON_HEDLEY_GNUC_VERSION JSON_HEDLEY_VERSION_ENCODE(__GNUC__, __GNUC_MINOR__, 0) +#endif + +#if defined(JSON_HEDLEY_GNUC_VERSION_CHECK) + #undef JSON_HEDLEY_GNUC_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_GNUC_VERSION) + #define JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_GNUC_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_MSVC_VERSION) + #undef JSON_HEDLEY_MSVC_VERSION +#endif +#if defined(_MSC_FULL_VER) && (_MSC_FULL_VER >= 140000000) && !defined(__ICL) + #define JSON_HEDLEY_MSVC_VERSION JSON_HEDLEY_VERSION_ENCODE(_MSC_FULL_VER / 10000000, (_MSC_FULL_VER % 10000000) / 100000, (_MSC_FULL_VER % 100000) / 100) +#elif defined(_MSC_FULL_VER) && !defined(__ICL) + #define JSON_HEDLEY_MSVC_VERSION JSON_HEDLEY_VERSION_ENCODE(_MSC_FULL_VER / 1000000, (_MSC_FULL_VER % 1000000) / 10000, (_MSC_FULL_VER % 10000) / 10) +#elif defined(_MSC_VER) && !defined(__ICL) + #define JSON_HEDLEY_MSVC_VERSION JSON_HEDLEY_VERSION_ENCODE(_MSC_VER / 100, _MSC_VER % 100, 0) +#endif + +#if defined(JSON_HEDLEY_MSVC_VERSION_CHECK) + #undef JSON_HEDLEY_MSVC_VERSION_CHECK +#endif +#if !defined(JSON_HEDLEY_MSVC_VERSION) + #define JSON_HEDLEY_MSVC_VERSION_CHECK(major,minor,patch) (0) +#elif defined(_MSC_VER) && (_MSC_VER >= 1400) + #define JSON_HEDLEY_MSVC_VERSION_CHECK(major,minor,patch) (_MSC_FULL_VER >= ((major * 10000000) + (minor * 100000) + (patch))) +#elif defined(_MSC_VER) && (_MSC_VER >= 1200) + #define 
JSON_HEDLEY_MSVC_VERSION_CHECK(major,minor,patch) (_MSC_FULL_VER >= ((major * 1000000) + (minor * 10000) + (patch))) +#else + #define JSON_HEDLEY_MSVC_VERSION_CHECK(major,minor,patch) (_MSC_VER >= ((major * 100) + (minor))) +#endif + +#if defined(JSON_HEDLEY_INTEL_VERSION) + #undef JSON_HEDLEY_INTEL_VERSION +#endif +#if defined(__INTEL_COMPILER) && defined(__INTEL_COMPILER_UPDATE) && !defined(__ICL) + #define JSON_HEDLEY_INTEL_VERSION JSON_HEDLEY_VERSION_ENCODE(__INTEL_COMPILER / 100, __INTEL_COMPILER % 100, __INTEL_COMPILER_UPDATE) +#elif defined(__INTEL_COMPILER) && !defined(__ICL) + #define JSON_HEDLEY_INTEL_VERSION JSON_HEDLEY_VERSION_ENCODE(__INTEL_COMPILER / 100, __INTEL_COMPILER % 100, 0) +#endif + +#if defined(JSON_HEDLEY_INTEL_VERSION_CHECK) + #undef JSON_HEDLEY_INTEL_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_INTEL_VERSION) + #define JSON_HEDLEY_INTEL_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_INTEL_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_INTEL_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_INTEL_CL_VERSION) + #undef JSON_HEDLEY_INTEL_CL_VERSION +#endif +#if defined(__INTEL_COMPILER) && defined(__INTEL_COMPILER_UPDATE) && defined(__ICL) + #define JSON_HEDLEY_INTEL_CL_VERSION JSON_HEDLEY_VERSION_ENCODE(__INTEL_COMPILER, __INTEL_COMPILER_UPDATE, 0) +#endif + +#if defined(JSON_HEDLEY_INTEL_CL_VERSION_CHECK) + #undef JSON_HEDLEY_INTEL_CL_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_INTEL_CL_VERSION) + #define JSON_HEDLEY_INTEL_CL_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_INTEL_CL_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_INTEL_CL_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_PGI_VERSION) + #undef JSON_HEDLEY_PGI_VERSION +#endif +#if defined(__PGI) && defined(__PGIC__) && defined(__PGIC_MINOR__) && defined(__PGIC_PATCHLEVEL__) + #define JSON_HEDLEY_PGI_VERSION JSON_HEDLEY_VERSION_ENCODE(__PGIC__, __PGIC_MINOR__, __PGIC_PATCHLEVEL__) +#endif + +#if defined(JSON_HEDLEY_PGI_VERSION_CHECK) + #undef JSON_HEDLEY_PGI_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_PGI_VERSION) + #define JSON_HEDLEY_PGI_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_PGI_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_PGI_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_SUNPRO_VERSION) + #undef JSON_HEDLEY_SUNPRO_VERSION +#endif +#if defined(__SUNPRO_C) && (__SUNPRO_C > 0x1000) + #define JSON_HEDLEY_SUNPRO_VERSION JSON_HEDLEY_VERSION_ENCODE((((__SUNPRO_C >> 16) & 0xf) * 10) + ((__SUNPRO_C >> 12) & 0xf), (((__SUNPRO_C >> 8) & 0xf) * 10) + ((__SUNPRO_C >> 4) & 0xf), (__SUNPRO_C & 0xf) * 10) +#elif defined(__SUNPRO_C) + #define JSON_HEDLEY_SUNPRO_VERSION JSON_HEDLEY_VERSION_ENCODE((__SUNPRO_C >> 8) & 0xf, (__SUNPRO_C >> 4) & 0xf, (__SUNPRO_C) & 0xf) +#elif defined(__SUNPRO_CC) && (__SUNPRO_CC > 0x1000) + #define JSON_HEDLEY_SUNPRO_VERSION JSON_HEDLEY_VERSION_ENCODE((((__SUNPRO_CC >> 16) & 0xf) * 10) + ((__SUNPRO_CC >> 12) & 0xf), (((__SUNPRO_CC >> 8) & 0xf) * 10) + ((__SUNPRO_CC >> 4) & 0xf), (__SUNPRO_CC & 0xf) * 10) +#elif defined(__SUNPRO_CC) + #define JSON_HEDLEY_SUNPRO_VERSION JSON_HEDLEY_VERSION_ENCODE((__SUNPRO_CC >> 8) & 0xf, (__SUNPRO_CC >> 4) & 0xf, (__SUNPRO_CC) & 0xf) +#endif + +#if defined(JSON_HEDLEY_SUNPRO_VERSION_CHECK) + #undef JSON_HEDLEY_SUNPRO_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_SUNPRO_VERSION) + #define JSON_HEDLEY_SUNPRO_VERSION_CHECK(major,minor,patch) 
(JSON_HEDLEY_SUNPRO_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_SUNPRO_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_EMSCRIPTEN_VERSION) + #undef JSON_HEDLEY_EMSCRIPTEN_VERSION +#endif +#if defined(__EMSCRIPTEN__) + #define JSON_HEDLEY_EMSCRIPTEN_VERSION JSON_HEDLEY_VERSION_ENCODE(__EMSCRIPTEN_major__, __EMSCRIPTEN_minor__, __EMSCRIPTEN_tiny__) +#endif + +#if defined(JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK) + #undef JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_EMSCRIPTEN_VERSION) + #define JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_EMSCRIPTEN_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_ARM_VERSION) + #undef JSON_HEDLEY_ARM_VERSION +#endif +#if defined(__CC_ARM) && defined(__ARMCOMPILER_VERSION) + #define JSON_HEDLEY_ARM_VERSION JSON_HEDLEY_VERSION_ENCODE(__ARMCOMPILER_VERSION / 1000000, (__ARMCOMPILER_VERSION % 1000000) / 10000, (__ARMCOMPILER_VERSION % 10000) / 100) +#elif defined(__CC_ARM) && defined(__ARMCC_VERSION) + #define JSON_HEDLEY_ARM_VERSION JSON_HEDLEY_VERSION_ENCODE(__ARMCC_VERSION / 1000000, (__ARMCC_VERSION % 1000000) / 10000, (__ARMCC_VERSION % 10000) / 100) +#endif + +#if defined(JSON_HEDLEY_ARM_VERSION_CHECK) + #undef JSON_HEDLEY_ARM_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_ARM_VERSION) + #define JSON_HEDLEY_ARM_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_ARM_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_ARM_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_IBM_VERSION) + #undef JSON_HEDLEY_IBM_VERSION +#endif +#if defined(__ibmxl__) + #define JSON_HEDLEY_IBM_VERSION JSON_HEDLEY_VERSION_ENCODE(__ibmxl_version__, __ibmxl_release__, __ibmxl_modification__) +#elif defined(__xlC__) && defined(__xlC_ver__) + #define JSON_HEDLEY_IBM_VERSION JSON_HEDLEY_VERSION_ENCODE(__xlC__ >> 8, __xlC__ & 0xff, (__xlC_ver__ >> 8) & 0xff) +#elif defined(__xlC__) + #define JSON_HEDLEY_IBM_VERSION JSON_HEDLEY_VERSION_ENCODE(__xlC__ >> 8, __xlC__ & 0xff, 0) +#endif + +#if defined(JSON_HEDLEY_IBM_VERSION_CHECK) + #undef JSON_HEDLEY_IBM_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_IBM_VERSION) + #define JSON_HEDLEY_IBM_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_IBM_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_IBM_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_VERSION) + #undef JSON_HEDLEY_TI_VERSION +#endif +#if \ + defined(__TI_COMPILER_VERSION__) && \ + ( \ + defined(__TMS470__) || defined(__TI_ARM__) || \ + defined(__MSP430__) || \ + defined(__TMS320C2000__) \ + ) +#if (__TI_COMPILER_VERSION__ >= 16000000) + #define JSON_HEDLEY_TI_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif +#endif + +#if defined(JSON_HEDLEY_TI_VERSION_CHECK) + #undef JSON_HEDLEY_TI_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_VERSION) + #define JSON_HEDLEY_TI_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_CL2000_VERSION) + #undef JSON_HEDLEY_TI_CL2000_VERSION +#endif +#if defined(__TI_COMPILER_VERSION__) && defined(__TMS320C2000__) + #define 
JSON_HEDLEY_TI_CL2000_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif + +#if defined(JSON_HEDLEY_TI_CL2000_VERSION_CHECK) + #undef JSON_HEDLEY_TI_CL2000_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_CL2000_VERSION) + #define JSON_HEDLEY_TI_CL2000_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CL2000_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_CL2000_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_CL430_VERSION) + #undef JSON_HEDLEY_TI_CL430_VERSION +#endif +#if defined(__TI_COMPILER_VERSION__) && defined(__MSP430__) + #define JSON_HEDLEY_TI_CL430_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif + +#if defined(JSON_HEDLEY_TI_CL430_VERSION_CHECK) + #undef JSON_HEDLEY_TI_CL430_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_CL430_VERSION) + #define JSON_HEDLEY_TI_CL430_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CL430_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_CL430_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_ARMCL_VERSION) + #undef JSON_HEDLEY_TI_ARMCL_VERSION +#endif +#if defined(__TI_COMPILER_VERSION__) && (defined(__TMS470__) || defined(__TI_ARM__)) + #define JSON_HEDLEY_TI_ARMCL_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif + +#if defined(JSON_HEDLEY_TI_ARMCL_VERSION_CHECK) + #undef JSON_HEDLEY_TI_ARMCL_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_ARMCL_VERSION) + #define JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_ARMCL_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_CL6X_VERSION) + #undef JSON_HEDLEY_TI_CL6X_VERSION +#endif +#if defined(__TI_COMPILER_VERSION__) && defined(__TMS320C6X__) + #define JSON_HEDLEY_TI_CL6X_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif + +#if defined(JSON_HEDLEY_TI_CL6X_VERSION_CHECK) + #undef JSON_HEDLEY_TI_CL6X_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_CL6X_VERSION) + #define JSON_HEDLEY_TI_CL6X_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CL6X_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_CL6X_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_CL7X_VERSION) + #undef JSON_HEDLEY_TI_CL7X_VERSION +#endif +#if defined(__TI_COMPILER_VERSION__) && defined(__C7000__) + #define JSON_HEDLEY_TI_CL7X_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif + +#if defined(JSON_HEDLEY_TI_CL7X_VERSION_CHECK) + #undef JSON_HEDLEY_TI_CL7X_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_CL7X_VERSION) + #define JSON_HEDLEY_TI_CL7X_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CL7X_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_CL7X_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_CLPRU_VERSION) + #undef JSON_HEDLEY_TI_CLPRU_VERSION +#endif +#if defined(__TI_COMPILER_VERSION__) 
&& defined(__PRU__) + #define JSON_HEDLEY_TI_CLPRU_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif + +#if defined(JSON_HEDLEY_TI_CLPRU_VERSION_CHECK) + #undef JSON_HEDLEY_TI_CLPRU_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_CLPRU_VERSION) + #define JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CLPRU_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_CRAY_VERSION) + #undef JSON_HEDLEY_CRAY_VERSION +#endif +#if defined(_CRAYC) + #if defined(_RELEASE_PATCHLEVEL) + #define JSON_HEDLEY_CRAY_VERSION JSON_HEDLEY_VERSION_ENCODE(_RELEASE_MAJOR, _RELEASE_MINOR, _RELEASE_PATCHLEVEL) + #else + #define JSON_HEDLEY_CRAY_VERSION JSON_HEDLEY_VERSION_ENCODE(_RELEASE_MAJOR, _RELEASE_MINOR, 0) + #endif +#endif + +#if defined(JSON_HEDLEY_CRAY_VERSION_CHECK) + #undef JSON_HEDLEY_CRAY_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_CRAY_VERSION) + #define JSON_HEDLEY_CRAY_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_CRAY_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_CRAY_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_IAR_VERSION) + #undef JSON_HEDLEY_IAR_VERSION +#endif +#if defined(__IAR_SYSTEMS_ICC__) + #if __VER__ > 1000 + #define JSON_HEDLEY_IAR_VERSION JSON_HEDLEY_VERSION_ENCODE((__VER__ / 1000000), ((__VER__ / 1000) % 1000), (__VER__ % 1000)) + #else + #define JSON_HEDLEY_IAR_VERSION JSON_HEDLEY_VERSION_ENCODE(__VER__ / 100, __VER__ % 100, 0) + #endif +#endif + +#if defined(JSON_HEDLEY_IAR_VERSION_CHECK) + #undef JSON_HEDLEY_IAR_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_IAR_VERSION) + #define JSON_HEDLEY_IAR_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_IAR_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_IAR_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TINYC_VERSION) + #undef JSON_HEDLEY_TINYC_VERSION +#endif +#if defined(__TINYC__) + #define JSON_HEDLEY_TINYC_VERSION JSON_HEDLEY_VERSION_ENCODE(__TINYC__ / 1000, (__TINYC__ / 100) % 10, __TINYC__ % 100) +#endif + +#if defined(JSON_HEDLEY_TINYC_VERSION_CHECK) + #undef JSON_HEDLEY_TINYC_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TINYC_VERSION) + #define JSON_HEDLEY_TINYC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TINYC_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TINYC_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_DMC_VERSION) + #undef JSON_HEDLEY_DMC_VERSION +#endif +#if defined(__DMC__) + #define JSON_HEDLEY_DMC_VERSION JSON_HEDLEY_VERSION_ENCODE(__DMC__ >> 8, (__DMC__ >> 4) & 0xf, __DMC__ & 0xf) +#endif + +#if defined(JSON_HEDLEY_DMC_VERSION_CHECK) + #undef JSON_HEDLEY_DMC_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_DMC_VERSION) + #define JSON_HEDLEY_DMC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_DMC_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_DMC_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_COMPCERT_VERSION) + #undef JSON_HEDLEY_COMPCERT_VERSION +#endif +#if defined(__COMPCERT_VERSION__) + #define JSON_HEDLEY_COMPCERT_VERSION JSON_HEDLEY_VERSION_ENCODE(__COMPCERT_VERSION__ / 10000, (__COMPCERT_VERSION__ / 100) % 100, __COMPCERT_VERSION__ % 100) +#endif + +#if defined(JSON_HEDLEY_COMPCERT_VERSION_CHECK) + #undef 
JSON_HEDLEY_COMPCERT_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_COMPCERT_VERSION) + #define JSON_HEDLEY_COMPCERT_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_COMPCERT_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_COMPCERT_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_PELLES_VERSION) + #undef JSON_HEDLEY_PELLES_VERSION +#endif +#if defined(__POCC__) + #define JSON_HEDLEY_PELLES_VERSION JSON_HEDLEY_VERSION_ENCODE(__POCC__ / 100, __POCC__ % 100, 0) +#endif + +#if defined(JSON_HEDLEY_PELLES_VERSION_CHECK) + #undef JSON_HEDLEY_PELLES_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_PELLES_VERSION) + #define JSON_HEDLEY_PELLES_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_PELLES_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_PELLES_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_MCST_LCC_VERSION) + #undef JSON_HEDLEY_MCST_LCC_VERSION +#endif +#if defined(__LCC__) && defined(__LCC_MINOR__) + #define JSON_HEDLEY_MCST_LCC_VERSION JSON_HEDLEY_VERSION_ENCODE(__LCC__ / 100, __LCC__ % 100, __LCC_MINOR__) +#endif + +#if defined(JSON_HEDLEY_MCST_LCC_VERSION_CHECK) + #undef JSON_HEDLEY_MCST_LCC_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_MCST_LCC_VERSION) + #define JSON_HEDLEY_MCST_LCC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_MCST_LCC_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_MCST_LCC_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_GCC_VERSION) + #undef JSON_HEDLEY_GCC_VERSION +#endif +#if \ + defined(JSON_HEDLEY_GNUC_VERSION) && \ + !defined(__clang__) && \ + !defined(JSON_HEDLEY_INTEL_VERSION) && \ + !defined(JSON_HEDLEY_PGI_VERSION) && \ + !defined(JSON_HEDLEY_ARM_VERSION) && \ + !defined(JSON_HEDLEY_CRAY_VERSION) && \ + !defined(JSON_HEDLEY_TI_VERSION) && \ + !defined(JSON_HEDLEY_TI_ARMCL_VERSION) && \ + !defined(JSON_HEDLEY_TI_CL430_VERSION) && \ + !defined(JSON_HEDLEY_TI_CL2000_VERSION) && \ + !defined(JSON_HEDLEY_TI_CL6X_VERSION) && \ + !defined(JSON_HEDLEY_TI_CL7X_VERSION) && \ + !defined(JSON_HEDLEY_TI_CLPRU_VERSION) && \ + !defined(__COMPCERT__) && \ + !defined(JSON_HEDLEY_MCST_LCC_VERSION) + #define JSON_HEDLEY_GCC_VERSION JSON_HEDLEY_GNUC_VERSION +#endif + +#if defined(JSON_HEDLEY_GCC_VERSION_CHECK) + #undef JSON_HEDLEY_GCC_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_GCC_VERSION) + #define JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_GCC_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_HAS_ATTRIBUTE) + #undef JSON_HEDLEY_HAS_ATTRIBUTE +#endif +#if \ + defined(__has_attribute) && \ + ( \ + (!defined(JSON_HEDLEY_IAR_VERSION) || JSON_HEDLEY_IAR_VERSION_CHECK(8,5,9)) \ + ) +# define JSON_HEDLEY_HAS_ATTRIBUTE(attribute) __has_attribute(attribute) +#else +# define JSON_HEDLEY_HAS_ATTRIBUTE(attribute) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_ATTRIBUTE) + #undef JSON_HEDLEY_GNUC_HAS_ATTRIBUTE +#endif +#if defined(__has_attribute) + #define JSON_HEDLEY_GNUC_HAS_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_HAS_ATTRIBUTE(attribute) +#else + #define JSON_HEDLEY_GNUC_HAS_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_ATTRIBUTE) + #undef JSON_HEDLEY_GCC_HAS_ATTRIBUTE +#endif +#if defined(__has_attribute) + #define 
JSON_HEDLEY_GCC_HAS_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_HAS_ATTRIBUTE(attribute) +#else + #define JSON_HEDLEY_GCC_HAS_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_CPP_ATTRIBUTE) + #undef JSON_HEDLEY_HAS_CPP_ATTRIBUTE +#endif +#if \ + defined(__has_cpp_attribute) && \ + defined(__cplusplus) && \ + (!defined(JSON_HEDLEY_SUNPRO_VERSION) || JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,15,0)) + #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE(attribute) __has_cpp_attribute(attribute) +#else + #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE(attribute) (0) +#endif + +#if defined(JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS) + #undef JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS +#endif +#if !defined(__cplusplus) || !defined(__has_cpp_attribute) + #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(ns,attribute) (0) +#elif \ + !defined(JSON_HEDLEY_PGI_VERSION) && \ + !defined(JSON_HEDLEY_IAR_VERSION) && \ + (!defined(JSON_HEDLEY_SUNPRO_VERSION) || JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,15,0)) && \ + (!defined(JSON_HEDLEY_MSVC_VERSION) || JSON_HEDLEY_MSVC_VERSION_CHECK(19,20,0)) + #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(ns,attribute) JSON_HEDLEY_HAS_CPP_ATTRIBUTE(ns::attribute) +#else + #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(ns,attribute) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE) + #undef JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE +#endif +#if defined(__has_cpp_attribute) && defined(__cplusplus) + #define JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE(attribute,major,minor,patch) __has_cpp_attribute(attribute) +#else + #define JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE) + #undef JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE +#endif +#if defined(__has_cpp_attribute) && defined(__cplusplus) + #define JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE(attribute,major,minor,patch) __has_cpp_attribute(attribute) +#else + #define JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_BUILTIN) + #undef JSON_HEDLEY_HAS_BUILTIN +#endif +#if defined(__has_builtin) + #define JSON_HEDLEY_HAS_BUILTIN(builtin) __has_builtin(builtin) +#else + #define JSON_HEDLEY_HAS_BUILTIN(builtin) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_BUILTIN) + #undef JSON_HEDLEY_GNUC_HAS_BUILTIN +#endif +#if defined(__has_builtin) + #define JSON_HEDLEY_GNUC_HAS_BUILTIN(builtin,major,minor,patch) __has_builtin(builtin) +#else + #define JSON_HEDLEY_GNUC_HAS_BUILTIN(builtin,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_BUILTIN) + #undef JSON_HEDLEY_GCC_HAS_BUILTIN +#endif +#if defined(__has_builtin) + #define JSON_HEDLEY_GCC_HAS_BUILTIN(builtin,major,minor,patch) __has_builtin(builtin) +#else + #define JSON_HEDLEY_GCC_HAS_BUILTIN(builtin,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_FEATURE) + #undef JSON_HEDLEY_HAS_FEATURE +#endif +#if defined(__has_feature) + #define JSON_HEDLEY_HAS_FEATURE(feature) __has_feature(feature) +#else + #define JSON_HEDLEY_HAS_FEATURE(feature) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_FEATURE) + #undef JSON_HEDLEY_GNUC_HAS_FEATURE +#endif +#if defined(__has_feature) + #define JSON_HEDLEY_GNUC_HAS_FEATURE(feature,major,minor,patch) __has_feature(feature) +#else + #define JSON_HEDLEY_GNUC_HAS_FEATURE(feature,major,minor,patch) 
JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_FEATURE) + #undef JSON_HEDLEY_GCC_HAS_FEATURE +#endif +#if defined(__has_feature) + #define JSON_HEDLEY_GCC_HAS_FEATURE(feature,major,minor,patch) __has_feature(feature) +#else + #define JSON_HEDLEY_GCC_HAS_FEATURE(feature,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_EXTENSION) + #undef JSON_HEDLEY_HAS_EXTENSION +#endif +#if defined(__has_extension) + #define JSON_HEDLEY_HAS_EXTENSION(extension) __has_extension(extension) +#else + #define JSON_HEDLEY_HAS_EXTENSION(extension) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_EXTENSION) + #undef JSON_HEDLEY_GNUC_HAS_EXTENSION +#endif +#if defined(__has_extension) + #define JSON_HEDLEY_GNUC_HAS_EXTENSION(extension,major,minor,patch) __has_extension(extension) +#else + #define JSON_HEDLEY_GNUC_HAS_EXTENSION(extension,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_EXTENSION) + #undef JSON_HEDLEY_GCC_HAS_EXTENSION +#endif +#if defined(__has_extension) + #define JSON_HEDLEY_GCC_HAS_EXTENSION(extension,major,minor,patch) __has_extension(extension) +#else + #define JSON_HEDLEY_GCC_HAS_EXTENSION(extension,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE) + #undef JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE +#endif +#if defined(__has_declspec_attribute) + #define JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE(attribute) __has_declspec_attribute(attribute) +#else + #define JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE(attribute) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE) + #undef JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE +#endif +#if defined(__has_declspec_attribute) + #define JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE(attribute,major,minor,patch) __has_declspec_attribute(attribute) +#else + #define JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE) + #undef JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE +#endif +#if defined(__has_declspec_attribute) + #define JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE(attribute,major,minor,patch) __has_declspec_attribute(attribute) +#else + #define JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_WARNING) + #undef JSON_HEDLEY_HAS_WARNING +#endif +#if defined(__has_warning) + #define JSON_HEDLEY_HAS_WARNING(warning) __has_warning(warning) +#else + #define JSON_HEDLEY_HAS_WARNING(warning) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_WARNING) + #undef JSON_HEDLEY_GNUC_HAS_WARNING +#endif +#if defined(__has_warning) + #define JSON_HEDLEY_GNUC_HAS_WARNING(warning,major,minor,patch) __has_warning(warning) +#else + #define JSON_HEDLEY_GNUC_HAS_WARNING(warning,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_WARNING) + #undef JSON_HEDLEY_GCC_HAS_WARNING +#endif +#if defined(__has_warning) + #define JSON_HEDLEY_GCC_HAS_WARNING(warning,major,minor,patch) __has_warning(warning) +#else + #define JSON_HEDLEY_GCC_HAS_WARNING(warning,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if \ + (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)) || \ + defined(__clang__) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,0,0) || \ + 
JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(18,4,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,7,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(2,0,1) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,1,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,0,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_CRAY_VERSION_CHECK(5,0,0) || \ + JSON_HEDLEY_TINYC_VERSION_CHECK(0,9,17) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(8,0,0) || \ + (JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) && defined(__C99_PRAGMA_OPERATOR)) + #define JSON_HEDLEY_PRAGMA(value) _Pragma(#value) +#elif JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0) + #define JSON_HEDLEY_PRAGMA(value) __pragma(value) +#else + #define JSON_HEDLEY_PRAGMA(value) +#endif + +#if defined(JSON_HEDLEY_DIAGNOSTIC_PUSH) + #undef JSON_HEDLEY_DIAGNOSTIC_PUSH +#endif +#if defined(JSON_HEDLEY_DIAGNOSTIC_POP) + #undef JSON_HEDLEY_DIAGNOSTIC_POP +#endif +#if defined(__clang__) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("clang diagnostic push") + #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("clang diagnostic pop") +#elif JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("warning(push)") + #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("warning(pop)") +#elif JSON_HEDLEY_GCC_VERSION_CHECK(4,6,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("GCC diagnostic push") + #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("GCC diagnostic pop") +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH __pragma(warning(push)) + #define JSON_HEDLEY_DIAGNOSTIC_POP __pragma(warning(pop)) +#elif JSON_HEDLEY_ARM_VERSION_CHECK(5,6,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("push") + #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("pop") +#elif \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,4,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,1,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("diag_push") + #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("diag_pop") +#elif JSON_HEDLEY_PELLES_VERSION_CHECK(2,90,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("warning(push)") + #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("warning(pop)") +#else + #define JSON_HEDLEY_DIAGNOSTIC_PUSH + #define JSON_HEDLEY_DIAGNOSTIC_POP +#endif + +/* JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_ is for + HEDLEY INTERNAL USE ONLY. API subject to change without notice. 
*/ +#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_) + #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_ +#endif +#if defined(__cplusplus) +# if JSON_HEDLEY_HAS_WARNING("-Wc++98-compat") +# if JSON_HEDLEY_HAS_WARNING("-Wc++17-extensions") +# if JSON_HEDLEY_HAS_WARNING("-Wc++1z-extensions") +# define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(xpr) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("clang diagnostic ignored \"-Wc++98-compat\"") \ + _Pragma("clang diagnostic ignored \"-Wc++17-extensions\"") \ + _Pragma("clang diagnostic ignored \"-Wc++1z-extensions\"") \ + xpr \ + JSON_HEDLEY_DIAGNOSTIC_POP +# else +# define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(xpr) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("clang diagnostic ignored \"-Wc++98-compat\"") \ + _Pragma("clang diagnostic ignored \"-Wc++17-extensions\"") \ + xpr \ + JSON_HEDLEY_DIAGNOSTIC_POP +# endif +# else +# define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(xpr) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("clang diagnostic ignored \"-Wc++98-compat\"") \ + xpr \ + JSON_HEDLEY_DIAGNOSTIC_POP +# endif +# endif +#endif +#if !defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(x) x +#endif + +#if defined(JSON_HEDLEY_CONST_CAST) + #undef JSON_HEDLEY_CONST_CAST +#endif +#if defined(__cplusplus) +# define JSON_HEDLEY_CONST_CAST(T, expr) (const_cast<T>(expr)) +#elif \ + JSON_HEDLEY_HAS_WARNING("-Wcast-qual") || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,6,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) +# define JSON_HEDLEY_CONST_CAST(T, expr) (__extension__ ({ \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL \ + ((T) (expr)); \ + JSON_HEDLEY_DIAGNOSTIC_POP \ + })) +#else +# define JSON_HEDLEY_CONST_CAST(T, expr) ((T) (expr)) +#endif + +#if defined(JSON_HEDLEY_REINTERPRET_CAST) + #undef JSON_HEDLEY_REINTERPRET_CAST +#endif +#if defined(__cplusplus) + #define JSON_HEDLEY_REINTERPRET_CAST(T, expr) (reinterpret_cast<T>(expr)) +#else + #define JSON_HEDLEY_REINTERPRET_CAST(T, expr) ((T) (expr)) +#endif + +#if defined(JSON_HEDLEY_STATIC_CAST) + #undef JSON_HEDLEY_STATIC_CAST +#endif +#if defined(__cplusplus) + #define JSON_HEDLEY_STATIC_CAST(T, expr) (static_cast<T>(expr)) +#else + #define JSON_HEDLEY_STATIC_CAST(T, expr) ((T) (expr)) +#endif + +#if defined(JSON_HEDLEY_CPP_CAST) + #undef JSON_HEDLEY_CPP_CAST +#endif +#if defined(__cplusplus) +# if JSON_HEDLEY_HAS_WARNING("-Wold-style-cast") +# define JSON_HEDLEY_CPP_CAST(T, expr) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("clang diagnostic ignored \"-Wold-style-cast\"") \ + ((T) (expr)) \ + JSON_HEDLEY_DIAGNOSTIC_POP +# elif JSON_HEDLEY_IAR_VERSION_CHECK(8,3,0) +# define JSON_HEDLEY_CPP_CAST(T, expr) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("diag_suppress=Pe137") \ + JSON_HEDLEY_DIAGNOSTIC_POP +# else +# define JSON_HEDLEY_CPP_CAST(T, expr) ((T) (expr)) +# endif +#else +# define JSON_HEDLEY_CPP_CAST(T, expr) (expr) +#endif + +#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED) + #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wdeprecated-declarations") + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("clang diagnostic ignored \"-Wdeprecated-declarations\"") +#elif JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("warning(disable:1478 1786)") +#elif JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED 
__pragma(warning(disable:1478 1786)) +#elif JSON_HEDLEY_PGI_VERSION_CHECK(20,7,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("diag_suppress 1215,1216,1444,1445") +#elif JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("diag_suppress 1215,1444") +#elif JSON_HEDLEY_GCC_VERSION_CHECK(4,3,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"") +#elif JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED __pragma(warning(disable:4996)) +#elif JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("diag_suppress 1215,1444") +#elif \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("diag_suppress 1291,1718") +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,13,0) && !defined(__cplusplus) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("error_messages(off,E_DEPRECATED_ATT,E_DEPRECATED_ATT_MESS)") +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,13,0) && defined(__cplusplus) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("error_messages(off,symdeprecated,symdeprecated2)") +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("diag_suppress=Pe1444,Pe1215") +#elif JSON_HEDLEY_PELLES_VERSION_CHECK(2,90,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("warn(disable:2241)") +#else + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED +#endif + +#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS) + #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wunknown-pragmas") + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("clang diagnostic ignored \"-Wunknown-pragmas\"") +#elif JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("warning(disable:161)") +#elif JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS __pragma(warning(disable:161)) +#elif JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress 1675") +#elif JSON_HEDLEY_GCC_VERSION_CHECK(4,3,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("GCC diagnostic ignored \"-Wunknown-pragmas\"") +#elif JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS __pragma(warning(disable:4068)) +#elif \ + JSON_HEDLEY_TI_VERSION_CHECK(16,9,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,0,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,3,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress 163") +#elif 
JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress 163") +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress=Pe161") +#elif JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress 161") +#else + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS +#endif + +#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES) + #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wunknown-attributes") + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("clang diagnostic ignored \"-Wunknown-attributes\"") +#elif JSON_HEDLEY_GCC_VERSION_CHECK(4,6,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"") +#elif JSON_HEDLEY_INTEL_VERSION_CHECK(17,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("warning(disable:1292)") +#elif JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES __pragma(warning(disable:1292)) +#elif JSON_HEDLEY_MSVC_VERSION_CHECK(19,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES __pragma(warning(disable:5030)) +#elif JSON_HEDLEY_PGI_VERSION_CHECK(20,7,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress 1097,1098") +#elif JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress 1097") +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,14,0) && defined(__cplusplus) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("error_messages(off,attrskipunsup)") +#elif \ + JSON_HEDLEY_TI_VERSION_CHECK(18,1,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,3,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress 1173") +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress=Pe1097") +#elif JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress 1097") +#else + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES +#endif + +#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL) + #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wcast-qual") + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL _Pragma("clang diagnostic ignored \"-Wcast-qual\"") +#elif JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL _Pragma("warning(disable:2203 2331)") +#elif JSON_HEDLEY_GCC_VERSION_CHECK(3,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL _Pragma("GCC diagnostic ignored \"-Wcast-qual\"") +#else + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL +#endif + +#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION) + #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wunused-function") + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION _Pragma("clang diagnostic ignored \"-Wunused-function\"") +#elif JSON_HEDLEY_GCC_VERSION_CHECK(3,4,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION _Pragma("GCC diagnostic ignored \"-Wunused-function\"") +#elif 
JSON_HEDLEY_MSVC_VERSION_CHECK(1,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION __pragma(warning(disable:4505)) +#elif JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION _Pragma("diag_suppress 3142") +#else + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION +#endif + +#if defined(JSON_HEDLEY_DEPRECATED) + #undef JSON_HEDLEY_DEPRECATED +#endif +#if defined(JSON_HEDLEY_DEPRECATED_FOR) + #undef JSON_HEDLEY_DEPRECATED_FOR +#endif +#if \ + JSON_HEDLEY_MSVC_VERSION_CHECK(14,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_DEPRECATED(since) __declspec(deprecated("Since " # since)) + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) __declspec(deprecated("Since " #since "; use " #replacement)) +#elif \ + (JSON_HEDLEY_HAS_EXTENSION(attribute_deprecated_with_message) && !defined(JSON_HEDLEY_IAR_VERSION)) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,5,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(5,6,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,13,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(18,1,0) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(18,1,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,3,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,3,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_DEPRECATED(since) __attribute__((__deprecated__("Since " #since))) + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) __attribute__((__deprecated__("Since " #since "; use " #replacement))) +#elif defined(__cplusplus) && (__cplusplus >= 201402L) + #define JSON_HEDLEY_DEPRECATED(since) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[deprecated("Since " #since)]]) + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[deprecated("Since " #since "; use " #replacement)]]) +#elif \ + JSON_HEDLEY_HAS_ATTRIBUTE(deprecated) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) || \ + JSON_HEDLEY_IAR_VERSION_CHECK(8,10,0) + #define JSON_HEDLEY_DEPRECATED(since) __attribute__((__deprecated__)) + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) __attribute__((__deprecated__)) +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(13,10,0) || \ + JSON_HEDLEY_PELLES_VERSION_CHECK(6,50,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_DEPRECATED(since) __declspec(deprecated) + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) __declspec(deprecated) +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_DEPRECATED(since) _Pragma("deprecated") + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) 
_Pragma("deprecated") +#else + #define JSON_HEDLEY_DEPRECATED(since) + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) +#endif + +#if defined(JSON_HEDLEY_UNAVAILABLE) + #undef JSON_HEDLEY_UNAVAILABLE +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(warning) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,3,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_UNAVAILABLE(available_since) __attribute__((__warning__("Not available until " #available_since))) +#else + #define JSON_HEDLEY_UNAVAILABLE(available_since) +#endif + +#if defined(JSON_HEDLEY_WARN_UNUSED_RESULT) + #undef JSON_HEDLEY_WARN_UNUSED_RESULT +#endif +#if defined(JSON_HEDLEY_WARN_UNUSED_RESULT_MSG) + #undef JSON_HEDLEY_WARN_UNUSED_RESULT_MSG +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(warn_unused_result) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,4,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + (JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,15,0) && defined(__cplusplus)) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_WARN_UNUSED_RESULT __attribute__((__warn_unused_result__)) + #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) __attribute__((__warn_unused_result__)) +#elif (JSON_HEDLEY_HAS_CPP_ATTRIBUTE(nodiscard) >= 201907L) + #define JSON_HEDLEY_WARN_UNUSED_RESULT JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[nodiscard]]) + #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[nodiscard(msg)]]) +#elif JSON_HEDLEY_HAS_CPP_ATTRIBUTE(nodiscard) + #define JSON_HEDLEY_WARN_UNUSED_RESULT JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[nodiscard]]) + #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[nodiscard]]) +#elif defined(_Check_return_) /* SAL */ + #define JSON_HEDLEY_WARN_UNUSED_RESULT _Check_return_ + #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) _Check_return_ +#else + #define JSON_HEDLEY_WARN_UNUSED_RESULT + #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) +#endif + +#if defined(JSON_HEDLEY_SENTINEL) + #undef JSON_HEDLEY_SENTINEL +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(sentinel) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(5,4,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_SENTINEL(position) __attribute__((__sentinel__(position))) +#else + #define JSON_HEDLEY_SENTINEL(position) +#endif + +#if defined(JSON_HEDLEY_NO_RETURN) + #undef JSON_HEDLEY_NO_RETURN +#endif +#if JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_NO_RETURN __noreturn +#elif \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_NO_RETURN 
__attribute__((__noreturn__)) +#elif defined(__STDC_VERSION__) && __STDC_VERSION__ >= 201112L + #define JSON_HEDLEY_NO_RETURN _Noreturn +#elif defined(__cplusplus) && (__cplusplus >= 201103L) + #define JSON_HEDLEY_NO_RETURN JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[noreturn]]) +#elif \ + JSON_HEDLEY_HAS_ATTRIBUTE(noreturn) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,2,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_IAR_VERSION_CHECK(8,10,0) + #define JSON_HEDLEY_NO_RETURN __attribute__((__noreturn__)) +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) + #define JSON_HEDLEY_NO_RETURN _Pragma("does_not_return") +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(13,10,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_NO_RETURN __declspec(noreturn) +#elif JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,0,0) && defined(__cplusplus) + #define JSON_HEDLEY_NO_RETURN _Pragma("FUNC_NEVER_RETURNS;") +#elif JSON_HEDLEY_COMPCERT_VERSION_CHECK(3,2,0) + #define JSON_HEDLEY_NO_RETURN __attribute((noreturn)) +#elif JSON_HEDLEY_PELLES_VERSION_CHECK(9,0,0) + #define JSON_HEDLEY_NO_RETURN __declspec(noreturn) +#else + #define JSON_HEDLEY_NO_RETURN +#endif + +#if defined(JSON_HEDLEY_NO_ESCAPE) + #undef JSON_HEDLEY_NO_ESCAPE +#endif +#if JSON_HEDLEY_HAS_ATTRIBUTE(noescape) + #define JSON_HEDLEY_NO_ESCAPE __attribute__((__noescape__)) +#else + #define JSON_HEDLEY_NO_ESCAPE +#endif + +#if defined(JSON_HEDLEY_UNREACHABLE) + #undef JSON_HEDLEY_UNREACHABLE +#endif +#if defined(JSON_HEDLEY_UNREACHABLE_RETURN) + #undef JSON_HEDLEY_UNREACHABLE_RETURN +#endif +#if defined(JSON_HEDLEY_ASSUME) + #undef JSON_HEDLEY_ASSUME +#endif +#if \ + JSON_HEDLEY_MSVC_VERSION_CHECK(13,10,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_ASSUME(expr) __assume(expr) +#elif JSON_HEDLEY_HAS_BUILTIN(__builtin_assume) + #define JSON_HEDLEY_ASSUME(expr) __builtin_assume(expr) +#elif \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,2,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(4,0,0) + #if defined(__cplusplus) + #define JSON_HEDLEY_ASSUME(expr) std::_nassert(expr) + #else + #define JSON_HEDLEY_ASSUME(expr) _nassert(expr) + #endif +#endif +#if \ + (JSON_HEDLEY_HAS_BUILTIN(__builtin_unreachable) && (!defined(JSON_HEDLEY_ARM_VERSION))) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,5,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(18,10,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(13,1,5) || \ + JSON_HEDLEY_CRAY_VERSION_CHECK(10,0,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_UNREACHABLE() __builtin_unreachable() +#elif defined(JSON_HEDLEY_ASSUME) + #define JSON_HEDLEY_UNREACHABLE() JSON_HEDLEY_ASSUME(0) +#endif +#if 
!defined(JSON_HEDLEY_ASSUME) + #if defined(JSON_HEDLEY_UNREACHABLE) + #define JSON_HEDLEY_ASSUME(expr) JSON_HEDLEY_STATIC_CAST(void, ((expr) ? 1 : (JSON_HEDLEY_UNREACHABLE(), 1))) + #else + #define JSON_HEDLEY_ASSUME(expr) JSON_HEDLEY_STATIC_CAST(void, expr) + #endif +#endif +#if defined(JSON_HEDLEY_UNREACHABLE) + #if \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,2,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(4,0,0) + #define JSON_HEDLEY_UNREACHABLE_RETURN(value) return (JSON_HEDLEY_STATIC_CAST(void, JSON_HEDLEY_ASSUME(0)), (value)) + #else + #define JSON_HEDLEY_UNREACHABLE_RETURN(value) JSON_HEDLEY_UNREACHABLE() + #endif +#else + #define JSON_HEDLEY_UNREACHABLE_RETURN(value) return (value) +#endif +#if !defined(JSON_HEDLEY_UNREACHABLE) + #define JSON_HEDLEY_UNREACHABLE() JSON_HEDLEY_ASSUME(0) +#endif + +JSON_HEDLEY_DIAGNOSTIC_PUSH +#if JSON_HEDLEY_HAS_WARNING("-Wpedantic") + #pragma clang diagnostic ignored "-Wpedantic" +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wc++98-compat-pedantic") && defined(__cplusplus) + #pragma clang diagnostic ignored "-Wc++98-compat-pedantic" +#endif +#if JSON_HEDLEY_GCC_HAS_WARNING("-Wvariadic-macros",4,0,0) + #if defined(__clang__) + #pragma clang diagnostic ignored "-Wvariadic-macros" + #elif defined(JSON_HEDLEY_GCC_VERSION) + #pragma GCC diagnostic ignored "-Wvariadic-macros" + #endif +#endif +#if defined(JSON_HEDLEY_NON_NULL) + #undef JSON_HEDLEY_NON_NULL +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(nonnull) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,3,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) + #define JSON_HEDLEY_NON_NULL(...) __attribute__((__nonnull__(__VA_ARGS__))) +#else + #define JSON_HEDLEY_NON_NULL(...) +#endif +JSON_HEDLEY_DIAGNOSTIC_POP + +#if defined(JSON_HEDLEY_PRINTF_FORMAT) + #undef JSON_HEDLEY_PRINTF_FORMAT +#endif +#if defined(__MINGW32__) && JSON_HEDLEY_GCC_HAS_ATTRIBUTE(format,4,4,0) && !defined(__USE_MINGW_ANSI_STDIO) + #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) __attribute__((__format__(ms_printf, string_idx, first_to_check))) +#elif defined(__MINGW32__) && JSON_HEDLEY_GCC_HAS_ATTRIBUTE(format,4,4,0) && defined(__USE_MINGW_ANSI_STDIO) + #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) __attribute__((__format__(gnu_printf, string_idx, first_to_check))) +#elif \ + JSON_HEDLEY_HAS_ATTRIBUTE(format) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(5,6,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) __attribute__((__format__(__printf__, string_idx, first_to_check))) +#elif JSON_HEDLEY_PELLES_VERSION_CHECK(6,0,0) + #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) 
__declspec(vaformat(printf,string_idx,first_to_check)) +#else + #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) +#endif + +#if defined(JSON_HEDLEY_CONSTEXPR) + #undef JSON_HEDLEY_CONSTEXPR +#endif +#if defined(__cplusplus) + #if __cplusplus >= 201103L + #define JSON_HEDLEY_CONSTEXPR JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(constexpr) + #endif +#endif +#if !defined(JSON_HEDLEY_CONSTEXPR) + #define JSON_HEDLEY_CONSTEXPR +#endif + +#if defined(JSON_HEDLEY_PREDICT) + #undef JSON_HEDLEY_PREDICT +#endif +#if defined(JSON_HEDLEY_LIKELY) + #undef JSON_HEDLEY_LIKELY +#endif +#if defined(JSON_HEDLEY_UNLIKELY) + #undef JSON_HEDLEY_UNLIKELY +#endif +#if defined(JSON_HEDLEY_UNPREDICTABLE) + #undef JSON_HEDLEY_UNPREDICTABLE +#endif +#if JSON_HEDLEY_HAS_BUILTIN(__builtin_unpredictable) + #define JSON_HEDLEY_UNPREDICTABLE(expr) __builtin_unpredictable((expr)) +#endif +#if \ + (JSON_HEDLEY_HAS_BUILTIN(__builtin_expect_with_probability) && !defined(JSON_HEDLEY_PGI_VERSION)) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(9,0,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) +# define JSON_HEDLEY_PREDICT(expr, value, probability) __builtin_expect_with_probability( (expr), (value), (probability)) +# define JSON_HEDLEY_PREDICT_TRUE(expr, probability) __builtin_expect_with_probability(!!(expr), 1 , (probability)) +# define JSON_HEDLEY_PREDICT_FALSE(expr, probability) __builtin_expect_with_probability(!!(expr), 0 , (probability)) +# define JSON_HEDLEY_LIKELY(expr) __builtin_expect (!!(expr), 1 ) +# define JSON_HEDLEY_UNLIKELY(expr) __builtin_expect (!!(expr), 0 ) +#elif \ + (JSON_HEDLEY_HAS_BUILTIN(__builtin_expect) && !defined(JSON_HEDLEY_INTEL_CL_VERSION)) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + (JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,15,0) && defined(__cplusplus)) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,7,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,1,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,1,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_TINYC_VERSION_CHECK(0,9,27) || \ + JSON_HEDLEY_CRAY_VERSION_CHECK(8,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) +# define JSON_HEDLEY_PREDICT(expr, expected, probability) \ + (((probability) >= 0.9) ? __builtin_expect((expr), (expected)) : (JSON_HEDLEY_STATIC_CAST(void, expected), (expr))) +# define JSON_HEDLEY_PREDICT_TRUE(expr, probability) \ + (__extension__ ({ \ + double hedley_probability_ = (probability); \ + ((hedley_probability_ >= 0.9) ? __builtin_expect(!!(expr), 1) : ((hedley_probability_ <= 0.1) ? __builtin_expect(!!(expr), 0) : !!(expr))); \ + })) +# define JSON_HEDLEY_PREDICT_FALSE(expr, probability) \ + (__extension__ ({ \ + double hedley_probability_ = (probability); \ + ((hedley_probability_ >= 0.9) ? __builtin_expect(!!(expr), 0) : ((hedley_probability_ <= 0.1) ? 
__builtin_expect(!!(expr), 1) : !!(expr))); \ + })) +# define JSON_HEDLEY_LIKELY(expr) __builtin_expect(!!(expr), 1) +# define JSON_HEDLEY_UNLIKELY(expr) __builtin_expect(!!(expr), 0) +#else +# define JSON_HEDLEY_PREDICT(expr, expected, probability) (JSON_HEDLEY_STATIC_CAST(void, expected), (expr)) +# define JSON_HEDLEY_PREDICT_TRUE(expr, probability) (!!(expr)) +# define JSON_HEDLEY_PREDICT_FALSE(expr, probability) (!!(expr)) +# define JSON_HEDLEY_LIKELY(expr) (!!(expr)) +# define JSON_HEDLEY_UNLIKELY(expr) (!!(expr)) +#endif +#if !defined(JSON_HEDLEY_UNPREDICTABLE) + #define JSON_HEDLEY_UNPREDICTABLE(expr) JSON_HEDLEY_PREDICT(expr, 1, 0.5) +#endif + +#if defined(JSON_HEDLEY_MALLOC) + #undef JSON_HEDLEY_MALLOC +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(malloc) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(12,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_MALLOC __attribute__((__malloc__)) +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) + #define JSON_HEDLEY_MALLOC _Pragma("returns_new_memory") +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(14,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_MALLOC __declspec(restrict) +#else + #define JSON_HEDLEY_MALLOC +#endif + +#if defined(JSON_HEDLEY_PURE) + #undef JSON_HEDLEY_PURE +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(pure) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(2,96,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) +# define JSON_HEDLEY_PURE __attribute__((__pure__)) +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) +# define JSON_HEDLEY_PURE _Pragma("does_not_write_global_data") +#elif defined(__cplusplus) && \ + ( \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(2,0,1) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(4,0,0) 
|| \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) \ + ) +# define JSON_HEDLEY_PURE _Pragma("FUNC_IS_PURE;") +#else +# define JSON_HEDLEY_PURE +#endif + +#if defined(JSON_HEDLEY_CONST) + #undef JSON_HEDLEY_CONST +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(const) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(2,5,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_CONST __attribute__((__const__)) +#elif \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) + #define JSON_HEDLEY_CONST _Pragma("no_side_effect") +#else + #define JSON_HEDLEY_CONST JSON_HEDLEY_PURE +#endif + +#if defined(JSON_HEDLEY_RESTRICT) + #undef JSON_HEDLEY_RESTRICT +#endif +#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) && !defined(__cplusplus) + #define JSON_HEDLEY_RESTRICT restrict +#elif \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_MSVC_VERSION_CHECK(14,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,2,4) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,1,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + (JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,14,0) && defined(__cplusplus)) || \ + JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) || \ + defined(__clang__) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_RESTRICT __restrict +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,3,0) && !defined(__cplusplus) + #define JSON_HEDLEY_RESTRICT _Restrict +#else + #define JSON_HEDLEY_RESTRICT +#endif + +#if defined(JSON_HEDLEY_INLINE) + #undef JSON_HEDLEY_INLINE +#endif +#if \ + (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)) || \ + (defined(__cplusplus) && (__cplusplus >= 199711L)) + #define JSON_HEDLEY_INLINE inline +#elif \ + defined(JSON_HEDLEY_GCC_VERSION) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(6,2,0) + #define JSON_HEDLEY_INLINE __inline__ +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(12,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,1,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,2,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,0,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_INLINE __inline +#else + #define 
JSON_HEDLEY_INLINE +#endif + +#if defined(JSON_HEDLEY_ALWAYS_INLINE) + #undef JSON_HEDLEY_ALWAYS_INLINE +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(always_inline) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) || \ + JSON_HEDLEY_IAR_VERSION_CHECK(8,10,0) +# define JSON_HEDLEY_ALWAYS_INLINE __attribute__((__always_inline__)) JSON_HEDLEY_INLINE +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(12,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) +# define JSON_HEDLEY_ALWAYS_INLINE __forceinline +#elif defined(__cplusplus) && \ + ( \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,1,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) \ + ) +# define JSON_HEDLEY_ALWAYS_INLINE _Pragma("FUNC_ALWAYS_INLINE;") +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) +# define JSON_HEDLEY_ALWAYS_INLINE _Pragma("inline=forced") +#else +# define JSON_HEDLEY_ALWAYS_INLINE JSON_HEDLEY_INLINE +#endif + +#if defined(JSON_HEDLEY_NEVER_INLINE) + #undef JSON_HEDLEY_NEVER_INLINE +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(noinline) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) || \ + JSON_HEDLEY_IAR_VERSION_CHECK(8,10,0) + #define JSON_HEDLEY_NEVER_INLINE __attribute__((__noinline__)) +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(13,10,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_NEVER_INLINE __declspec(noinline) +#elif JSON_HEDLEY_PGI_VERSION_CHECK(10,2,0) + #define JSON_HEDLEY_NEVER_INLINE _Pragma("noinline") +#elif JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,0,0) && 
defined(__cplusplus) + #define JSON_HEDLEY_NEVER_INLINE _Pragma("FUNC_CANNOT_INLINE;") +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_NEVER_INLINE _Pragma("inline=never") +#elif JSON_HEDLEY_COMPCERT_VERSION_CHECK(3,2,0) + #define JSON_HEDLEY_NEVER_INLINE __attribute((noinline)) +#elif JSON_HEDLEY_PELLES_VERSION_CHECK(9,0,0) + #define JSON_HEDLEY_NEVER_INLINE __declspec(noinline) +#else + #define JSON_HEDLEY_NEVER_INLINE +#endif + +#if defined(JSON_HEDLEY_PRIVATE) + #undef JSON_HEDLEY_PRIVATE +#endif +#if defined(JSON_HEDLEY_PUBLIC) + #undef JSON_HEDLEY_PUBLIC +#endif +#if defined(JSON_HEDLEY_IMPORT) + #undef JSON_HEDLEY_IMPORT +#endif +#if defined(_WIN32) || defined(__CYGWIN__) +# define JSON_HEDLEY_PRIVATE +# define JSON_HEDLEY_PUBLIC __declspec(dllexport) +# define JSON_HEDLEY_IMPORT __declspec(dllimport) +#else +# if \ + JSON_HEDLEY_HAS_ATTRIBUTE(visibility) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,3,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(13,1,0) || \ + ( \ + defined(__TI_EABI__) && \ + ( \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) \ + ) \ + ) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) +# define JSON_HEDLEY_PRIVATE __attribute__((__visibility__("hidden"))) +# define JSON_HEDLEY_PUBLIC __attribute__((__visibility__("default"))) +# else +# define JSON_HEDLEY_PRIVATE +# define JSON_HEDLEY_PUBLIC +# endif +# define JSON_HEDLEY_IMPORT extern +#endif + +#if defined(JSON_HEDLEY_NO_THROW) + #undef JSON_HEDLEY_NO_THROW +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(nothrow) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,3,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_NO_THROW __attribute__((__nothrow__)) +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(13,1,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) + #define JSON_HEDLEY_NO_THROW __declspec(nothrow) +#else + #define JSON_HEDLEY_NO_THROW +#endif + +#if defined(JSON_HEDLEY_FALL_THROUGH) + #undef JSON_HEDLEY_FALL_THROUGH +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(fallthrough) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(7,0,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_FALL_THROUGH __attribute__((__fallthrough__)) +#elif JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(clang,fallthrough) + #define JSON_HEDLEY_FALL_THROUGH JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[clang::fallthrough]]) +#elif JSON_HEDLEY_HAS_CPP_ATTRIBUTE(fallthrough) + #define JSON_HEDLEY_FALL_THROUGH JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[fallthrough]]) +#elif defined(__fallthrough) /* SAL */ + #define JSON_HEDLEY_FALL_THROUGH __fallthrough +#else + #define JSON_HEDLEY_FALL_THROUGH +#endif + +#if defined(JSON_HEDLEY_RETURNS_NON_NULL) + #undef JSON_HEDLEY_RETURNS_NON_NULL +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(returns_nonnull) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,9,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_RETURNS_NON_NULL __attribute__((__returns_nonnull__)) +#elif defined(_Ret_notnull_) /* SAL */ + #define JSON_HEDLEY_RETURNS_NON_NULL _Ret_notnull_ +#else + #define JSON_HEDLEY_RETURNS_NON_NULL +#endif + +#if defined(JSON_HEDLEY_ARRAY_PARAM) + #undef JSON_HEDLEY_ARRAY_PARAM +#endif +#if \ + defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) 
&& \ + !defined(__STDC_NO_VLA__) && \ + !defined(__cplusplus) && \ + !defined(JSON_HEDLEY_PGI_VERSION) && \ + !defined(JSON_HEDLEY_TINYC_VERSION) + #define JSON_HEDLEY_ARRAY_PARAM(name) (name) +#else + #define JSON_HEDLEY_ARRAY_PARAM(name) +#endif + +#if defined(JSON_HEDLEY_IS_CONSTANT) + #undef JSON_HEDLEY_IS_CONSTANT +#endif +#if defined(JSON_HEDLEY_REQUIRE_CONSTEXPR) + #undef JSON_HEDLEY_REQUIRE_CONSTEXPR +#endif +/* JSON_HEDLEY_IS_CONSTEXPR_ is for + HEDLEY INTERNAL USE ONLY. API subject to change without notice. */ +#if defined(JSON_HEDLEY_IS_CONSTEXPR_) + #undef JSON_HEDLEY_IS_CONSTEXPR_ +#endif +#if \ + JSON_HEDLEY_HAS_BUILTIN(__builtin_constant_p) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,4,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_TINYC_VERSION_CHECK(0,9,19) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(13,1,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,1,0) || \ + (JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) && !defined(__cplusplus)) || \ + JSON_HEDLEY_CRAY_VERSION_CHECK(8,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_IS_CONSTANT(expr) __builtin_constant_p(expr) +#endif +#if !defined(__cplusplus) +# if \ + JSON_HEDLEY_HAS_BUILTIN(__builtin_types_compatible_p) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,4,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(13,1,0) || \ + JSON_HEDLEY_CRAY_VERSION_CHECK(8,1,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(5,4,0) || \ + JSON_HEDLEY_TINYC_VERSION_CHECK(0,9,24) +#if defined(__INTPTR_TYPE__) + #define JSON_HEDLEY_IS_CONSTEXPR_(expr) __builtin_types_compatible_p(__typeof__((1 ? (void*) ((__INTPTR_TYPE__) ((expr) * 0)) : (int*) 0)), int*) +#else + #include <stdint.h> + #define JSON_HEDLEY_IS_CONSTEXPR_(expr) __builtin_types_compatible_p(__typeof__((1 ? (void*) ((intptr_t) ((expr) * 0)) : (int*) 0)), int*) +#endif +# elif \ + ( \ + defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L) && \ + !defined(JSON_HEDLEY_SUNPRO_VERSION) && \ + !defined(JSON_HEDLEY_PGI_VERSION) && \ + !defined(JSON_HEDLEY_IAR_VERSION)) || \ + (JSON_HEDLEY_HAS_EXTENSION(c_generic_selections) && !defined(JSON_HEDLEY_IAR_VERSION)) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,9,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(17,0,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(12,1,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(5,3,0) +#if defined(__INTPTR_TYPE__) + #define JSON_HEDLEY_IS_CONSTEXPR_(expr) _Generic((1 ? (void*) ((__INTPTR_TYPE__) ((expr) * 0)) : (int*) 0), int*: 1, void*: 0) +#else + #include <stdint.h> + #define JSON_HEDLEY_IS_CONSTEXPR_(expr) _Generic((1 ? (void*) ((intptr_t) * 0) : (int*) 0), int*: 1, void*: 0) +#endif +# elif \ + defined(JSON_HEDLEY_GCC_VERSION) || \ + defined(JSON_HEDLEY_INTEL_VERSION) || \ + defined(JSON_HEDLEY_TINYC_VERSION) || \ + defined(JSON_HEDLEY_TI_ARMCL_VERSION) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(18,12,0) || \ + defined(JSON_HEDLEY_TI_CL2000_VERSION) || \ + defined(JSON_HEDLEY_TI_CL6X_VERSION) || \ + defined(JSON_HEDLEY_TI_CL7X_VERSION) || \ + defined(JSON_HEDLEY_TI_CLPRU_VERSION) || \ + defined(__clang__) +# define JSON_HEDLEY_IS_CONSTEXPR_(expr) ( \ + sizeof(void) != \ + sizeof(*( \ + 1 ? 
\ + ((void*) ((expr) * 0L) ) : \ +((struct { char v[sizeof(void) * 2]; } *) 1) \ + ) \ + ) \ + ) +# endif +#endif +#if defined(JSON_HEDLEY_IS_CONSTEXPR_) + #if !defined(JSON_HEDLEY_IS_CONSTANT) + #define JSON_HEDLEY_IS_CONSTANT(expr) JSON_HEDLEY_IS_CONSTEXPR_(expr) + #endif + #define JSON_HEDLEY_REQUIRE_CONSTEXPR(expr) (JSON_HEDLEY_IS_CONSTEXPR_(expr) ? (expr) : (-1)) +#else + #if !defined(JSON_HEDLEY_IS_CONSTANT) + #define JSON_HEDLEY_IS_CONSTANT(expr) (0) + #endif + #define JSON_HEDLEY_REQUIRE_CONSTEXPR(expr) (expr) +#endif + +#if defined(JSON_HEDLEY_BEGIN_C_DECLS) + #undef JSON_HEDLEY_BEGIN_C_DECLS +#endif +#if defined(JSON_HEDLEY_END_C_DECLS) + #undef JSON_HEDLEY_END_C_DECLS +#endif +#if defined(JSON_HEDLEY_C_DECL) + #undef JSON_HEDLEY_C_DECL +#endif +#if defined(__cplusplus) + #define JSON_HEDLEY_BEGIN_C_DECLS extern "C" { + #define JSON_HEDLEY_END_C_DECLS } + #define JSON_HEDLEY_C_DECL extern "C" +#else + #define JSON_HEDLEY_BEGIN_C_DECLS + #define JSON_HEDLEY_END_C_DECLS + #define JSON_HEDLEY_C_DECL +#endif + +#if defined(JSON_HEDLEY_STATIC_ASSERT) + #undef JSON_HEDLEY_STATIC_ASSERT +#endif +#if \ + !defined(__cplusplus) && ( \ + (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)) || \ + (JSON_HEDLEY_HAS_FEATURE(c_static_assert) && !defined(JSON_HEDLEY_INTEL_CL_VERSION)) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(6,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + defined(_Static_assert) \ + ) +# define JSON_HEDLEY_STATIC_ASSERT(expr, message) _Static_assert(expr, message) +#elif \ + (defined(__cplusplus) && (__cplusplus >= 201103L)) || \ + JSON_HEDLEY_MSVC_VERSION_CHECK(16,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) +# define JSON_HEDLEY_STATIC_ASSERT(expr, message) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(static_assert(expr, message)) +#else +# define JSON_HEDLEY_STATIC_ASSERT(expr, message) +#endif + +#if defined(JSON_HEDLEY_NULL) + #undef JSON_HEDLEY_NULL +#endif +#if defined(__cplusplus) + #if __cplusplus >= 201103L + #define JSON_HEDLEY_NULL JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(nullptr) + #elif defined(NULL) + #define JSON_HEDLEY_NULL NULL + #else + #define JSON_HEDLEY_NULL JSON_HEDLEY_STATIC_CAST(void*, 0) + #endif +#elif defined(NULL) + #define JSON_HEDLEY_NULL NULL +#else + #define JSON_HEDLEY_NULL ((void*) 0) +#endif + +#if defined(JSON_HEDLEY_MESSAGE) + #undef JSON_HEDLEY_MESSAGE +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wunknown-pragmas") +# define JSON_HEDLEY_MESSAGE(msg) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS \ + JSON_HEDLEY_PRAGMA(message msg) \ + JSON_HEDLEY_DIAGNOSTIC_POP +#elif \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,4,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) +# define JSON_HEDLEY_MESSAGE(msg) JSON_HEDLEY_PRAGMA(message msg) +#elif JSON_HEDLEY_CRAY_VERSION_CHECK(5,0,0) +# define JSON_HEDLEY_MESSAGE(msg) JSON_HEDLEY_PRAGMA(_CRI message msg) +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) +# define JSON_HEDLEY_MESSAGE(msg) JSON_HEDLEY_PRAGMA(message(msg)) +#elif JSON_HEDLEY_PELLES_VERSION_CHECK(2,0,0) +# define JSON_HEDLEY_MESSAGE(msg) JSON_HEDLEY_PRAGMA(message(msg)) +#else +# define JSON_HEDLEY_MESSAGE(msg) +#endif + +#if defined(JSON_HEDLEY_WARNING) + #undef JSON_HEDLEY_WARNING +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wunknown-pragmas") +# define JSON_HEDLEY_WARNING(msg) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS \ + JSON_HEDLEY_PRAGMA(clang warning msg) \ + JSON_HEDLEY_DIAGNOSTIC_POP +#elif \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,8,0) || \ + 
JSON_HEDLEY_PGI_VERSION_CHECK(18,4,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) +# define JSON_HEDLEY_WARNING(msg) JSON_HEDLEY_PRAGMA(GCC warning msg) +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) +# define JSON_HEDLEY_WARNING(msg) JSON_HEDLEY_PRAGMA(message(msg)) +#else +# define JSON_HEDLEY_WARNING(msg) JSON_HEDLEY_MESSAGE(msg) +#endif + +#if defined(JSON_HEDLEY_REQUIRE) + #undef JSON_HEDLEY_REQUIRE +#endif +#if defined(JSON_HEDLEY_REQUIRE_MSG) + #undef JSON_HEDLEY_REQUIRE_MSG +#endif +#if JSON_HEDLEY_HAS_ATTRIBUTE(diagnose_if) +# if JSON_HEDLEY_HAS_WARNING("-Wgcc-compat") +# define JSON_HEDLEY_REQUIRE(expr) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("clang diagnostic ignored \"-Wgcc-compat\"") \ + __attribute__((diagnose_if(!(expr), #expr, "error"))) \ + JSON_HEDLEY_DIAGNOSTIC_POP +# define JSON_HEDLEY_REQUIRE_MSG(expr,msg) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("clang diagnostic ignored \"-Wgcc-compat\"") \ + __attribute__((diagnose_if(!(expr), msg, "error"))) \ + JSON_HEDLEY_DIAGNOSTIC_POP +# else +# define JSON_HEDLEY_REQUIRE(expr) __attribute__((diagnose_if(!(expr), #expr, "error"))) +# define JSON_HEDLEY_REQUIRE_MSG(expr,msg) __attribute__((diagnose_if(!(expr), msg, "error"))) +# endif +#else +# define JSON_HEDLEY_REQUIRE(expr) +# define JSON_HEDLEY_REQUIRE_MSG(expr,msg) +#endif + +#if defined(JSON_HEDLEY_FLAGS) + #undef JSON_HEDLEY_FLAGS +#endif +#if JSON_HEDLEY_HAS_ATTRIBUTE(flag_enum) && (!defined(__cplusplus) || JSON_HEDLEY_HAS_WARNING("-Wbitfield-enum-conversion")) + #define JSON_HEDLEY_FLAGS __attribute__((__flag_enum__)) +#else + #define JSON_HEDLEY_FLAGS +#endif + +#if defined(JSON_HEDLEY_FLAGS_CAST) + #undef JSON_HEDLEY_FLAGS_CAST +#endif +#if JSON_HEDLEY_INTEL_VERSION_CHECK(19,0,0) +# define JSON_HEDLEY_FLAGS_CAST(T, expr) (__extension__ ({ \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("warning(disable:188)") \ + ((T) (expr)); \ + JSON_HEDLEY_DIAGNOSTIC_POP \ + })) +#else +# define JSON_HEDLEY_FLAGS_CAST(T, expr) JSON_HEDLEY_STATIC_CAST(T, expr) +#endif + +#if defined(JSON_HEDLEY_EMPTY_BASES) + #undef JSON_HEDLEY_EMPTY_BASES +#endif +#if \ + (JSON_HEDLEY_MSVC_VERSION_CHECK(19,0,23918) && !JSON_HEDLEY_MSVC_VERSION_CHECK(20,0,0)) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_EMPTY_BASES __declspec(empty_bases) +#else + #define JSON_HEDLEY_EMPTY_BASES +#endif + +/* Remaining macros are deprecated. 
*/ + +#if defined(JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK) + #undef JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK +#endif +#if defined(__clang__) + #define JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK(major,minor,patch) (0) +#else + #define JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK(major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_CLANG_HAS_ATTRIBUTE) + #undef JSON_HEDLEY_CLANG_HAS_ATTRIBUTE +#endif +#define JSON_HEDLEY_CLANG_HAS_ATTRIBUTE(attribute) JSON_HEDLEY_HAS_ATTRIBUTE(attribute) + +#if defined(JSON_HEDLEY_CLANG_HAS_CPP_ATTRIBUTE) + #undef JSON_HEDLEY_CLANG_HAS_CPP_ATTRIBUTE +#endif +#define JSON_HEDLEY_CLANG_HAS_CPP_ATTRIBUTE(attribute) JSON_HEDLEY_HAS_CPP_ATTRIBUTE(attribute) + +#if defined(JSON_HEDLEY_CLANG_HAS_BUILTIN) + #undef JSON_HEDLEY_CLANG_HAS_BUILTIN +#endif +#define JSON_HEDLEY_CLANG_HAS_BUILTIN(builtin) JSON_HEDLEY_HAS_BUILTIN(builtin) + +#if defined(JSON_HEDLEY_CLANG_HAS_FEATURE) + #undef JSON_HEDLEY_CLANG_HAS_FEATURE +#endif +#define JSON_HEDLEY_CLANG_HAS_FEATURE(feature) JSON_HEDLEY_HAS_FEATURE(feature) + +#if defined(JSON_HEDLEY_CLANG_HAS_EXTENSION) + #undef JSON_HEDLEY_CLANG_HAS_EXTENSION +#endif +#define JSON_HEDLEY_CLANG_HAS_EXTENSION(extension) JSON_HEDLEY_HAS_EXTENSION(extension) + +#if defined(JSON_HEDLEY_CLANG_HAS_DECLSPEC_DECLSPEC_ATTRIBUTE) + #undef JSON_HEDLEY_CLANG_HAS_DECLSPEC_DECLSPEC_ATTRIBUTE +#endif +#define JSON_HEDLEY_CLANG_HAS_DECLSPEC_ATTRIBUTE(attribute) JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE(attribute) + +#if defined(JSON_HEDLEY_CLANG_HAS_WARNING) + #undef JSON_HEDLEY_CLANG_HAS_WARNING +#endif +#define JSON_HEDLEY_CLANG_HAS_WARNING(warning) JSON_HEDLEY_HAS_WARNING(warning) + +#endif /* !defined(JSON_HEDLEY_VERSION) || (JSON_HEDLEY_VERSION < X) */ + +// #include <nlohmann/detail/meta/detected.hpp> + + +#include <type_traits> + +// #include <nlohmann/detail/meta/void_t.hpp> + + +namespace nlohmann +{ +namespace detail +{ +template<typename ...Ts> struct make_void +{ + using type = void; +}; +template<typename ...Ts> using void_t = typename make_void<Ts...>::type; +} // namespace detail +} // namespace nlohmann + + +// https://en.cppreference.com/w/cpp/experimental/is_detected +namespace nlohmann +{ +namespace detail +{ +struct nonesuch +{ + nonesuch() = delete; + ~nonesuch() = delete; + nonesuch(nonesuch const&) = delete; + nonesuch(nonesuch const&&) = delete; + void operator=(nonesuch const&) = delete; + void operator=(nonesuch&&) = delete; +}; + +template<class Default, + class AlwaysVoid, + template<class...> class Op, + class... Args> +struct detector +{ + using value_t = std::false_type; + using type = Default; +}; + +template<class Default, template<class...> class Op, class... Args> +struct detector<Default, void_t<Op<Args...>>, Op, Args...> +{ + using value_t = std::true_type; + using type = Op<Args...>; +}; + +template<template<class...> class Op, class... Args> +using is_detected = typename detector<nonesuch, void, Op, Args...>::value_t; + +template<template<class...> class Op, class... Args> +struct is_detected_lazy : is_detected<Op, Args...> { }; + +template<template<class...> class Op, class... Args> +using detected_t = typename detector<nonesuch, void, Op, Args...>::type; + +template<class Default, template<class...> class Op, class... Args> +using detected_or = detector<Default, void, Op, Args...>; + +template<class Default, template<class...> class Op, class... 
Args> +using detected_or_t = typename detected_or<Default, Op, Args...>::type; + +template<class Expected, template<class...> class Op, class... Args> +using is_detected_exact = std::is_same<Expected, detected_t<Op, Args...>>; + +template<class To, template<class...> class Op, class... Args> +using is_detected_convertible = + std::is_convertible<detected_t<Op, Args...>, To>; +} // namespace detail +} // namespace nlohmann + + +// This file contains all internal macro definitions +// You MUST include macro_unscope.hpp at the end of json.hpp to undef all of them + +// exclude unsupported compilers +#if !defined(JSON_SKIP_UNSUPPORTED_COMPILER_CHECK) + #if defined(__clang__) + #if (__clang_major__ * 10000 + __clang_minor__ * 100 + __clang_patchlevel__) < 30400 + #error "unsupported Clang version - see https://github.com/nlohmann/json#supported-compilers" + #endif + #elif defined(__GNUC__) && !(defined(__ICC) || defined(__INTEL_COMPILER)) + #if (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) < 40800 + #error "unsupported GCC version - see https://github.com/nlohmann/json#supported-compilers" + #endif + #endif +#endif + +// C++ language standard detection +// if the user manually specified the used c++ version this is skipped +#if !defined(JSON_HAS_CPP_20) && !defined(JSON_HAS_CPP_17) && !defined(JSON_HAS_CPP_14) && !defined(JSON_HAS_CPP_11) + #if (defined(__cplusplus) && __cplusplus >= 202002L) || (defined(_MSVC_LANG) && _MSVC_LANG >= 202002L) + #define JSON_HAS_CPP_20 + #define JSON_HAS_CPP_17 + #define JSON_HAS_CPP_14 + #elif (defined(__cplusplus) && __cplusplus >= 201703L) || (defined(_HAS_CXX17) && _HAS_CXX17 == 1) // fix for issue #464 + #define JSON_HAS_CPP_17 + #define JSON_HAS_CPP_14 + #elif (defined(__cplusplus) && __cplusplus >= 201402L) || (defined(_HAS_CXX14) && _HAS_CXX14 == 1) + #define JSON_HAS_CPP_14 + #endif + // the cpp 11 flag is always specified because it is the minimal required version + #define JSON_HAS_CPP_11 +#endif + +#ifdef __has_include + #if __has_include(<version>) + #include <version> + #endif +#endif + +#if !defined(JSON_HAS_FILESYSTEM) && !defined(JSON_HAS_EXPERIMENTAL_FILESYSTEM) + #ifdef JSON_HAS_CPP_17 + #if defined(__cpp_lib_filesystem) + #define JSON_HAS_FILESYSTEM 1 + #elif defined(__cpp_lib_experimental_filesystem) + #define JSON_HAS_EXPERIMENTAL_FILESYSTEM 1 + #elif !defined(__has_include) + #define JSON_HAS_EXPERIMENTAL_FILESYSTEM 1 + #elif __has_include(<filesystem>) + #define JSON_HAS_FILESYSTEM 1 + #elif __has_include(<experimental/filesystem>) + #define JSON_HAS_EXPERIMENTAL_FILESYSTEM 1 + #endif + + // std::filesystem does not work on MinGW GCC 8: https://sourceforge.net/p/mingw-w64/bugs/737/ + #if defined(__MINGW32__) && defined(__GNUC__) && __GNUC__ == 8 + #undef JSON_HAS_FILESYSTEM + #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM + #endif + + // no filesystem support before GCC 8: https://en.cppreference.com/w/cpp/compiler_support + #if defined(__GNUC__) && !defined(__clang__) && __GNUC__ < 8 + #undef JSON_HAS_FILESYSTEM + #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM + #endif + + // no filesystem support before Clang 7: https://en.cppreference.com/w/cpp/compiler_support + #if defined(__clang_major__) && __clang_major__ < 7 + #undef JSON_HAS_FILESYSTEM + #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM + #endif + + // no filesystem support before MSVC 19.14: https://en.cppreference.com/w/cpp/compiler_support + #if defined(_MSC_VER) && _MSC_VER < 1914 + #undef JSON_HAS_FILESYSTEM + #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM + #endif + + // no filesystem 
support before iOS 13 + #if defined(__IPHONE_OS_VERSION_MIN_REQUIRED) && __IPHONE_OS_VERSION_MIN_REQUIRED < 130000 + #undef JSON_HAS_FILESYSTEM + #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM + #endif + + // no filesystem support before macOS Catalina + #if defined(__MAC_OS_X_VERSION_MIN_REQUIRED) && __MAC_OS_X_VERSION_MIN_REQUIRED < 101500 + #undef JSON_HAS_FILESYSTEM + #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM + #endif + #endif +#endif + +#ifndef JSON_HAS_EXPERIMENTAL_FILESYSTEM + #define JSON_HAS_EXPERIMENTAL_FILESYSTEM 0 +#endif + +#ifndef JSON_HAS_FILESYSTEM + #define JSON_HAS_FILESYSTEM 0 +#endif + +#ifndef JSON_HAS_THREE_WAY_COMPARISON + #if defined(__cpp_impl_three_way_comparison) && __cpp_impl_three_way_comparison >= 201907L \ + && defined(__cpp_lib_three_way_comparison) && __cpp_lib_three_way_comparison >= 201907L + #define JSON_HAS_THREE_WAY_COMPARISON 1 + #else + #define JSON_HAS_THREE_WAY_COMPARISON 0 + #endif +#endif + +#ifndef JSON_HAS_RANGES + // ranges header shipping in GCC 11.1.0 (released 2021-04-27) has syntax error + #if defined(__GLIBCXX__) && __GLIBCXX__ == 20210427 + #define JSON_HAS_RANGES 0 + #elif defined(__cpp_lib_ranges) + #define JSON_HAS_RANGES 1 + #else + #define JSON_HAS_RANGES 0 + #endif +#endif + +#ifdef JSON_HAS_CPP_17 + #define JSON_INLINE_VARIABLE inline +#else + #define JSON_INLINE_VARIABLE +#endif + +#if JSON_HEDLEY_HAS_ATTRIBUTE(no_unique_address) + #define JSON_NO_UNIQUE_ADDRESS [[no_unique_address]] +#else + #define JSON_NO_UNIQUE_ADDRESS +#endif + +// disable documentation warnings on clang +#if defined(__clang__) + #pragma clang diagnostic push + #pragma clang diagnostic ignored "-Wdocumentation" + #pragma clang diagnostic ignored "-Wdocumentation-unknown-command" +#endif + +// allow disabling exceptions +#if (defined(__cpp_exceptions) || defined(__EXCEPTIONS) || defined(_CPPUNWIND)) && !defined(JSON_NOEXCEPTION) + #define JSON_THROW(exception) throw exception + #define JSON_TRY try + #define JSON_CATCH(exception) catch(exception) + #define JSON_INTERNAL_CATCH(exception) catch(exception) +#else + #include <cstdlib> + #define JSON_THROW(exception) std::abort() + #define JSON_TRY if(true) + #define JSON_CATCH(exception) if(false) + #define JSON_INTERNAL_CATCH(exception) if(false) +#endif + +// override exception macros +#if defined(JSON_THROW_USER) + #undef JSON_THROW + #define JSON_THROW JSON_THROW_USER +#endif +#if defined(JSON_TRY_USER) + #undef JSON_TRY + #define JSON_TRY JSON_TRY_USER +#endif +#if defined(JSON_CATCH_USER) + #undef JSON_CATCH + #define JSON_CATCH JSON_CATCH_USER + #undef JSON_INTERNAL_CATCH + #define JSON_INTERNAL_CATCH JSON_CATCH_USER +#endif +#if defined(JSON_INTERNAL_CATCH_USER) + #undef JSON_INTERNAL_CATCH + #define JSON_INTERNAL_CATCH JSON_INTERNAL_CATCH_USER +#endif + +// allow overriding assert +#if !defined(JSON_ASSERT) + #include <cassert> // assert + #define JSON_ASSERT(x) assert(x) +#endif + +// allow to access some private functions (needed by the test suite) +#if defined(JSON_TESTS_PRIVATE) + #define JSON_PRIVATE_UNLESS_TESTED public +#else + #define JSON_PRIVATE_UNLESS_TESTED private +#endif + +/*! +@brief macro to briefly define a mapping between an enum and JSON +@def NLOHMANN_JSON_SERIALIZE_ENUM +@since version 3.4.0 +*/ +#define NLOHMANN_JSON_SERIALIZE_ENUM(ENUM_TYPE, ...) 
\ + template<typename BasicJsonType> \ + inline void to_json(BasicJsonType& j, const ENUM_TYPE& e) \ + { \ + static_assert(std::is_enum<ENUM_TYPE>::value, #ENUM_TYPE " must be an enum!"); \ + static const std::pair<ENUM_TYPE, BasicJsonType> m[] = __VA_ARGS__; \ + auto it = std::find_if(std::begin(m), std::end(m), \ + [e](const std::pair<ENUM_TYPE, BasicJsonType>& ej_pair) -> bool \ + { \ + return ej_pair.first == e; \ + }); \ + j = ((it != std::end(m)) ? it : std::begin(m))->second; \ + } \ + template<typename BasicJsonType> \ + inline void from_json(const BasicJsonType& j, ENUM_TYPE& e) \ + { \ + static_assert(std::is_enum<ENUM_TYPE>::value, #ENUM_TYPE " must be an enum!"); \ + static const std::pair<ENUM_TYPE, BasicJsonType> m[] = __VA_ARGS__; \ + auto it = std::find_if(std::begin(m), std::end(m), \ + [&j](const std::pair<ENUM_TYPE, BasicJsonType>& ej_pair) -> bool \ + { \ + return ej_pair.second == j; \ + }); \ + e = ((it != std::end(m)) ? it : std::begin(m))->first; \ + } + +// Ugly macros to avoid uglier copy-paste when specializing basic_json. They +// may be removed in the future once the class is split. + +#define NLOHMANN_BASIC_JSON_TPL_DECLARATION \ + template<template<typename, typename, typename...> class ObjectType, \ + template<typename, typename...> class ArrayType, \ + class StringType, class BooleanType, class NumberIntegerType, \ + class NumberUnsignedType, class NumberFloatType, \ + template<typename> class AllocatorType, \ + template<typename, typename = void> class JSONSerializer, \ + class BinaryType> + +#define NLOHMANN_BASIC_JSON_TPL \ + basic_json<ObjectType, ArrayType, StringType, BooleanType, \ + NumberIntegerType, NumberUnsignedType, NumberFloatType, \ + AllocatorType, JSONSerializer, BinaryType> + +// Macros to simplify conversion from/to types + +#define NLOHMANN_JSON_EXPAND( x ) x +#define NLOHMANN_JSON_GET_MACRO(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23, _24, _25, _26, _27, _28, _29, _30, _31, _32, _33, _34, _35, _36, _37, _38, _39, _40, _41, _42, _43, _44, _45, _46, _47, _48, _49, _50, _51, _52, _53, _54, _55, _56, _57, _58, _59, _60, _61, _62, _63, _64, NAME,...) NAME +#define NLOHMANN_JSON_PASTE(...) 
NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_GET_MACRO(__VA_ARGS__, \ + NLOHMANN_JSON_PASTE64, \ + NLOHMANN_JSON_PASTE63, \ + NLOHMANN_JSON_PASTE62, \ + NLOHMANN_JSON_PASTE61, \ + NLOHMANN_JSON_PASTE60, \ + NLOHMANN_JSON_PASTE59, \ + NLOHMANN_JSON_PASTE58, \ + NLOHMANN_JSON_PASTE57, \ + NLOHMANN_JSON_PASTE56, \ + NLOHMANN_JSON_PASTE55, \ + NLOHMANN_JSON_PASTE54, \ + NLOHMANN_JSON_PASTE53, \ + NLOHMANN_JSON_PASTE52, \ + NLOHMANN_JSON_PASTE51, \ + NLOHMANN_JSON_PASTE50, \ + NLOHMANN_JSON_PASTE49, \ + NLOHMANN_JSON_PASTE48, \ + NLOHMANN_JSON_PASTE47, \ + NLOHMANN_JSON_PASTE46, \ + NLOHMANN_JSON_PASTE45, \ + NLOHMANN_JSON_PASTE44, \ + NLOHMANN_JSON_PASTE43, \ + NLOHMANN_JSON_PASTE42, \ + NLOHMANN_JSON_PASTE41, \ + NLOHMANN_JSON_PASTE40, \ + NLOHMANN_JSON_PASTE39, \ + NLOHMANN_JSON_PASTE38, \ + NLOHMANN_JSON_PASTE37, \ + NLOHMANN_JSON_PASTE36, \ + NLOHMANN_JSON_PASTE35, \ + NLOHMANN_JSON_PASTE34, \ + NLOHMANN_JSON_PASTE33, \ + NLOHMANN_JSON_PASTE32, \ + NLOHMANN_JSON_PASTE31, \ + NLOHMANN_JSON_PASTE30, \ + NLOHMANN_JSON_PASTE29, \ + NLOHMANN_JSON_PASTE28, \ + NLOHMANN_JSON_PASTE27, \ + NLOHMANN_JSON_PASTE26, \ + NLOHMANN_JSON_PASTE25, \ + NLOHMANN_JSON_PASTE24, \ + NLOHMANN_JSON_PASTE23, \ + NLOHMANN_JSON_PASTE22, \ + NLOHMANN_JSON_PASTE21, \ + NLOHMANN_JSON_PASTE20, \ + NLOHMANN_JSON_PASTE19, \ + NLOHMANN_JSON_PASTE18, \ + NLOHMANN_JSON_PASTE17, \ + NLOHMANN_JSON_PASTE16, \ + NLOHMANN_JSON_PASTE15, \ + NLOHMANN_JSON_PASTE14, \ + NLOHMANN_JSON_PASTE13, \ + NLOHMANN_JSON_PASTE12, \ + NLOHMANN_JSON_PASTE11, \ + NLOHMANN_JSON_PASTE10, \ + NLOHMANN_JSON_PASTE9, \ + NLOHMANN_JSON_PASTE8, \ + NLOHMANN_JSON_PASTE7, \ + NLOHMANN_JSON_PASTE6, \ + NLOHMANN_JSON_PASTE5, \ + NLOHMANN_JSON_PASTE4, \ + NLOHMANN_JSON_PASTE3, \ + NLOHMANN_JSON_PASTE2, \ + NLOHMANN_JSON_PASTE1)(__VA_ARGS__)) +#define NLOHMANN_JSON_PASTE2(func, v1) func(v1) +#define NLOHMANN_JSON_PASTE3(func, v1, v2) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE2(func, v2) +#define NLOHMANN_JSON_PASTE4(func, v1, v2, v3) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE3(func, v2, v3) +#define NLOHMANN_JSON_PASTE5(func, v1, v2, v3, v4) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE4(func, v2, v3, v4) +#define NLOHMANN_JSON_PASTE6(func, v1, v2, v3, v4, v5) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE5(func, v2, v3, v4, v5) +#define NLOHMANN_JSON_PASTE7(func, v1, v2, v3, v4, v5, v6) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE6(func, v2, v3, v4, v5, v6) +#define NLOHMANN_JSON_PASTE8(func, v1, v2, v3, v4, v5, v6, v7) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE7(func, v2, v3, v4, v5, v6, v7) +#define NLOHMANN_JSON_PASTE9(func, v1, v2, v3, v4, v5, v6, v7, v8) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE8(func, v2, v3, v4, v5, v6, v7, v8) +#define NLOHMANN_JSON_PASTE10(func, v1, v2, v3, v4, v5, v6, v7, v8, v9) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE9(func, v2, v3, v4, v5, v6, v7, v8, v9) +#define NLOHMANN_JSON_PASTE11(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE10(func, v2, v3, v4, v5, v6, v7, v8, v9, v10) +#define NLOHMANN_JSON_PASTE12(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE11(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11) +#define NLOHMANN_JSON_PASTE13(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE12(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12) +#define NLOHMANN_JSON_PASTE14(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, 
v13) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE13(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13) +#define NLOHMANN_JSON_PASTE15(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE14(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14) +#define NLOHMANN_JSON_PASTE16(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE15(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15) +#define NLOHMANN_JSON_PASTE17(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE16(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16) +#define NLOHMANN_JSON_PASTE18(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE17(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17) +#define NLOHMANN_JSON_PASTE19(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE18(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18) +#define NLOHMANN_JSON_PASTE20(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE19(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19) +#define NLOHMANN_JSON_PASTE21(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE20(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20) +#define NLOHMANN_JSON_PASTE22(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE21(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21) +#define NLOHMANN_JSON_PASTE23(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE22(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22) +#define NLOHMANN_JSON_PASTE24(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE23(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23) +#define NLOHMANN_JSON_PASTE25(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE24(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24) +#define NLOHMANN_JSON_PASTE26(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE25(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25) +#define NLOHMANN_JSON_PASTE27(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26) NLOHMANN_JSON_PASTE2(func, v1) 
NLOHMANN_JSON_PASTE26(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26) +#define NLOHMANN_JSON_PASTE28(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE27(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27) +#define NLOHMANN_JSON_PASTE29(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE28(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28) +#define NLOHMANN_JSON_PASTE30(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE29(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29) +#define NLOHMANN_JSON_PASTE31(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE30(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30) +#define NLOHMANN_JSON_PASTE32(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE31(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31) +#define NLOHMANN_JSON_PASTE33(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE32(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32) +#define NLOHMANN_JSON_PASTE34(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE33(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33) +#define NLOHMANN_JSON_PASTE35(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE34(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34) +#define NLOHMANN_JSON_PASTE36(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE35(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, 
v26, v27, v28, v29, v30, v31, v32, v33, v34, v35) +#define NLOHMANN_JSON_PASTE37(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE36(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36) +#define NLOHMANN_JSON_PASTE38(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE37(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37) +#define NLOHMANN_JSON_PASTE39(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE38(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38) +#define NLOHMANN_JSON_PASTE40(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE39(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39) +#define NLOHMANN_JSON_PASTE41(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE40(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40) +#define NLOHMANN_JSON_PASTE42(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE41(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41) +#define NLOHMANN_JSON_PASTE43(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE42(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42) +#define NLOHMANN_JSON_PASTE44(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43) 
NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE43(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43) +#define NLOHMANN_JSON_PASTE45(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE44(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44) +#define NLOHMANN_JSON_PASTE46(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE45(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45) +#define NLOHMANN_JSON_PASTE47(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE46(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46) +#define NLOHMANN_JSON_PASTE48(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE47(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47) +#define NLOHMANN_JSON_PASTE49(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE48(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48) +#define NLOHMANN_JSON_PASTE50(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE49(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49) +#define NLOHMANN_JSON_PASTE51(func, v1, v2, v3, v4, v5, 
v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE50(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50) +#define NLOHMANN_JSON_PASTE52(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE51(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51) +#define NLOHMANN_JSON_PASTE53(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE52(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52) +#define NLOHMANN_JSON_PASTE54(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE53(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53) +#define NLOHMANN_JSON_PASTE55(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE54(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54) +#define NLOHMANN_JSON_PASTE56(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE55(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55) +#define 
NLOHMANN_JSON_PASTE57(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE56(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56) +#define NLOHMANN_JSON_PASTE58(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE57(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57) +#define NLOHMANN_JSON_PASTE59(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE58(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58) +#define NLOHMANN_JSON_PASTE60(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE59(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59) +#define NLOHMANN_JSON_PASTE61(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE60(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60) +#define NLOHMANN_JSON_PASTE62(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, 
v54, v55, v56, v57, v58, v59, v60, v61) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE61(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61) +#define NLOHMANN_JSON_PASTE63(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61, v62) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE62(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61, v62) +#define NLOHMANN_JSON_PASTE64(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61, v62, v63) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE63(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61, v62, v63) + +#define NLOHMANN_JSON_TO(v1) nlohmann_json_j[#v1] = nlohmann_json_t.v1; +#define NLOHMANN_JSON_FROM(v1) nlohmann_json_j.at(#v1).get_to(nlohmann_json_t.v1); +#define NLOHMANN_JSON_FROM_WITH_DEFAULT(v1) nlohmann_json_t.v1 = nlohmann_json_j.value(#v1, nlohmann_json_default_obj.v1); + +/*! +@brief macro +@def NLOHMANN_DEFINE_TYPE_INTRUSIVE +@since version 3.9.0 +*/ +#define NLOHMANN_DEFINE_TYPE_INTRUSIVE(Type, ...) \ + friend void to_json(nlohmann::json& nlohmann_json_j, const Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_TO, __VA_ARGS__)) } \ + friend void from_json(const nlohmann::json& nlohmann_json_j, Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_FROM, __VA_ARGS__)) } + +#define NLOHMANN_DEFINE_TYPE_INTRUSIVE_WITH_DEFAULT(Type, ...) \ + friend void to_json(nlohmann::json& nlohmann_json_j, const Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_TO, __VA_ARGS__)) } \ + friend void from_json(const nlohmann::json& nlohmann_json_j, Type& nlohmann_json_t) { Type nlohmann_json_default_obj; NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_FROM_WITH_DEFAULT, __VA_ARGS__)) } + +/*! +@brief macro +@def NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE +@since version 3.9.0 +*/ +#define NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE(Type, ...) \ + inline void to_json(nlohmann::json& nlohmann_json_j, const Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_TO, __VA_ARGS__)) } \ + inline void from_json(const nlohmann::json& nlohmann_json_j, Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_FROM, __VA_ARGS__)) } + +#define NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE_WITH_DEFAULT(Type, ...) 
\ + inline void to_json(nlohmann::json& nlohmann_json_j, const Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_TO, __VA_ARGS__)) } \ + inline void from_json(const nlohmann::json& nlohmann_json_j, Type& nlohmann_json_t) { Type nlohmann_json_default_obj; NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_FROM_WITH_DEFAULT, __VA_ARGS__)) } + + +// inspired from https://stackoverflow.com/a/26745591 +// allows to call any std function as if (e.g. with begin): +// using std::begin; begin(x); +// +// it allows using the detected idiom to retrieve the return type +// of such an expression +#define NLOHMANN_CAN_CALL_STD_FUNC_IMPL(std_name) \ + namespace detail { \ + using std::std_name; \ + \ + template<typename... T> \ + using result_of_##std_name = decltype(std_name(std::declval<T>()...)); \ + } \ + \ + namespace detail2 { \ + struct std_name##_tag \ + { \ + }; \ + \ + template<typename... T> \ + std_name##_tag std_name(T&&...); \ + \ + template<typename... T> \ + using result_of_##std_name = decltype(std_name(std::declval<T>()...)); \ + \ + template<typename... T> \ + struct would_call_std_##std_name \ + { \ + static constexpr auto const value = ::nlohmann::detail:: \ + is_detected_exact<std_name##_tag, result_of_##std_name, T...>::value; \ + }; \ + } /* namespace detail2 */ \ + \ + template<typename... T> \ + struct would_call_std_##std_name : detail2::would_call_std_##std_name<T...> \ + { \ + } + +#ifndef JSON_USE_IMPLICIT_CONVERSIONS + #define JSON_USE_IMPLICIT_CONVERSIONS 1 +#endif + +#if JSON_USE_IMPLICIT_CONVERSIONS + #define JSON_EXPLICIT +#else + #define JSON_EXPLICIT explicit +#endif + +#ifndef JSON_DIAGNOSTICS + #define JSON_DIAGNOSTICS 0 +#endif + +#ifndef JSON_USE_LEGACY_DISCARDED_VALUE_COMPARISON + #define JSON_USE_LEGACY_DISCARDED_VALUE_COMPARISON 0 +#endif + +#if JSON_HAS_THREE_WAY_COMPARISON + #include <compare> // partial_ordering +#endif + +namespace nlohmann +{ +namespace detail +{ +/////////////////////////// +// JSON type enumeration // +/////////////////////////// + +/*! +@brief the JSON type enumeration + +This enumeration collects the different JSON types. It is internally used to +distinguish the stored values, and the functions @ref basic_json::is_null(), +@ref basic_json::is_object(), @ref basic_json::is_array(), +@ref basic_json::is_string(), @ref basic_json::is_boolean(), +@ref basic_json::is_number() (with @ref basic_json::is_number_integer(), +@ref basic_json::is_number_unsigned(), and @ref basic_json::is_number_float()), +@ref basic_json::is_discarded(), @ref basic_json::is_primitive(), and +@ref basic_json::is_structured() rely on it. + +@note There are three enumeration entries (number_integer, number_unsigned, and +number_float), because the library distinguishes these three types for numbers: +@ref basic_json::number_unsigned_t is used for unsigned integers, +@ref basic_json::number_integer_t is used for signed integers, and +@ref basic_json::number_float_t is used for floating-point numbers or to +approximate integers which do not fit in the limits of their respective type. 
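+
+For illustration, a minimal sketch of how C++ values map onto these entries
+when using the default `json` specialization (the variable names are only
+illustrative):
+@code{.cpp}
+json j_signed   = -7;   // stored as value_t::number_integer
+json j_unsigned = 7u;   // stored as value_t::number_unsigned
+json j_float    = 7.5;  // stored as value_t::number_float
+// j_signed.is_number_integer(), j_unsigned.is_number_unsigned() and
+// j_float.is_number_float() each return true for the values above.
+@endcode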
+ +@sa see @ref basic_json::basic_json(const value_t value_type) -- create a JSON +value with the default value for a given type + +@since version 1.0.0 +*/ +enum class value_t : std::uint8_t +{ + null, ///< null value + object, ///< object (unordered set of name/value pairs) + array, ///< array (ordered collection of values) + string, ///< string value + boolean, ///< boolean value + number_integer, ///< number value (signed integer) + number_unsigned, ///< number value (unsigned integer) + number_float, ///< number value (floating-point) + binary, ///< binary array (ordered collection of bytes) + discarded ///< discarded by the parser callback function +}; + +/*! +@brief comparison operator for JSON types + +Returns an ordering that is similar to Python: +- order: null < boolean < number < object < array < string < binary +- furthermore, each type is not smaller than itself +- discarded values are not comparable +- binary is represented as a b"" string in python and directly comparable to a + string; however, making a binary array directly comparable with a string would + be surprising behavior in a JSON file. + +@since version 1.0.0 +*/ +#if JSON_HAS_THREE_WAY_COMPARISON + inline std::partial_ordering operator<=>(const value_t lhs, const value_t rhs) noexcept // *NOPAD* +#else + inline bool operator<(const value_t lhs, const value_t rhs) noexcept +#endif +{ + static constexpr std::array<std::uint8_t, 9> order = {{ + 0 /* null */, 3 /* object */, 4 /* array */, 5 /* string */, + 1 /* boolean */, 2 /* integer */, 2 /* unsigned */, 2 /* float */, + 6 /* binary */ + } + }; + + const auto l_index = static_cast<std::size_t>(lhs); + const auto r_index = static_cast<std::size_t>(rhs); +#if JSON_HAS_THREE_WAY_COMPARISON + if (l_index < order.size() && r_index < order.size()) + { + return order[l_index] <=> order[r_index]; // *NOPAD* + } + return std::partial_ordering::unordered; +#else + return l_index < order.size() && r_index < order.size() && order[l_index] < order[r_index]; +#endif +} + +// GCC selects the built-in operator< over an operator rewritten from +// a user-defined spaceship operator +// Clang, MSVC, and ICC select the rewritten candidate +// (see GCC bug https://gcc.gnu.org/bugzilla/show_bug.cgi?id=105200) +#if JSON_HAS_THREE_WAY_COMPARISON && defined(__GNUC__) +inline bool operator<(const value_t lhs, const value_t rhs) noexcept +{ + return std::is_lt(lhs <=> rhs); // *NOPAD* +} +#endif +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/string_escape.hpp> + + +// #include <nlohmann/detail/macro_scope.hpp> + + +namespace nlohmann +{ +namespace detail +{ + +/*! +@brief replace all occurrences of a substring by another string + +@param[in,out] s the string to manipulate; changed so that all + occurrences of @a f are replaced with @a t +@param[in] f the substring to replace with @a t +@param[in] t the string to replace @a f + +@pre The search string @a f must not be empty. **This precondition is +enforced with an assertion.** + +@since version 2.0.0 +*/ +template<typename StringType> +inline void replace_substring(StringType& s, const StringType& f, + const StringType& t) +{ + JSON_ASSERT(!f.empty()); + for (auto pos = s.find(f); // find first occurrence of f + pos != StringType::npos; // make sure f was found + s.replace(pos, f.size(), t), // replace with t, and + pos = s.find(f, pos + t.size())) // find next occurrence of f + {} +} + +/*! + * @brief string escaping as described in RFC 6901 (Sect. 
4) + * @param[in] s string to escape + * @return escaped string + * + * Note the order of escaping "~" to "~0" and "/" to "~1" is important. + */ +template<typename StringType> +inline StringType escape(StringType s) +{ + replace_substring(s, StringType{"~"}, StringType{"~0"}); + replace_substring(s, StringType{"/"}, StringType{"~1"}); + return s; +} + +/*! + * @brief string unescaping as described in RFC 6901 (Sect. 4) + * @param[in] s string to unescape + * @return unescaped string + * + * Note the order of escaping "~1" to "/" and "~0" to "~" is important. + */ +template<typename StringType> +static void unescape(StringType& s) +{ + replace_substring(s, StringType{"~1"}, StringType{"/"}); + replace_substring(s, StringType{"~0"}, StringType{"~"}); +} + +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/input/position_t.hpp> + + +#include <cstddef> // size_t + +namespace nlohmann +{ +namespace detail +{ +/// struct to capture the start position of the current token +struct position_t +{ + /// the total number of characters read + std::size_t chars_read_total = 0; + /// the number of characters read in the current line + std::size_t chars_read_current_line = 0; + /// the number of lines read + std::size_t lines_read = 0; + + /// conversion to size_t to preserve SAX interface + constexpr operator size_t() const + { + return chars_read_total; + } +}; + +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/meta/cpp_future.hpp> + + +#include <cstddef> // size_t +#include <type_traits> // conditional, enable_if, false_type, integral_constant, is_constructible, is_integral, is_same, remove_cv, remove_reference, true_type +#include <utility> // index_sequence, make_index_sequence, index_sequence_for + +// #include <nlohmann/detail/macro_scope.hpp> + + +namespace nlohmann +{ +namespace detail +{ + +template<typename T> +using uncvref_t = typename std::remove_cv<typename std::remove_reference<T>::type>::type; + +#ifdef JSON_HAS_CPP_14 + +// the following utilities are natively available in C++14 +using std::enable_if_t; +using std::index_sequence; +using std::make_index_sequence; +using std::index_sequence_for; + +#else + +// alias templates to reduce boilerplate +template<bool B, typename T = void> +using enable_if_t = typename std::enable_if<B, T>::type; + +// The following code is taken from https://github.com/abseil/abseil-cpp/blob/10cb35e459f5ecca5b2ff107635da0bfa41011b4/absl/utility/utility.h +// which is part of Google Abseil (https://github.com/abseil/abseil-cpp), licensed under the Apache License 2.0. + +//// START OF CODE FROM GOOGLE ABSEIL + +// integer_sequence +// +// Class template representing a compile-time integer sequence. An instantiation +// of `integer_sequence<T, Ints...>` has a sequence of integers encoded in its +// type through its template arguments (which is a common need when +// working with C++11 variadic templates). `absl::integer_sequence` is designed +// to be a drop-in replacement for C++14's `std::integer_sequence`. +// +// Example: +// +// template< class T, T... Ints > +// void user_function(integer_sequence<T, Ints...>); +// +// int main() +// { +// // user_function's `T` will be deduced to `int` and `Ints...` +// // will be deduced to `0, 1, 2, 3, 4`. +// user_function(make_integer_sequence<int, 5>()); +// } +template <typename T, T... 
Ints> +struct integer_sequence +{ + using value_type = T; + static constexpr std::size_t size() noexcept + { + return sizeof...(Ints); + } +}; + +// index_sequence +// +// A helper template for an `integer_sequence` of `size_t`, +// `absl::index_sequence` is designed to be a drop-in replacement for C++14's +// `std::index_sequence`. +template <size_t... Ints> +using index_sequence = integer_sequence<size_t, Ints...>; + +namespace utility_internal +{ + +template <typename Seq, size_t SeqSize, size_t Rem> +struct Extend; + +// Note that SeqSize == sizeof...(Ints). It's passed explicitly for efficiency. +template <typename T, T... Ints, size_t SeqSize> +struct Extend<integer_sequence<T, Ints...>, SeqSize, 0> +{ + using type = integer_sequence < T, Ints..., (Ints + SeqSize)... >; +}; + +template <typename T, T... Ints, size_t SeqSize> +struct Extend<integer_sequence<T, Ints...>, SeqSize, 1> +{ + using type = integer_sequence < T, Ints..., (Ints + SeqSize)..., 2 * SeqSize >; +}; + +// Recursion helper for 'make_integer_sequence<T, N>'. +// 'Gen<T, N>::type' is an alias for 'integer_sequence<T, 0, 1, ... N-1>'. +template <typename T, size_t N> +struct Gen +{ + using type = + typename Extend < typename Gen < T, N / 2 >::type, N / 2, N % 2 >::type; +}; + +template <typename T> +struct Gen<T, 0> +{ + using type = integer_sequence<T>; +}; + +} // namespace utility_internal + +// Compile-time sequences of integers + +// make_integer_sequence +// +// This template alias is equivalent to +// `integer_sequence<int, 0, 1, ..., N-1>`, and is designed to be a drop-in +// replacement for C++14's `std::make_integer_sequence`. +template <typename T, T N> +using make_integer_sequence = typename utility_internal::Gen<T, N>::type; + +// make_index_sequence +// +// This template alias is equivalent to `index_sequence<0, 1, ..., N-1>`, +// and is designed to be a drop-in replacement for C++14's +// `std::make_index_sequence`. +template <size_t N> +using make_index_sequence = make_integer_sequence<size_t, N>; + +// index_sequence_for +// +// Converts a typename pack into an index sequence of the same length, and +// is designed to be a drop-in replacement for C++14's +// `std::index_sequence_for()` +template <typename... 
Ts> +using index_sequence_for = make_index_sequence<sizeof...(Ts)>; + +//// END OF CODE FROM GOOGLE ABSEIL + +#endif + +// dispatch utility (taken from ranges-v3) +template<unsigned N> struct priority_tag : priority_tag < N - 1 > {}; +template<> struct priority_tag<0> {}; + +// taken from ranges-v3 +template<typename T> +struct static_const +{ + static constexpr T value{}; +}; + +#ifndef JSON_HAS_CPP_17 + + template<typename T> + constexpr T static_const<T>::value; // NOLINT(readability-redundant-declaration) + +#endif + +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/meta/type_traits.hpp> + + +#include <limits> // numeric_limits +#include <type_traits> // false_type, is_constructible, is_integral, is_same, true_type +#include <utility> // declval +#include <tuple> // tuple + +// #include <nlohmann/detail/macro_scope.hpp> + + +// #include <nlohmann/detail/iterators/iterator_traits.hpp> + + +#include <iterator> // random_access_iterator_tag + +// #include <nlohmann/detail/meta/void_t.hpp> + +// #include <nlohmann/detail/meta/cpp_future.hpp> + + +namespace nlohmann +{ +namespace detail +{ +template<typename It, typename = void> +struct iterator_types {}; + +template<typename It> +struct iterator_types < + It, + void_t<typename It::difference_type, typename It::value_type, typename It::pointer, + typename It::reference, typename It::iterator_category >> +{ + using difference_type = typename It::difference_type; + using value_type = typename It::value_type; + using pointer = typename It::pointer; + using reference = typename It::reference; + using iterator_category = typename It::iterator_category; +}; + +// This is required as some compilers implement std::iterator_traits in a way that +// doesn't work with SFINAE. See https://github.com/nlohmann/json/issues/1341. +template<typename T, typename = void> +struct iterator_traits +{ +}; + +template<typename T> +struct iterator_traits < T, enable_if_t < !std::is_pointer<T>::value >> + : iterator_types<T> +{ +}; + +template<typename T> +struct iterator_traits<T*, enable_if_t<std::is_object<T>::value>> +{ + using iterator_category = std::random_access_iterator_tag; + using value_type = T; + using difference_type = ptrdiff_t; + using pointer = T*; + using reference = T&; +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/meta/call_std/begin.hpp> + + +// #include <nlohmann/detail/macro_scope.hpp> + + +namespace nlohmann +{ +NLOHMANN_CAN_CALL_STD_FUNC_IMPL(begin); +} // namespace nlohmann + +// #include <nlohmann/detail/meta/call_std/end.hpp> + + +// #include <nlohmann/detail/macro_scope.hpp> + + +namespace nlohmann +{ +NLOHMANN_CAN_CALL_STD_FUNC_IMPL(end); +} // namespace nlohmann + +// #include <nlohmann/detail/meta/cpp_future.hpp> + +// #include <nlohmann/detail/meta/detected.hpp> + +// #include <nlohmann/json_fwd.hpp> +#ifndef INCLUDE_NLOHMANN_JSON_FWD_HPP_ +#define INCLUDE_NLOHMANN_JSON_FWD_HPP_ + +#include <cstdint> // int64_t, uint64_t +#include <map> // map +#include <memory> // allocator +#include <string> // string +#include <vector> // vector + +/*! +@brief namespace for Niels Lohmann +@see https://github.com/nlohmann +@since version 1.0.0 +*/ +namespace nlohmann +{ +/*! +@brief default JSONSerializer template argument + +This serializer ignores the template arguments and uses ADL +([argument-dependent lookup](https://en.cppreference.com/w/cpp/language/adl)) +for serialization. 
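+
+As a minimal usage sketch, conversions for a hypothetical user type
+`my_ns::point` can alternatively be supplied by specializing this template
+with static `to_json`/`from_json` members:
+@code{.cpp}
+namespace my_ns { struct point { double x; double y; }; }
+
+namespace nlohmann
+{
+template<>
+struct adl_serializer<my_ns::point>
+{
+    static void to_json(json& j, const my_ns::point& p)
+    {
+        j = json{{"x", p.x}, {"y", p.y}};
+    }
+    static void from_json(const json& j, my_ns::point& p)
+    {
+        j.at("x").get_to(p.x);
+        j.at("y").get_to(p.y);
+    }
+};
+} // namespace nlohmann
+@endcode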
+*/ +template<typename T = void, typename SFINAE = void> +struct adl_serializer; + +/// a class to store JSON values +/// @sa https://json.nlohmann.me/api/basic_json/ +template<template<typename U, typename V, typename... Args> class ObjectType = + std::map, + template<typename U, typename... Args> class ArrayType = std::vector, + class StringType = std::string, class BooleanType = bool, + class NumberIntegerType = std::int64_t, + class NumberUnsignedType = std::uint64_t, + class NumberFloatType = double, + template<typename U> class AllocatorType = std::allocator, + template<typename T, typename SFINAE = void> class JSONSerializer = + adl_serializer, + class BinaryType = std::vector<std::uint8_t>> +class basic_json; + +/// @brief JSON Pointer defines a string syntax for identifying a specific value within a JSON document +/// @sa https://json.nlohmann.me/api/json_pointer/ +template<typename BasicJsonType> +class json_pointer; + +/*! +@brief default specialization +@sa https://json.nlohmann.me/api/json/ +*/ +using json = basic_json<>; + +/// @brief a minimal map-like container that preserves insertion order +/// @sa https://json.nlohmann.me/api/ordered_map/ +template<class Key, class T, class IgnoredLess, class Allocator> +struct ordered_map; + +/// @brief specialization that maintains the insertion order of object keys +/// @sa https://json.nlohmann.me/api/ordered_json/ +using ordered_json = basic_json<nlohmann::ordered_map>; + +} // namespace nlohmann + +#endif // INCLUDE_NLOHMANN_JSON_FWD_HPP_ + + +namespace nlohmann +{ +/*! +@brief detail namespace with internal helper functions + +This namespace collects functions that should not be exposed, +implementations of some @ref basic_json methods, and meta-programming helpers. + +@since version 2.1.0 +*/ +namespace detail +{ +///////////// +// helpers // +///////////// + +// Note to maintainers: +// +// Every trait in this file expects a non CV-qualified type. +// The only exceptions are in the 'aliases for detected' section +// (i.e. those of the form: decltype(T::member_function(std::declval<T>()))) +// +// In this case, T has to be properly CV-qualified to constraint the function arguments +// (e.g. 
to_json(BasicJsonType&, const T&)) + +template<typename> struct is_basic_json : std::false_type {}; + +NLOHMANN_BASIC_JSON_TPL_DECLARATION +struct is_basic_json<NLOHMANN_BASIC_JSON_TPL> : std::true_type {}; + +// used by exceptions create() member functions +// true_type for pointer to possibly cv-qualified basic_json or std::nullptr_t +// false_type otherwise +template<typename BasicJsonContext> +struct is_basic_json_context : + std::integral_constant < bool, + is_basic_json<typename std::remove_cv<typename std::remove_pointer<BasicJsonContext>::type>::type>::value + || std::is_same<BasicJsonContext, std::nullptr_t>::value > +{}; + +////////////////////// +// json_ref helpers // +////////////////////// + +template<typename> +class json_ref; + +template<typename> +struct is_json_ref : std::false_type {}; + +template<typename T> +struct is_json_ref<json_ref<T>> : std::true_type {}; + +////////////////////////// +// aliases for detected // +////////////////////////// + +template<typename T> +using mapped_type_t = typename T::mapped_type; + +template<typename T> +using key_type_t = typename T::key_type; + +template<typename T> +using value_type_t = typename T::value_type; + +template<typename T> +using difference_type_t = typename T::difference_type; + +template<typename T> +using pointer_t = typename T::pointer; + +template<typename T> +using reference_t = typename T::reference; + +template<typename T> +using iterator_category_t = typename T::iterator_category; + +template<typename T, typename... Args> +using to_json_function = decltype(T::to_json(std::declval<Args>()...)); + +template<typename T, typename... Args> +using from_json_function = decltype(T::from_json(std::declval<Args>()...)); + +template<typename T, typename U> +using get_template_function = decltype(std::declval<T>().template get<U>()); + +// trait checking if JSONSerializer<T>::from_json(json const&, udt&) exists +template<typename BasicJsonType, typename T, typename = void> +struct has_from_json : std::false_type {}; + +// trait checking if j.get<T> is valid +// use this trait instead of std::is_constructible or std::is_convertible, +// both rely on, or make use of implicit conversions, and thus fail when T +// has several constructors/operator= (see https://github.com/nlohmann/json/issues/958) +template <typename BasicJsonType, typename T> +struct is_getable +{ + static constexpr bool value = is_detected<get_template_function, const BasicJsonType&, T>::value; +}; + +template<typename BasicJsonType, typename T> +struct has_from_json < BasicJsonType, T, enable_if_t < !is_basic_json<T>::value >> +{ + using serializer = typename BasicJsonType::template json_serializer<T, void>; + + static constexpr bool value = + is_detected_exact<void, from_json_function, serializer, + const BasicJsonType&, T&>::value; +}; + +// This trait checks if JSONSerializer<T>::from_json(json const&) exists +// this overload is used for non-default-constructible user-defined-types +template<typename BasicJsonType, typename T, typename = void> +struct has_non_default_from_json : std::false_type {}; + +template<typename BasicJsonType, typename T> +struct has_non_default_from_json < BasicJsonType, T, enable_if_t < !is_basic_json<T>::value >> +{ + using serializer = typename BasicJsonType::template json_serializer<T, void>; + + static constexpr bool value = + is_detected_exact<T, from_json_function, serializer, + const BasicJsonType&>::value; +}; + +// This trait checks if BasicJsonType::json_serializer<T>::to_json exists +// Do not evaluate the trait when 
T is a basic_json type, to avoid template instantiation infinite recursion. +template<typename BasicJsonType, typename T, typename = void> +struct has_to_json : std::false_type {}; + +template<typename BasicJsonType, typename T> +struct has_to_json < BasicJsonType, T, enable_if_t < !is_basic_json<T>::value >> +{ + using serializer = typename BasicJsonType::template json_serializer<T, void>; + + static constexpr bool value = + is_detected_exact<void, to_json_function, serializer, BasicJsonType&, + T>::value; +}; + +template<typename T> +using detect_key_compare = typename T::key_compare; + +template<typename T> +struct has_key_compare : std::integral_constant<bool, is_detected<detect_key_compare, T>::value> {}; + +// obtains the actual object key comparator +template<typename BasicJsonType> +struct actual_object_comparator +{ + using object_t = typename BasicJsonType::object_t; + using object_comparator_t = typename BasicJsonType::default_object_comparator_t; + using type = typename std::conditional < has_key_compare<object_t>::value, + typename object_t::key_compare, object_comparator_t>::type; +}; + +template<typename BasicJsonType> +using actual_object_comparator_t = typename actual_object_comparator<BasicJsonType>::type; + +/////////////////// +// is_ functions // +/////////////////// + +// https://en.cppreference.com/w/cpp/types/conjunction +template<class...> struct conjunction : std::true_type { }; +template<class B> struct conjunction<B> : B { }; +template<class B, class... Bn> +struct conjunction<B, Bn...> +: std::conditional<bool(B::value), conjunction<Bn...>, B>::type {}; + +// https://en.cppreference.com/w/cpp/types/negation +template<class B> struct negation : std::integral_constant < bool, !B::value > { }; + +// Reimplementation of is_constructible and is_default_constructible, due to them being broken for +// std::pair and std::tuple until LWG 2367 fix (see https://cplusplus.github.io/LWG/lwg-defects.html#2367). +// This causes compile errors in e.g. clang 3.5 or gcc 4.9. +template <typename T> +struct is_default_constructible : std::is_default_constructible<T> {}; + +template <typename T1, typename T2> +struct is_default_constructible<std::pair<T1, T2>> + : conjunction<is_default_constructible<T1>, is_default_constructible<T2>> {}; + +template <typename T1, typename T2> +struct is_default_constructible<const std::pair<T1, T2>> + : conjunction<is_default_constructible<T1>, is_default_constructible<T2>> {}; + +template <typename... Ts> +struct is_default_constructible<std::tuple<Ts...>> + : conjunction<is_default_constructible<Ts>...> {}; + +template <typename... Ts> +struct is_default_constructible<const std::tuple<Ts...>> + : conjunction<is_default_constructible<Ts>...> {}; + + +template <typename T, typename... Args> +struct is_constructible : std::is_constructible<T, Args...> {}; + +template <typename T1, typename T2> +struct is_constructible<std::pair<T1, T2>> : is_default_constructible<std::pair<T1, T2>> {}; + +template <typename T1, typename T2> +struct is_constructible<const std::pair<T1, T2>> : is_default_constructible<const std::pair<T1, T2>> {}; + +template <typename... Ts> +struct is_constructible<std::tuple<Ts...>> : is_default_constructible<std::tuple<Ts...>> {}; + +template <typename... 
Ts> +struct is_constructible<const std::tuple<Ts...>> : is_default_constructible<const std::tuple<Ts...>> {}; + + +template<typename T, typename = void> +struct is_iterator_traits : std::false_type {}; + +template<typename T> +struct is_iterator_traits<iterator_traits<T>> +{ + private: + using traits = iterator_traits<T>; + + public: + static constexpr auto value = + is_detected<value_type_t, traits>::value && + is_detected<difference_type_t, traits>::value && + is_detected<pointer_t, traits>::value && + is_detected<iterator_category_t, traits>::value && + is_detected<reference_t, traits>::value; +}; + +template<typename T> +struct is_range +{ + private: + using t_ref = typename std::add_lvalue_reference<T>::type; + + using iterator = detected_t<result_of_begin, t_ref>; + using sentinel = detected_t<result_of_end, t_ref>; + + // to be 100% correct, it should use https://en.cppreference.com/w/cpp/iterator/input_or_output_iterator + // and https://en.cppreference.com/w/cpp/iterator/sentinel_for + // but reimplementing these would be too much work, as a lot of other concepts are used underneath + static constexpr auto is_iterator_begin = + is_iterator_traits<iterator_traits<iterator>>::value; + + public: + static constexpr bool value = !std::is_same<iterator, nonesuch>::value && !std::is_same<sentinel, nonesuch>::value && is_iterator_begin; +}; + +template<typename R> +using iterator_t = enable_if_t<is_range<R>::value, result_of_begin<decltype(std::declval<R&>())>>; + +template<typename T> +using range_value_t = value_type_t<iterator_traits<iterator_t<T>>>; + +// The following implementation of is_complete_type is taken from +// https://blogs.msdn.microsoft.com/vcblog/2015/12/02/partial-support-for-expression-sfinae-in-vs-2015-update-1/ +// and is written by Xiang Fan who agreed to using it in this library. + +template<typename T, typename = void> +struct is_complete_type : std::false_type {}; + +template<typename T> +struct is_complete_type<T, decltype(void(sizeof(T)))> : std::true_type {}; + +template<typename BasicJsonType, typename CompatibleObjectType, + typename = void> +struct is_compatible_object_type_impl : std::false_type {}; + +template<typename BasicJsonType, typename CompatibleObjectType> +struct is_compatible_object_type_impl < + BasicJsonType, CompatibleObjectType, + enable_if_t < is_detected<mapped_type_t, CompatibleObjectType>::value&& + is_detected<key_type_t, CompatibleObjectType>::value >> +{ + using object_t = typename BasicJsonType::object_t; + + // macOS's is_constructible does not play well with nonesuch... 
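+    // in other words: the candidate map type is considered compatible when
+    // object_t's key_type and mapped_type can each be constructed from the
+    // candidate's key_type and mapped_type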
+ static constexpr bool value = + is_constructible<typename object_t::key_type, + typename CompatibleObjectType::key_type>::value && + is_constructible<typename object_t::mapped_type, + typename CompatibleObjectType::mapped_type>::value; +}; + +template<typename BasicJsonType, typename CompatibleObjectType> +struct is_compatible_object_type + : is_compatible_object_type_impl<BasicJsonType, CompatibleObjectType> {}; + +template<typename BasicJsonType, typename ConstructibleObjectType, + typename = void> +struct is_constructible_object_type_impl : std::false_type {}; + +template<typename BasicJsonType, typename ConstructibleObjectType> +struct is_constructible_object_type_impl < + BasicJsonType, ConstructibleObjectType, + enable_if_t < is_detected<mapped_type_t, ConstructibleObjectType>::value&& + is_detected<key_type_t, ConstructibleObjectType>::value >> +{ + using object_t = typename BasicJsonType::object_t; + + static constexpr bool value = + (is_default_constructible<ConstructibleObjectType>::value && + (std::is_move_assignable<ConstructibleObjectType>::value || + std::is_copy_assignable<ConstructibleObjectType>::value) && + (is_constructible<typename ConstructibleObjectType::key_type, + typename object_t::key_type>::value && + std::is_same < + typename object_t::mapped_type, + typename ConstructibleObjectType::mapped_type >::value)) || + (has_from_json<BasicJsonType, + typename ConstructibleObjectType::mapped_type>::value || + has_non_default_from_json < + BasicJsonType, + typename ConstructibleObjectType::mapped_type >::value); +}; + +template<typename BasicJsonType, typename ConstructibleObjectType> +struct is_constructible_object_type + : is_constructible_object_type_impl<BasicJsonType, + ConstructibleObjectType> {}; + +template<typename BasicJsonType, typename CompatibleStringType> +struct is_compatible_string_type +{ + static constexpr auto value = + is_constructible<typename BasicJsonType::string_t, CompatibleStringType>::value; +}; + +template<typename BasicJsonType, typename ConstructibleStringType> +struct is_constructible_string_type +{ + // launder type through decltype() to fix compilation failure on ICPC +#ifdef __INTEL_COMPILER + using laundered_type = decltype(std::declval<ConstructibleStringType>()); +#else + using laundered_type = ConstructibleStringType; +#endif + + static constexpr auto value = + is_constructible<laundered_type, + typename BasicJsonType::string_t>::value; +}; + +template<typename BasicJsonType, typename CompatibleArrayType, typename = void> +struct is_compatible_array_type_impl : std::false_type {}; + +template<typename BasicJsonType, typename CompatibleArrayType> +struct is_compatible_array_type_impl < + BasicJsonType, CompatibleArrayType, + enable_if_t < + is_detected<iterator_t, CompatibleArrayType>::value&& + is_iterator_traits<iterator_traits<detected_t<iterator_t, CompatibleArrayType>>>::value&& +// special case for types like std::filesystem::path whose iterator's value_type are themselves +// c.f. 
https://github.com/nlohmann/json/pull/3073 + !std::is_same<CompatibleArrayType, detected_t<range_value_t, CompatibleArrayType>>::value >> +{ + static constexpr bool value = + is_constructible<BasicJsonType, + range_value_t<CompatibleArrayType>>::value; +}; + +template<typename BasicJsonType, typename CompatibleArrayType> +struct is_compatible_array_type + : is_compatible_array_type_impl<BasicJsonType, CompatibleArrayType> {}; + +template<typename BasicJsonType, typename ConstructibleArrayType, typename = void> +struct is_constructible_array_type_impl : std::false_type {}; + +template<typename BasicJsonType, typename ConstructibleArrayType> +struct is_constructible_array_type_impl < + BasicJsonType, ConstructibleArrayType, + enable_if_t<std::is_same<ConstructibleArrayType, + typename BasicJsonType::value_type>::value >> + : std::true_type {}; + +template<typename BasicJsonType, typename ConstructibleArrayType> +struct is_constructible_array_type_impl < + BasicJsonType, ConstructibleArrayType, + enable_if_t < !std::is_same<ConstructibleArrayType, + typename BasicJsonType::value_type>::value&& + !is_compatible_string_type<BasicJsonType, ConstructibleArrayType>::value&& + is_default_constructible<ConstructibleArrayType>::value&& +(std::is_move_assignable<ConstructibleArrayType>::value || + std::is_copy_assignable<ConstructibleArrayType>::value)&& +is_detected<iterator_t, ConstructibleArrayType>::value&& +is_iterator_traits<iterator_traits<detected_t<iterator_t, ConstructibleArrayType>>>::value&& +is_detected<range_value_t, ConstructibleArrayType>::value&& +// special case for types like std::filesystem::path whose iterator's value_type are themselves +// c.f. https://github.com/nlohmann/json/pull/3073 +!std::is_same<ConstructibleArrayType, detected_t<range_value_t, ConstructibleArrayType>>::value&& + is_complete_type < + detected_t<range_value_t, ConstructibleArrayType >>::value >> +{ + using value_type = range_value_t<ConstructibleArrayType>; + + static constexpr bool value = + std::is_same<value_type, + typename BasicJsonType::array_t::value_type>::value || + has_from_json<BasicJsonType, + value_type>::value || + has_non_default_from_json < + BasicJsonType, + value_type >::value; +}; + +template<typename BasicJsonType, typename ConstructibleArrayType> +struct is_constructible_array_type + : is_constructible_array_type_impl<BasicJsonType, ConstructibleArrayType> {}; + +template<typename RealIntegerType, typename CompatibleNumberIntegerType, + typename = void> +struct is_compatible_integer_type_impl : std::false_type {}; + +template<typename RealIntegerType, typename CompatibleNumberIntegerType> +struct is_compatible_integer_type_impl < + RealIntegerType, CompatibleNumberIntegerType, + enable_if_t < std::is_integral<RealIntegerType>::value&& + std::is_integral<CompatibleNumberIntegerType>::value&& + !std::is_same<bool, CompatibleNumberIntegerType>::value >> +{ + // is there an assert somewhere on overflows? 
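+    // the candidate type qualifies when the target integer type is constructible
+    // from it, numeric_limits reports it as an integer, and both types share the
+    // same signedness (bool was already excluded above)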
+ using RealLimits = std::numeric_limits<RealIntegerType>; + using CompatibleLimits = std::numeric_limits<CompatibleNumberIntegerType>; + + static constexpr auto value = + is_constructible<RealIntegerType, + CompatibleNumberIntegerType>::value && + CompatibleLimits::is_integer && + RealLimits::is_signed == CompatibleLimits::is_signed; +}; + +template<typename RealIntegerType, typename CompatibleNumberIntegerType> +struct is_compatible_integer_type + : is_compatible_integer_type_impl<RealIntegerType, + CompatibleNumberIntegerType> {}; + +template<typename BasicJsonType, typename CompatibleType, typename = void> +struct is_compatible_type_impl: std::false_type {}; + +template<typename BasicJsonType, typename CompatibleType> +struct is_compatible_type_impl < + BasicJsonType, CompatibleType, + enable_if_t<is_complete_type<CompatibleType>::value >> +{ + static constexpr bool value = + has_to_json<BasicJsonType, CompatibleType>::value; +}; + +template<typename BasicJsonType, typename CompatibleType> +struct is_compatible_type + : is_compatible_type_impl<BasicJsonType, CompatibleType> {}; + +template<typename T1, typename T2> +struct is_constructible_tuple : std::false_type {}; + +template<typename T1, typename... Args> +struct is_constructible_tuple<T1, std::tuple<Args...>> : conjunction<is_constructible<T1, Args>...> {}; + +template<typename BasicJsonType, typename T> +struct is_json_iterator_of : std::false_type {}; + +template<typename BasicJsonType> +struct is_json_iterator_of<BasicJsonType, typename BasicJsonType::iterator> : std::true_type {}; + +template<typename BasicJsonType> +struct is_json_iterator_of<BasicJsonType, typename BasicJsonType::const_iterator> : std::true_type +{}; + +// checks if a given type T is a template specialization of Primary +template<template <typename...> class Primary, typename T> +struct is_specialization_of : std::false_type {}; + +template<template <typename...> class Primary, typename... 
Args> +struct is_specialization_of<Primary, Primary<Args...>> : std::true_type {}; + +template<typename T> +using is_json_pointer = is_specialization_of<::nlohmann::json_pointer, uncvref_t<T>>; + +// checks if A and B are comparable using Compare functor +template<typename Compare, typename A, typename B, typename = void> +struct is_comparable : std::false_type {}; + +template<typename Compare, typename A, typename B> +struct is_comparable<Compare, A, B, void_t< +decltype(std::declval<Compare>()(std::declval<A>(), std::declval<B>())), +decltype(std::declval<Compare>()(std::declval<B>(), std::declval<A>())) +>> : std::true_type {}; + +// checks if BasicJsonType::object_t::key_type and KeyType are comparable using Compare functor +template<typename BasicJsonType, typename KeyType> +using is_key_type_comparable = typename is_comparable < + typename BasicJsonType::object_comparator_t, + const key_type_t<typename BasicJsonType::object_t>&, + KeyType >::type; + +template<typename T> +using detect_is_transparent = typename T::is_transparent; + +// type trait to check if KeyType can be used as object key +// true if: +// - KeyType is comparable with BasicJsonType::object_t::key_type +// - if ExcludeObjectKeyType is true, KeyType is not BasicJsonType::object_t::key_type +// - the comparator is transparent or RequireTransparentComparator is false +// - KeyType is not a JSON iterator or json_pointer +template<typename BasicJsonType, typename KeyTypeCVRef, bool RequireTransparentComparator = true, + bool ExcludeObjectKeyType = RequireTransparentComparator, typename KeyType = uncvref_t<KeyTypeCVRef>> +using is_usable_as_key_type = typename std::conditional < + is_key_type_comparable<BasicJsonType, KeyTypeCVRef>::value + && !(ExcludeObjectKeyType && std::is_same<KeyType, + typename BasicJsonType::object_t::key_type>::value) + && (!RequireTransparentComparator || is_detected < + detect_is_transparent, + typename BasicJsonType::object_comparator_t >::value) + && !is_json_iterator_of<BasicJsonType, KeyType>::value + && !is_json_pointer<KeyType>::value, + std::true_type, + std::false_type >::type; + +template<typename ObjectType, typename KeyType> +using detect_erase_with_key_type = decltype(std::declval<ObjectType&>().erase(std::declval<KeyType>())); + +// type trait to check if object_t has an erase() member functions accepting KeyType +template<typename BasicJsonType, typename KeyType> +using has_erase_with_key_type = typename std::conditional < + is_detected < + detect_erase_with_key_type, + typename BasicJsonType::object_t, KeyType >::value, + std::true_type, + std::false_type >::type; + +// a naive helper to check if a type is an ordered_map (exploits the fact that +// ordered_map inherits capacity() from std::vector) +template <typename T> +struct is_ordered_map +{ + using one = char; + + struct two + { + char x[2]; // NOLINT(cppcoreguidelines-avoid-c-arrays,hicpp-avoid-c-arrays,modernize-avoid-c-arrays) + }; + + template <typename C> static one test( decltype(&C::capacity) ) ; + template <typename C> static two test(...); + + enum { value = sizeof(test<T>(nullptr)) == sizeof(char) }; // NOLINT(cppcoreguidelines-pro-type-vararg,hicpp-vararg) +}; + +// to avoid useless casts (see https://github.com/nlohmann/json/issues/2893#issuecomment-889152324) +template < typename T, typename U, enable_if_t < !std::is_same<T, U>::value, int > = 0 > +T conditional_static_cast(U value) +{ + return static_cast<T>(value); +} + +template<typename T, typename U, enable_if_t<std::is_same<T, U>::value, int> = 0> +T 
conditional_static_cast(U value) +{ + return value; +} + +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/string_concat.hpp> + + +#include <cstring> // strlen +#include <string> // string +#include <utility> // forward + +// #include <nlohmann/detail/meta/cpp_future.hpp> + +// #include <nlohmann/detail/meta/detected.hpp> + + +namespace nlohmann +{ +namespace detail +{ + +inline std::size_t concat_length() +{ + return 0; +} + +template<typename... Args> +inline std::size_t concat_length(const char* cstr, Args&& ... rest); + +template<typename StringType, typename... Args> +inline std::size_t concat_length(const StringType& str, Args&& ... rest); + +template<typename... Args> +inline std::size_t concat_length(const char /*c*/, Args&& ... rest) +{ + return 1 + concat_length(std::forward<Args>(rest)...); +} + +template<typename... Args> +inline std::size_t concat_length(const char* cstr, Args&& ... rest) +{ + // cppcheck-suppress ignoredReturnValue + return ::strlen(cstr) + concat_length(std::forward<Args>(rest)...); +} + +template<typename StringType, typename... Args> +inline std::size_t concat_length(const StringType& str, Args&& ... rest) +{ + return str.size() + concat_length(std::forward<Args>(rest)...); +} + +template<typename OutStringType> +inline void concat_into(OutStringType& /*out*/) +{} + +template<typename StringType, typename Arg> +using string_can_append = decltype(std::declval<StringType&>().append(std::declval < Arg && > ())); + +template<typename StringType, typename Arg> +using detect_string_can_append = is_detected<string_can_append, StringType, Arg>; + +template<typename StringType, typename Arg> +using string_can_append_op = decltype(std::declval<StringType&>() += std::declval < Arg && > ()); + +template<typename StringType, typename Arg> +using detect_string_can_append_op = is_detected<string_can_append_op, StringType, Arg>; + +template<typename StringType, typename Arg> +using string_can_append_iter = decltype(std::declval<StringType&>().append(std::declval<const Arg&>().begin(), std::declval<const Arg&>().end())); + +template<typename StringType, typename Arg> +using detect_string_can_append_iter = is_detected<string_can_append_iter, StringType, Arg>; + +template<typename StringType, typename Arg> +using string_can_append_data = decltype(std::declval<StringType&>().append(std::declval<const Arg&>().data(), std::declval<const Arg&>().size())); + +template<typename StringType, typename Arg> +using detect_string_can_append_data = is_detected<string_can_append_data, StringType, Arg>; + +template < typename OutStringType, typename Arg, typename... Args, + enable_if_t < !detect_string_can_append<OutStringType, Arg>::value + && detect_string_can_append_op<OutStringType, Arg>::value, int > = 0 > +inline void concat_into(OutStringType& out, Arg && arg, Args && ... rest); + +template < typename OutStringType, typename Arg, typename... Args, + enable_if_t < !detect_string_can_append<OutStringType, Arg>::value + && !detect_string_can_append_op<OutStringType, Arg>::value + && detect_string_can_append_iter<OutStringType, Arg>::value, int > = 0 > +inline void concat_into(OutStringType& out, const Arg& arg, Args && ... rest); + +template < typename OutStringType, typename Arg, typename... 
Args, + enable_if_t < !detect_string_can_append<OutStringType, Arg>::value + && !detect_string_can_append_op<OutStringType, Arg>::value + && !detect_string_can_append_iter<OutStringType, Arg>::value + && detect_string_can_append_data<OutStringType, Arg>::value, int > = 0 > +inline void concat_into(OutStringType& out, const Arg& arg, Args && ... rest); + +template<typename OutStringType, typename Arg, typename... Args, + enable_if_t<detect_string_can_append<OutStringType, Arg>::value, int> = 0> +inline void concat_into(OutStringType& out, Arg && arg, Args && ... rest) +{ + out.append(std::forward<Arg>(arg)); + concat_into(out, std::forward<Args>(rest)...); +} + +template < typename OutStringType, typename Arg, typename... Args, + enable_if_t < !detect_string_can_append<OutStringType, Arg>::value + && detect_string_can_append_op<OutStringType, Arg>::value, int > > +inline void concat_into(OutStringType& out, Arg&& arg, Args&& ... rest) +{ + out += std::forward<Arg>(arg); + concat_into(out, std::forward<Args>(rest)...); +} + +template < typename OutStringType, typename Arg, typename... Args, + enable_if_t < !detect_string_can_append<OutStringType, Arg>::value + && !detect_string_can_append_op<OutStringType, Arg>::value + && detect_string_can_append_iter<OutStringType, Arg>::value, int > > +inline void concat_into(OutStringType& out, const Arg& arg, Args&& ... rest) +{ + out.append(arg.begin(), arg.end()); + concat_into(out, std::forward<Args>(rest)...); +} + +template < typename OutStringType, typename Arg, typename... Args, + enable_if_t < !detect_string_can_append<OutStringType, Arg>::value + && !detect_string_can_append_op<OutStringType, Arg>::value + && !detect_string_can_append_iter<OutStringType, Arg>::value + && detect_string_can_append_data<OutStringType, Arg>::value, int > > +inline void concat_into(OutStringType& out, const Arg& arg, Args&& ... rest) +{ + out.append(arg.data(), arg.size()); + concat_into(out, std::forward<Args>(rest)...); +} + +template<typename OutStringType = std::string, typename... Args> +inline OutStringType concat(Args && ... 
args) +{ + OutStringType str; + str.reserve(concat_length(std::forward<Args>(args)...)); + concat_into(str, std::forward<Args>(args)...); + return str; +} + +} // namespace detail +} // namespace nlohmann + + + +namespace nlohmann +{ +namespace detail +{ +//////////////// +// exceptions // +//////////////// + +/// @brief general exception of the @ref basic_json class +/// @sa https://json.nlohmann.me/api/basic_json/exception/ +class exception : public std::exception +{ + public: + /// returns the explanatory string + const char* what() const noexcept override + { + return m.what(); + } + + /// the id of the exception + const int id; // NOLINT(cppcoreguidelines-non-private-member-variables-in-classes) + + protected: + JSON_HEDLEY_NON_NULL(3) + exception(int id_, const char* what_arg) : id(id_), m(what_arg) {} // NOLINT(bugprone-throw-keyword-missing) + + static std::string name(const std::string& ename, int id_) + { + return concat("[json.exception.", ename, '.', std::to_string(id_), "] "); + } + + static std::string diagnostics(std::nullptr_t /*leaf_element*/) + { + return ""; + } + + template<typename BasicJsonType> + static std::string diagnostics(const BasicJsonType* leaf_element) + { +#if JSON_DIAGNOSTICS + std::vector<std::string> tokens; + for (const auto* current = leaf_element; current != nullptr && current->m_parent != nullptr; current = current->m_parent) + { + switch (current->m_parent->type()) + { + case value_t::array: + { + for (std::size_t i = 0; i < current->m_parent->m_value.array->size(); ++i) + { + if (&current->m_parent->m_value.array->operator[](i) == current) + { + tokens.emplace_back(std::to_string(i)); + break; + } + } + break; + } + + case value_t::object: + { + for (const auto& element : *current->m_parent->m_value.object) + { + if (&element.second == current) + { + tokens.emplace_back(element.first.c_str()); + break; + } + } + break; + } + + case value_t::null: // LCOV_EXCL_LINE + case value_t::string: // LCOV_EXCL_LINE + case value_t::boolean: // LCOV_EXCL_LINE + case value_t::number_integer: // LCOV_EXCL_LINE + case value_t::number_unsigned: // LCOV_EXCL_LINE + case value_t::number_float: // LCOV_EXCL_LINE + case value_t::binary: // LCOV_EXCL_LINE + case value_t::discarded: // LCOV_EXCL_LINE + default: // LCOV_EXCL_LINE + break; // LCOV_EXCL_LINE + } + } + + if (tokens.empty()) + { + return ""; + } + + auto str = std::accumulate(tokens.rbegin(), tokens.rend(), std::string{}, + [](const std::string & a, const std::string & b) + { + return concat(a, '/', detail::escape(b)); + }); + return concat('(', str, ") "); +#else + static_cast<void>(leaf_element); + return ""; +#endif + } + + private: + /// an exception object as storage for error messages + std::runtime_error m; +}; + +/// @brief exception indicating a parse error +/// @sa https://json.nlohmann.me/api/basic_json/parse_error/ +class parse_error : public exception +{ + public: + /*!
+ @brief create a parse error exception + @param[in] id_ the id of the exception + @param[in] pos the position where the error occurred (or with + chars_read_total=0 if the position cannot be + determined) + @param[in] what_arg the explanatory string + @return parse_error object + */ + template<typename BasicJsonContext, enable_if_t<is_basic_json_context<BasicJsonContext>::value, int> = 0> + static parse_error create(int id_, const position_t& pos, const std::string& what_arg, BasicJsonContext context) + { + std::string w = concat(exception::name("parse_error", id_), "parse error", + position_string(pos), ": ", exception::diagnostics(context), what_arg); + return {id_, pos.chars_read_total, w.c_str()}; + } + + template<typename BasicJsonContext, enable_if_t<is_basic_json_context<BasicJsonContext>::value, int> = 0> + static parse_error create(int id_, std::size_t byte_, const std::string& what_arg, BasicJsonContext context) + { + std::string w = concat(exception::name("parse_error", id_), "parse error", + (byte_ != 0 ? (concat(" at byte ", std::to_string(byte_))) : ""), + ": ", exception::diagnostics(context), what_arg); + return {id_, byte_, w.c_str()}; + } + + /*! + @brief byte index of the parse error + + The byte index of the last read character in the input file. + + @note For an input with n bytes, 1 is the index of the first character and + n+1 is the index of the terminating null byte or the end of file. + This also holds true when reading a byte vector (CBOR or MessagePack). + */ + const std::size_t byte; + + private: + parse_error(int id_, std::size_t byte_, const char* what_arg) + : exception(id_, what_arg), byte(byte_) {} + + static std::string position_string(const position_t& pos) + { + return concat(" at line ", std::to_string(pos.lines_read + 1), + ", column ", std::to_string(pos.chars_read_current_line)); + } +}; + +/// @brief exception indicating errors with iterators +/// @sa https://json.nlohmann.me/api/basic_json/invalid_iterator/ +class invalid_iterator : public exception +{ + public: + template<typename BasicJsonContext, enable_if_t<is_basic_json_context<BasicJsonContext>::value, int> = 0> + static invalid_iterator create(int id_, const std::string& what_arg, BasicJsonContext context) + { + std::string w = concat(exception::name("invalid_iterator", id_), exception::diagnostics(context), what_arg); + return {id_, w.c_str()}; + } + + private: + JSON_HEDLEY_NON_NULL(3) + invalid_iterator(int id_, const char* what_arg) + : exception(id_, what_arg) {} +}; + +/// @brief exception indicating executing a member function with a wrong type +/// @sa https://json.nlohmann.me/api/basic_json/type_error/ +class type_error : public exception +{ + public: + template<typename BasicJsonContext, enable_if_t<is_basic_json_context<BasicJsonContext>::value, int> = 0> + static type_error create(int id_, const std::string& what_arg, BasicJsonContext context) + { + std::string w = concat(exception::name("type_error", id_), exception::diagnostics(context), what_arg); + return {id_, w.c_str()}; + } + + private: + JSON_HEDLEY_NON_NULL(3) + type_error(int id_, const char* what_arg) : exception(id_, what_arg) {} +}; + +/// @brief exception indicating access out of the defined range +/// @sa https://json.nlohmann.me/api/basic_json/out_of_range/ +class out_of_range : public exception +{ + public: + template<typename BasicJsonContext, enable_if_t<is_basic_json_context<BasicJsonContext>::value, int> = 0> + static out_of_range create(int id_, const std::string& what_arg, BasicJsonContext context) + { + 
std::string w = concat(exception::name("out_of_range", id_), exception::diagnostics(context), what_arg); + return {id_, w.c_str()}; + } + + private: + JSON_HEDLEY_NON_NULL(3) + out_of_range(int id_, const char* what_arg) : exception(id_, what_arg) {} +}; + +/// @brief exception indicating other library errors +/// @sa https://json.nlohmann.me/api/basic_json/other_error/ +class other_error : public exception +{ + public: + template<typename BasicJsonContext, enable_if_t<is_basic_json_context<BasicJsonContext>::value, int> = 0> + static other_error create(int id_, const std::string& what_arg, BasicJsonContext context) + { + std::string w = concat(exception::name("other_error", id_), exception::diagnostics(context), what_arg); + return {id_, w.c_str()}; + } + + private: + JSON_HEDLEY_NON_NULL(3) + other_error(int id_, const char* what_arg) : exception(id_, what_arg) {} +}; + +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/meta/cpp_future.hpp> + +// #include <nlohmann/detail/meta/identity_tag.hpp> + + +namespace nlohmann +{ +namespace detail +{ +// dispatching helper struct +template <class T> struct identity_tag {}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/meta/type_traits.hpp> + +// #include <nlohmann/detail/string_concat.hpp> + +// #include <nlohmann/detail/value_t.hpp> + + +#if JSON_HAS_EXPERIMENTAL_FILESYSTEM +#include <experimental/filesystem> +namespace nlohmann::detail +{ +namespace std_fs = std::experimental::filesystem; +} // namespace nlohmann::detail +#elif JSON_HAS_FILESYSTEM +#include <filesystem> +namespace nlohmann::detail +{ +namespace std_fs = std::filesystem; +} // namespace nlohmann::detail +#endif + +namespace nlohmann +{ +namespace detail +{ +template<typename BasicJsonType> +inline void from_json(const BasicJsonType& j, typename std::nullptr_t& n) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_null())) + { + JSON_THROW(type_error::create(302, concat("type must be null, but is ", j.type_name()), &j)); + } + n = nullptr; +} + +// overloads for basic_json template parameters +template < typename BasicJsonType, typename ArithmeticType, + enable_if_t < std::is_arithmetic<ArithmeticType>::value&& + !std::is_same<ArithmeticType, typename BasicJsonType::boolean_t>::value, + int > = 0 > +void get_arithmetic_value(const BasicJsonType& j, ArithmeticType& val) +{ + switch (static_cast<value_t>(j)) + { + case value_t::number_unsigned: + { + val = static_cast<ArithmeticType>(*j.template get_ptr<const typename BasicJsonType::number_unsigned_t*>()); + break; + } + case value_t::number_integer: + { + val = static_cast<ArithmeticType>(*j.template get_ptr<const typename BasicJsonType::number_integer_t*>()); + break; + } + case value_t::number_float: + { + val = static_cast<ArithmeticType>(*j.template get_ptr<const typename BasicJsonType::number_float_t*>()); + break; + } + + case value_t::null: + case value_t::object: + case value_t::array: + case value_t::string: + case value_t::boolean: + case value_t::binary: + case value_t::discarded: + default: + JSON_THROW(type_error::create(302, concat("type must be number, but is ", j.type_name()), &j)); + } +} + +template<typename BasicJsonType> +inline void from_json(const BasicJsonType& j, typename BasicJsonType::boolean_t& b) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_boolean())) + { + JSON_THROW(type_error::create(302, concat("type must be boolean, but is ", j.type_name()), &j)); + } + b = *j.template get_ptr<const typename 
BasicJsonType::boolean_t*>(); +} + +template<typename BasicJsonType> +inline void from_json(const BasicJsonType& j, typename BasicJsonType::string_t& s) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_string())) + { + JSON_THROW(type_error::create(302, concat("type must be string, but is ", j.type_name()), &j)); + } + s = *j.template get_ptr<const typename BasicJsonType::string_t*>(); +} + +template < + typename BasicJsonType, typename StringType, + enable_if_t < + std::is_assignable<StringType&, const typename BasicJsonType::string_t>::value + && !std::is_same<typename BasicJsonType::string_t, StringType>::value + && !is_json_ref<StringType>::value, int > = 0 > +inline void from_json(const BasicJsonType& j, StringType& s) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_string())) + { + JSON_THROW(type_error::create(302, concat("type must be string, but is ", j.type_name()), &j)); + } + + s = *j.template get_ptr<const typename BasicJsonType::string_t*>(); +} + +template<typename BasicJsonType> +inline void from_json(const BasicJsonType& j, typename BasicJsonType::number_float_t& val) +{ + get_arithmetic_value(j, val); +} + +template<typename BasicJsonType> +inline void from_json(const BasicJsonType& j, typename BasicJsonType::number_unsigned_t& val) +{ + get_arithmetic_value(j, val); +} + +template<typename BasicJsonType> +inline void from_json(const BasicJsonType& j, typename BasicJsonType::number_integer_t& val) +{ + get_arithmetic_value(j, val); +} + +template<typename BasicJsonType, typename EnumType, + enable_if_t<std::is_enum<EnumType>::value, int> = 0> +inline void from_json(const BasicJsonType& j, EnumType& e) +{ + typename std::underlying_type<EnumType>::type val; + get_arithmetic_value(j, val); + e = static_cast<EnumType>(val); +} + +// forward_list doesn't have an insert method +template<typename BasicJsonType, typename T, typename Allocator, + enable_if_t<is_getable<BasicJsonType, T>::value, int> = 0> +inline void from_json(const BasicJsonType& j, std::forward_list<T, Allocator>& l) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_array())) + { + JSON_THROW(type_error::create(302, concat("type must be array, but is ", j.type_name()), &j)); + } + l.clear(); + std::transform(j.rbegin(), j.rend(), + std::front_inserter(l), [](const BasicJsonType & i) + { + return i.template get<T>(); + }); +} + +// valarray doesn't have an insert method +template<typename BasicJsonType, typename T, + enable_if_t<is_getable<BasicJsonType, T>::value, int> = 0> +inline void from_json(const BasicJsonType& j, std::valarray<T>& l) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_array())) + { + JSON_THROW(type_error::create(302, concat("type must be array, but is ", j.type_name()), &j)); + } + l.resize(j.size()); + std::transform(j.begin(), j.end(), std::begin(l), + [](const BasicJsonType & elem) + { + return elem.template get<T>(); + }); +} + +template<typename BasicJsonType, typename T, std::size_t N> +auto from_json(const BasicJsonType& j, T (&arr)[N]) // NOLINT(cppcoreguidelines-avoid-c-arrays,hicpp-avoid-c-arrays,modernize-avoid-c-arrays) +-> decltype(j.template get<T>(), void()) +{ + for (std::size_t i = 0; i < N; ++i) + { + arr[i] = j.at(i).template get<T>(); + } +} + +template<typename BasicJsonType> +inline void from_json_array_impl(const BasicJsonType& j, typename BasicJsonType::array_t& arr, priority_tag<3> /*unused*/) +{ + arr = *j.template get_ptr<const typename BasicJsonType::array_t*>(); +} + +template<typename BasicJsonType, typename T, std::size_t N> +auto from_json_array_impl(const BasicJsonType& j, std::array<T, N>& arr, + 
priority_tag<2> /*unused*/) +-> decltype(j.template get<T>(), void()) +{ + for (std::size_t i = 0; i < N; ++i) + { + arr[i] = j.at(i).template get<T>(); + } +} + +template<typename BasicJsonType, typename ConstructibleArrayType, + enable_if_t< + std::is_assignable<ConstructibleArrayType&, ConstructibleArrayType>::value, + int> = 0> +auto from_json_array_impl(const BasicJsonType& j, ConstructibleArrayType& arr, priority_tag<1> /*unused*/) +-> decltype( + arr.reserve(std::declval<typename ConstructibleArrayType::size_type>()), + j.template get<typename ConstructibleArrayType::value_type>(), + void()) +{ + using std::end; + + ConstructibleArrayType ret; + ret.reserve(j.size()); + std::transform(j.begin(), j.end(), + std::inserter(ret, end(ret)), [](const BasicJsonType & i) + { + // get<BasicJsonType>() returns *this, this won't call a from_json + // method when value_type is BasicJsonType + return i.template get<typename ConstructibleArrayType::value_type>(); + }); + arr = std::move(ret); +} + +template<typename BasicJsonType, typename ConstructibleArrayType, + enable_if_t< + std::is_assignable<ConstructibleArrayType&, ConstructibleArrayType>::value, + int> = 0> +inline void from_json_array_impl(const BasicJsonType& j, ConstructibleArrayType& arr, + priority_tag<0> /*unused*/) +{ + using std::end; + + ConstructibleArrayType ret; + std::transform( + j.begin(), j.end(), std::inserter(ret, end(ret)), + [](const BasicJsonType & i) + { + // get<BasicJsonType>() returns *this, this won't call a from_json + // method when value_type is BasicJsonType + return i.template get<typename ConstructibleArrayType::value_type>(); + }); + arr = std::move(ret); +} + +template < typename BasicJsonType, typename ConstructibleArrayType, + enable_if_t < + is_constructible_array_type<BasicJsonType, ConstructibleArrayType>::value&& + !is_constructible_object_type<BasicJsonType, ConstructibleArrayType>::value&& + !is_constructible_string_type<BasicJsonType, ConstructibleArrayType>::value&& + !std::is_same<ConstructibleArrayType, typename BasicJsonType::binary_t>::value&& + !is_basic_json<ConstructibleArrayType>::value, + int > = 0 > +auto from_json(const BasicJsonType& j, ConstructibleArrayType& arr) +-> decltype(from_json_array_impl(j, arr, priority_tag<3> {}), +j.template get<typename ConstructibleArrayType::value_type>(), +void()) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_array())) + { + JSON_THROW(type_error::create(302, concat("type must be array, but is ", j.type_name()), &j)); + } + + from_json_array_impl(j, arr, priority_tag<3> {}); +} + +template < typename BasicJsonType, typename T, std::size_t... Idx > +std::array<T, sizeof...(Idx)> from_json_inplace_array_impl(BasicJsonType&& j, + identity_tag<std::array<T, sizeof...(Idx)>> /*unused*/, index_sequence<Idx...> /*unused*/) +{ + return { { std::forward<BasicJsonType>(j).at(Idx).template get<T>()... 
} }; +} + +template < typename BasicJsonType, typename T, std::size_t N > +auto from_json(BasicJsonType&& j, identity_tag<std::array<T, N>> tag) +-> decltype(from_json_inplace_array_impl(std::forward<BasicJsonType>(j), tag, make_index_sequence<N> {})) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_array())) + { + JSON_THROW(type_error::create(302, concat("type must be array, but is ", j.type_name()), &j)); + } + + return from_json_inplace_array_impl(std::forward<BasicJsonType>(j), tag, make_index_sequence<N> {}); +} + +template<typename BasicJsonType> +inline void from_json(const BasicJsonType& j, typename BasicJsonType::binary_t& bin) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_binary())) + { + JSON_THROW(type_error::create(302, concat("type must be binary, but is ", j.type_name()), &j)); + } + + bin = *j.template get_ptr<const typename BasicJsonType::binary_t*>(); +} + +template<typename BasicJsonType, typename ConstructibleObjectType, + enable_if_t<is_constructible_object_type<BasicJsonType, ConstructibleObjectType>::value, int> = 0> +inline void from_json(const BasicJsonType& j, ConstructibleObjectType& obj) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_object())) + { + JSON_THROW(type_error::create(302, concat("type must be object, but is ", j.type_name()), &j)); + } + + ConstructibleObjectType ret; + const auto* inner_object = j.template get_ptr<const typename BasicJsonType::object_t*>(); + using value_type = typename ConstructibleObjectType::value_type; + std::transform( + inner_object->begin(), inner_object->end(), + std::inserter(ret, ret.begin()), + [](typename BasicJsonType::object_t::value_type const & p) + { + return value_type(p.first, p.second.template get<typename ConstructibleObjectType::mapped_type>()); + }); + obj = std::move(ret); +} + +// overload for arithmetic types, not chosen for basic_json template arguments +// (BooleanType, etc..); note: Is it really necessary to provide explicit +// overloads for boolean_t etc. in case of a custom BooleanType which is not +// an arithmetic type? +template < typename BasicJsonType, typename ArithmeticType, + enable_if_t < + std::is_arithmetic<ArithmeticType>::value&& + !std::is_same<ArithmeticType, typename BasicJsonType::number_unsigned_t>::value&& + !std::is_same<ArithmeticType, typename BasicJsonType::number_integer_t>::value&& + !std::is_same<ArithmeticType, typename BasicJsonType::number_float_t>::value&& + !std::is_same<ArithmeticType, typename BasicJsonType::boolean_t>::value, + int > = 0 > +inline void from_json(const BasicJsonType& j, ArithmeticType& val) +{ + switch (static_cast<value_t>(j)) + { + case value_t::number_unsigned: + { + val = static_cast<ArithmeticType>(*j.template get_ptr<const typename BasicJsonType::number_unsigned_t*>()); + break; + } + case value_t::number_integer: + { + val = static_cast<ArithmeticType>(*j.template get_ptr<const typename BasicJsonType::number_integer_t*>()); + break; + } + case value_t::number_float: + { + val = static_cast<ArithmeticType>(*j.template get_ptr<const typename BasicJsonType::number_float_t*>()); + break; + } + case value_t::boolean: + { + val = static_cast<ArithmeticType>(*j.template get_ptr<const typename BasicJsonType::boolean_t*>()); + break; + } + + case value_t::null: + case value_t::object: + case value_t::array: + case value_t::string: + case value_t::binary: + case value_t::discarded: + default: + JSON_THROW(type_error::create(302, concat("type must be number, but is ", j.type_name()), &j)); + } +} + +template<typename BasicJsonType, typename... Args, std::size_t... 
Idx> +std::tuple<Args...> from_json_tuple_impl_base(BasicJsonType&& j, index_sequence<Idx...> /*unused*/) +{ + return std::make_tuple(std::forward<BasicJsonType>(j).at(Idx).template get<Args>()...); +} + +template < typename BasicJsonType, class A1, class A2 > +std::pair<A1, A2> from_json_tuple_impl(BasicJsonType&& j, identity_tag<std::pair<A1, A2>> /*unused*/, priority_tag<0> /*unused*/) +{ + return {std::forward<BasicJsonType>(j).at(0).template get<A1>(), + std::forward<BasicJsonType>(j).at(1).template get<A2>()}; +} + +template<typename BasicJsonType, typename A1, typename A2> +inline void from_json_tuple_impl(BasicJsonType&& j, std::pair<A1, A2>& p, priority_tag<1> /*unused*/) +{ + p = from_json_tuple_impl(std::forward<BasicJsonType>(j), identity_tag<std::pair<A1, A2>> {}, priority_tag<0> {}); +} + +template<typename BasicJsonType, typename... Args> +std::tuple<Args...> from_json_tuple_impl(BasicJsonType&& j, identity_tag<std::tuple<Args...>> /*unused*/, priority_tag<2> /*unused*/) +{ + return from_json_tuple_impl_base<BasicJsonType, Args...>(std::forward<BasicJsonType>(j), index_sequence_for<Args...> {}); +} + +template<typename BasicJsonType, typename... Args> +inline void from_json_tuple_impl(BasicJsonType&& j, std::tuple<Args...>& t, priority_tag<3> /*unused*/) +{ + t = from_json_tuple_impl_base<BasicJsonType, Args...>(std::forward<BasicJsonType>(j), index_sequence_for<Args...> {}); +} + +template<typename BasicJsonType, typename TupleRelated> +auto from_json(BasicJsonType&& j, TupleRelated&& t) +-> decltype(from_json_tuple_impl(std::forward<BasicJsonType>(j), std::forward<TupleRelated>(t), priority_tag<3> {})) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_array())) + { + JSON_THROW(type_error::create(302, concat("type must be array, but is ", j.type_name()), &j)); + } + + return from_json_tuple_impl(std::forward<BasicJsonType>(j), std::forward<TupleRelated>(t), priority_tag<3> {}); +} + +template < typename BasicJsonType, typename Key, typename Value, typename Compare, typename Allocator, + typename = enable_if_t < !std::is_constructible < + typename BasicJsonType::string_t, Key >::value >> +inline void from_json(const BasicJsonType& j, std::map<Key, Value, Compare, Allocator>& m) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_array())) + { + JSON_THROW(type_error::create(302, concat("type must be array, but is ", j.type_name()), &j)); + } + m.clear(); + for (const auto& p : j) + { + if (JSON_HEDLEY_UNLIKELY(!p.is_array())) + { + JSON_THROW(type_error::create(302, concat("type must be array, but is ", p.type_name()), &j)); + } + m.emplace(p.at(0).template get<Key>(), p.at(1).template get<Value>()); + } +} + +template < typename BasicJsonType, typename Key, typename Value, typename Hash, typename KeyEqual, typename Allocator, + typename = enable_if_t < !std::is_constructible < + typename BasicJsonType::string_t, Key >::value >> +inline void from_json(const BasicJsonType& j, std::unordered_map<Key, Value, Hash, KeyEqual, Allocator>& m) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_array())) + { + JSON_THROW(type_error::create(302, concat("type must be array, but is ", j.type_name()), &j)); + } + m.clear(); + for (const auto& p : j) + { + if (JSON_HEDLEY_UNLIKELY(!p.is_array())) + { + JSON_THROW(type_error::create(302, concat("type must be array, but is ", p.type_name()), &j)); + } + m.emplace(p.at(0).template get<Key>(), p.at(1).template get<Value>()); + } +} + +#if JSON_HAS_FILESYSTEM || JSON_HAS_EXPERIMENTAL_FILESYSTEM +template<typename BasicJsonType> +inline void from_json(const BasicJsonType& j, 
std_fs::path& p) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_string())) + { + JSON_THROW(type_error::create(302, concat("type must be string, but is ", j.type_name()), &j)); + } + p = *j.template get_ptr<const typename BasicJsonType::string_t*>(); +} +#endif + +struct from_json_fn +{ + template<typename BasicJsonType, typename T> + auto operator()(const BasicJsonType& j, T&& val) const + noexcept(noexcept(from_json(j, std::forward<T>(val)))) + -> decltype(from_json(j, std::forward<T>(val))) + { + return from_json(j, std::forward<T>(val)); + } +}; +} // namespace detail + +#ifndef JSON_HAS_CPP_17 +/// namespace to hold default `from_json` function +/// to see why this is required: +/// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2015/n4381.html +namespace // NOLINT(cert-dcl59-cpp,fuchsia-header-anon-namespaces,google-build-namespaces) +{ +#endif +JSON_INLINE_VARIABLE constexpr const auto& from_json = // NOLINT(misc-definitions-in-headers) + detail::static_const<detail::from_json_fn>::value; +#ifndef JSON_HAS_CPP_17 +} // namespace +#endif +} // namespace nlohmann + +// #include <nlohmann/detail/conversions/to_json.hpp> + + +#include <algorithm> // copy +#include <iterator> // begin, end +#include <string> // string +#include <tuple> // tuple, get +#include <type_traits> // is_same, is_constructible, is_floating_point, is_enum, underlying_type +#include <utility> // move, forward, declval, pair +#include <valarray> // valarray +#include <vector> // vector + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/iterators/iteration_proxy.hpp> + + +#include <cstddef> // size_t +#include <iterator> // input_iterator_tag +#include <string> // string, to_string +#include <tuple> // tuple_size, get, tuple_element +#include <utility> // move + +#if JSON_HAS_RANGES + #include <ranges> // enable_borrowed_range +#endif + +// #include <nlohmann/detail/meta/type_traits.hpp> + +// #include <nlohmann/detail/value_t.hpp> + + +namespace nlohmann +{ +namespace detail +{ +template<typename string_type> +void int_to_string( string_type& target, std::size_t value ) +{ + // For ADL + using std::to_string; + target = to_string(value); +} +template<typename IteratorType> class iteration_proxy_value +{ + public: + using difference_type = std::ptrdiff_t; + using value_type = iteration_proxy_value; + using pointer = value_type *; + using reference = value_type &; + using iterator_category = std::input_iterator_tag; + using string_type = typename std::remove_cv< typename std::remove_reference<decltype( std::declval<IteratorType>().key() ) >::type >::type; + + private: + /// the iterator + IteratorType anchor{}; + /// an index for arrays (used to create key names) + std::size_t array_index = 0; + /// last stringified array index + mutable std::size_t array_index_last = 0; + /// a string representation of the array index + mutable string_type array_index_str = "0"; + /// an empty string (to return a reference for primitive values) + string_type empty_str{}; + + public: + explicit iteration_proxy_value() = default; + explicit iteration_proxy_value(IteratorType it, std::size_t array_index_ = 0) + noexcept(std::is_nothrow_move_constructible<IteratorType>::value + && std::is_nothrow_default_constructible<string_type>::value) + : anchor(std::move(it)) + , array_index(array_index_) + {} + + iteration_proxy_value(iteration_proxy_value const&) = default; + iteration_proxy_value& operator=(iteration_proxy_value const&) = default; + // older GCCs are a bit fussy and require explicit noexcept specifiers on 
defaulted functions + iteration_proxy_value(iteration_proxy_value&&) + noexcept(std::is_nothrow_move_constructible<IteratorType>::value + && std::is_nothrow_move_constructible<string_type>::value) = default; + iteration_proxy_value& operator=(iteration_proxy_value&&) + noexcept(std::is_nothrow_move_assignable<IteratorType>::value + && std::is_nothrow_move_assignable<string_type>::value) = default; + ~iteration_proxy_value() = default; + + /// dereference operator (needed for range-based for) + const iteration_proxy_value& operator*() const + { + return *this; + } + + /// increment operator (needed for range-based for) + iteration_proxy_value& operator++() + { + ++anchor; + ++array_index; + + return *this; + } + + iteration_proxy_value operator++(int)& // NOLINT(cert-dcl21-cpp) + { + auto tmp = iteration_proxy_value(anchor, array_index); + ++anchor; + ++array_index; + return tmp; + } + + /// equality operator (needed for InputIterator) + bool operator==(const iteration_proxy_value& o) const + { + return anchor == o.anchor; + } + + /// inequality operator (needed for range-based for) + bool operator!=(const iteration_proxy_value& o) const + { + return anchor != o.anchor; + } + + /// return key of the iterator + const string_type& key() const + { + JSON_ASSERT(anchor.m_object != nullptr); + + switch (anchor.m_object->type()) + { + // use integer array index as key + case value_t::array: + { + if (array_index != array_index_last) + { + int_to_string( array_index_str, array_index ); + array_index_last = array_index; + } + return array_index_str; + } + + // use key from the object + case value_t::object: + return anchor.key(); + + // use an empty key for all primitive types + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + return empty_str; + } + } + + /// return value of the iterator + typename IteratorType::reference value() const + { + return anchor.value(); + } +}; + +/// proxy class for the items() function +template<typename IteratorType> class iteration_proxy +{ + private: + /// the container to iterate + typename IteratorType::pointer container = nullptr; + + public: + explicit iteration_proxy() = default; + + /// construct iteration proxy from a container + explicit iteration_proxy(typename IteratorType::reference cont) noexcept + : container(&cont) {} + + iteration_proxy(iteration_proxy const&) = default; + iteration_proxy& operator=(iteration_proxy const&) = default; + iteration_proxy(iteration_proxy&&) noexcept = default; + iteration_proxy& operator=(iteration_proxy&&) noexcept = default; + ~iteration_proxy() = default; + + /// return iterator begin (needed for range-based for) + iteration_proxy_value<IteratorType> begin() const noexcept + { + return iteration_proxy_value<IteratorType>(container->begin()); + } + + /// return iterator end (needed for range-based for) + iteration_proxy_value<IteratorType> end() const noexcept + { + return iteration_proxy_value<IteratorType>(container->end()); + } +}; + +// Structured Bindings Support +// For further reference see https://blog.tartanllama.xyz/structured-bindings/ +// And see https://github.com/nlohmann/json/pull/1391 +template<std::size_t N, typename IteratorType, enable_if_t<N == 0, int> = 0> +auto get(const nlohmann::detail::iteration_proxy_value<IteratorType>& i) -> decltype(i.key()) +{ + return i.key(); +} +// Structured Bindings Support +// For further 
reference see https://blog.tartanllama.xyz/structured-bindings/ +// And see https://github.com/nlohmann/json/pull/1391 +template<std::size_t N, typename IteratorType, enable_if_t<N == 1, int> = 0> +auto get(const nlohmann::detail::iteration_proxy_value<IteratorType>& i) -> decltype(i.value()) +{ + return i.value(); +} +} // namespace detail +} // namespace nlohmann + +// The Addition to the STD Namespace is required to add +// Structured Bindings Support to the iteration_proxy_value class +// For further reference see https://blog.tartanllama.xyz/structured-bindings/ +// And see https://github.com/nlohmann/json/pull/1391 +namespace std +{ +#if defined(__clang__) + // Fix: https://github.com/nlohmann/json/issues/1401 + #pragma clang diagnostic push + #pragma clang diagnostic ignored "-Wmismatched-tags" +#endif +template<typename IteratorType> +class tuple_size<::nlohmann::detail::iteration_proxy_value<IteratorType>> + : public std::integral_constant<std::size_t, 2> {}; + +template<std::size_t N, typename IteratorType> +class tuple_element<N, ::nlohmann::detail::iteration_proxy_value<IteratorType >> +{ + public: + using type = decltype( + get<N>(std::declval < + ::nlohmann::detail::iteration_proxy_value<IteratorType >> ())); +}; +#if defined(__clang__) + #pragma clang diagnostic pop +#endif +} // namespace std + +#if JSON_HAS_RANGES + template <typename IteratorType> + inline constexpr bool ::std::ranges::enable_borrowed_range<::nlohmann::detail::iteration_proxy<IteratorType>> = true; +#endif + +// #include <nlohmann/detail/meta/cpp_future.hpp> + +// #include <nlohmann/detail/meta/type_traits.hpp> + +// #include <nlohmann/detail/value_t.hpp> + + +#if JSON_HAS_EXPERIMENTAL_FILESYSTEM +#include <experimental/filesystem> +namespace nlohmann::detail +{ +namespace std_fs = std::experimental::filesystem; +} // namespace nlohmann::detail +#elif JSON_HAS_FILESYSTEM +#include <filesystem> +namespace nlohmann::detail +{ +namespace std_fs = std::filesystem; +} // namespace nlohmann::detail +#endif + +namespace nlohmann +{ +namespace detail +{ +////////////////// +// constructors // +////////////////// + +/* + * Note all external_constructor<>::construct functions need to call + * j.m_value.destroy(j.m_type) to avoid a memory leak in case j contains an + * allocated value (e.g., a string). See bug issue + * https://github.com/nlohmann/json/issues/2865 for more information. 
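+ *
+ * A minimal sketch of the pattern every construct() overload below follows
+ * (illustrative only; the boolean specialization is its simplest instance):
+ *
+ *     j.m_value.destroy(j.m_type);   // release whatever j currently owns
+ *     j.m_type = value_t::boolean;   // switch the type discriminator
+ *     j.m_value = b;                 // install the new payload
+ *     j.assert_invariant();          // keep type and value consistent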
+ */ + +template<value_t> struct external_constructor; + +template<> +struct external_constructor<value_t::boolean> +{ + template<typename BasicJsonType> + static void construct(BasicJsonType& j, typename BasicJsonType::boolean_t b) noexcept + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::boolean; + j.m_value = b; + j.assert_invariant(); + } +}; + +template<> +struct external_constructor<value_t::string> +{ + template<typename BasicJsonType> + static void construct(BasicJsonType& j, const typename BasicJsonType::string_t& s) + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::string; + j.m_value = s; + j.assert_invariant(); + } + + template<typename BasicJsonType> + static void construct(BasicJsonType& j, typename BasicJsonType::string_t&& s) + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::string; + j.m_value = std::move(s); + j.assert_invariant(); + } + + template < typename BasicJsonType, typename CompatibleStringType, + enable_if_t < !std::is_same<CompatibleStringType, typename BasicJsonType::string_t>::value, + int > = 0 > + static void construct(BasicJsonType& j, const CompatibleStringType& str) + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::string; + j.m_value.string = j.template create<typename BasicJsonType::string_t>(str); + j.assert_invariant(); + } +}; + +template<> +struct external_constructor<value_t::binary> +{ + template<typename BasicJsonType> + static void construct(BasicJsonType& j, const typename BasicJsonType::binary_t& b) + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::binary; + j.m_value = typename BasicJsonType::binary_t(b); + j.assert_invariant(); + } + + template<typename BasicJsonType> + static void construct(BasicJsonType& j, typename BasicJsonType::binary_t&& b) + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::binary; + j.m_value = typename BasicJsonType::binary_t(std::move(b)); + j.assert_invariant(); + } +}; + +template<> +struct external_constructor<value_t::number_float> +{ + template<typename BasicJsonType> + static void construct(BasicJsonType& j, typename BasicJsonType::number_float_t val) noexcept + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::number_float; + j.m_value = val; + j.assert_invariant(); + } +}; + +template<> +struct external_constructor<value_t::number_unsigned> +{ + template<typename BasicJsonType> + static void construct(BasicJsonType& j, typename BasicJsonType::number_unsigned_t val) noexcept + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::number_unsigned; + j.m_value = val; + j.assert_invariant(); + } +}; + +template<> +struct external_constructor<value_t::number_integer> +{ + template<typename BasicJsonType> + static void construct(BasicJsonType& j, typename BasicJsonType::number_integer_t val) noexcept + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::number_integer; + j.m_value = val; + j.assert_invariant(); + } +}; + +template<> +struct external_constructor<value_t::array> +{ + template<typename BasicJsonType> + static void construct(BasicJsonType& j, const typename BasicJsonType::array_t& arr) + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::array; + j.m_value = arr; + j.set_parents(); + j.assert_invariant(); + } + + template<typename BasicJsonType> + static void construct(BasicJsonType& j, typename BasicJsonType::array_t&& arr) + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::array; + j.m_value = std::move(arr); + j.set_parents(); + j.assert_invariant(); + } + + template < typename BasicJsonType, typename CompatibleArrayType, + enable_if_t < 
!std::is_same<CompatibleArrayType, typename BasicJsonType::array_t>::value, + int > = 0 > + static void construct(BasicJsonType& j, const CompatibleArrayType& arr) + { + using std::begin; + using std::end; + + j.m_value.destroy(j.m_type); + j.m_type = value_t::array; + j.m_value.array = j.template create<typename BasicJsonType::array_t>(begin(arr), end(arr)); + j.set_parents(); + j.assert_invariant(); + } + + template<typename BasicJsonType> + static void construct(BasicJsonType& j, const std::vector<bool>& arr) + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::array; + j.m_value = value_t::array; + j.m_value.array->reserve(arr.size()); + for (const bool x : arr) + { + j.m_value.array->push_back(x); + j.set_parent(j.m_value.array->back()); + } + j.assert_invariant(); + } + + template<typename BasicJsonType, typename T, + enable_if_t<std::is_convertible<T, BasicJsonType>::value, int> = 0> + static void construct(BasicJsonType& j, const std::valarray<T>& arr) + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::array; + j.m_value = value_t::array; + j.m_value.array->resize(arr.size()); + if (arr.size() > 0) + { + std::copy(std::begin(arr), std::end(arr), j.m_value.array->begin()); + } + j.set_parents(); + j.assert_invariant(); + } +}; + +template<> +struct external_constructor<value_t::object> +{ + template<typename BasicJsonType> + static void construct(BasicJsonType& j, const typename BasicJsonType::object_t& obj) + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::object; + j.m_value = obj; + j.set_parents(); + j.assert_invariant(); + } + + template<typename BasicJsonType> + static void construct(BasicJsonType& j, typename BasicJsonType::object_t&& obj) + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::object; + j.m_value = std::move(obj); + j.set_parents(); + j.assert_invariant(); + } + + template < typename BasicJsonType, typename CompatibleObjectType, + enable_if_t < !std::is_same<CompatibleObjectType, typename BasicJsonType::object_t>::value, int > = 0 > + static void construct(BasicJsonType& j, const CompatibleObjectType& obj) + { + using std::begin; + using std::end; + + j.m_value.destroy(j.m_type); + j.m_type = value_t::object; + j.m_value.object = j.template create<typename BasicJsonType::object_t>(begin(obj), end(obj)); + j.set_parents(); + j.assert_invariant(); + } +}; + +///////////// +// to_json // +///////////// + +template<typename BasicJsonType, typename T, + enable_if_t<std::is_same<T, typename BasicJsonType::boolean_t>::value, int> = 0> +inline void to_json(BasicJsonType& j, T b) noexcept +{ + external_constructor<value_t::boolean>::construct(j, b); +} + +template<typename BasicJsonType, typename CompatibleString, + enable_if_t<std::is_constructible<typename BasicJsonType::string_t, CompatibleString>::value, int> = 0> +inline void to_json(BasicJsonType& j, const CompatibleString& s) +{ + external_constructor<value_t::string>::construct(j, s); +} + +template<typename BasicJsonType> +inline void to_json(BasicJsonType& j, typename BasicJsonType::string_t&& s) +{ + external_constructor<value_t::string>::construct(j, std::move(s)); +} + +template<typename BasicJsonType, typename FloatType, + enable_if_t<std::is_floating_point<FloatType>::value, int> = 0> +inline void to_json(BasicJsonType& j, FloatType val) noexcept +{ + external_constructor<value_t::number_float>::construct(j, static_cast<typename BasicJsonType::number_float_t>(val)); +} + +template<typename BasicJsonType, typename CompatibleNumberUnsignedType, + 
enable_if_t<is_compatible_integer_type<typename BasicJsonType::number_unsigned_t, CompatibleNumberUnsignedType>::value, int> = 0> +inline void to_json(BasicJsonType& j, CompatibleNumberUnsignedType val) noexcept +{ + external_constructor<value_t::number_unsigned>::construct(j, static_cast<typename BasicJsonType::number_unsigned_t>(val)); +} + +template<typename BasicJsonType, typename CompatibleNumberIntegerType, + enable_if_t<is_compatible_integer_type<typename BasicJsonType::number_integer_t, CompatibleNumberIntegerType>::value, int> = 0> +inline void to_json(BasicJsonType& j, CompatibleNumberIntegerType val) noexcept +{ + external_constructor<value_t::number_integer>::construct(j, static_cast<typename BasicJsonType::number_integer_t>(val)); +} + +template<typename BasicJsonType, typename EnumType, + enable_if_t<std::is_enum<EnumType>::value, int> = 0> +inline void to_json(BasicJsonType& j, EnumType e) noexcept +{ + using underlying_type = typename std::underlying_type<EnumType>::type; + external_constructor<value_t::number_integer>::construct(j, static_cast<underlying_type>(e)); +} + +template<typename BasicJsonType> +inline void to_json(BasicJsonType& j, const std::vector<bool>& e) +{ + external_constructor<value_t::array>::construct(j, e); +} + +template < typename BasicJsonType, typename CompatibleArrayType, + enable_if_t < is_compatible_array_type<BasicJsonType, + CompatibleArrayType>::value&& + !is_compatible_object_type<BasicJsonType, CompatibleArrayType>::value&& + !is_compatible_string_type<BasicJsonType, CompatibleArrayType>::value&& + !std::is_same<typename BasicJsonType::binary_t, CompatibleArrayType>::value&& + !is_basic_json<CompatibleArrayType>::value, + int > = 0 > +inline void to_json(BasicJsonType& j, const CompatibleArrayType& arr) +{ + external_constructor<value_t::array>::construct(j, arr); +} + +template<typename BasicJsonType> +inline void to_json(BasicJsonType& j, const typename BasicJsonType::binary_t& bin) +{ + external_constructor<value_t::binary>::construct(j, bin); +} + +template<typename BasicJsonType, typename T, + enable_if_t<std::is_convertible<T, BasicJsonType>::value, int> = 0> +inline void to_json(BasicJsonType& j, const std::valarray<T>& arr) +{ + external_constructor<value_t::array>::construct(j, std::move(arr)); +} + +template<typename BasicJsonType> +inline void to_json(BasicJsonType& j, typename BasicJsonType::array_t&& arr) +{ + external_constructor<value_t::array>::construct(j, std::move(arr)); +} + +template < typename BasicJsonType, typename CompatibleObjectType, + enable_if_t < is_compatible_object_type<BasicJsonType, CompatibleObjectType>::value&& !is_basic_json<CompatibleObjectType>::value, int > = 0 > +inline void to_json(BasicJsonType& j, const CompatibleObjectType& obj) +{ + external_constructor<value_t::object>::construct(j, obj); +} + +template<typename BasicJsonType> +inline void to_json(BasicJsonType& j, typename BasicJsonType::object_t&& obj) +{ + external_constructor<value_t::object>::construct(j, std::move(obj)); +} + +template < + typename BasicJsonType, typename T, std::size_t N, + enable_if_t < !std::is_constructible<typename BasicJsonType::string_t, + const T(&)[N]>::value, // NOLINT(cppcoreguidelines-avoid-c-arrays,hicpp-avoid-c-arrays,modernize-avoid-c-arrays) + int > = 0 > +inline void to_json(BasicJsonType& j, const T(&arr)[N]) // NOLINT(cppcoreguidelines-avoid-c-arrays,hicpp-avoid-c-arrays,modernize-avoid-c-arrays) +{ + external_constructor<value_t::array>::construct(j, arr); +} + +template < typename BasicJsonType, 
typename T1, typename T2, enable_if_t < std::is_constructible<BasicJsonType, T1>::value&& std::is_constructible<BasicJsonType, T2>::value, int > = 0 > +inline void to_json(BasicJsonType& j, const std::pair<T1, T2>& p) +{ + j = { p.first, p.second }; +} + +// for https://github.com/nlohmann/json/pull/1134 +template<typename BasicJsonType, typename T, + enable_if_t<std::is_same<T, iteration_proxy_value<typename BasicJsonType::iterator>>::value, int> = 0> +inline void to_json(BasicJsonType& j, const T& b) +{ + j = { {b.key(), b.value()} }; +} + +template<typename BasicJsonType, typename Tuple, std::size_t... Idx> +inline void to_json_tuple_impl(BasicJsonType& j, const Tuple& t, index_sequence<Idx...> /*unused*/) +{ + j = { std::get<Idx>(t)... }; +} + +template<typename BasicJsonType, typename T, enable_if_t<is_constructible_tuple<BasicJsonType, T>::value, int > = 0> +inline void to_json(BasicJsonType& j, const T& t) +{ + to_json_tuple_impl(j, t, make_index_sequence<std::tuple_size<T>::value> {}); +} + +#if JSON_HAS_FILESYSTEM || JSON_HAS_EXPERIMENTAL_FILESYSTEM +template<typename BasicJsonType> +inline void to_json(BasicJsonType& j, const std_fs::path& p) +{ + j = p.string(); +} +#endif + +struct to_json_fn +{ + template<typename BasicJsonType, typename T> + auto operator()(BasicJsonType& j, T&& val) const noexcept(noexcept(to_json(j, std::forward<T>(val)))) + -> decltype(to_json(j, std::forward<T>(val)), void()) + { + return to_json(j, std::forward<T>(val)); + } +}; +} // namespace detail + +#ifndef JSON_HAS_CPP_17 +/// namespace to hold default `to_json` function +/// to see why this is required: +/// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2015/n4381.html +namespace // NOLINT(cert-dcl59-cpp,fuchsia-header-anon-namespaces,google-build-namespaces) +{ +#endif +JSON_INLINE_VARIABLE constexpr const auto& to_json = // NOLINT(misc-definitions-in-headers) + detail::static_const<detail::to_json_fn>::value; +#ifndef JSON_HAS_CPP_17 +} // namespace +#endif +} // namespace nlohmann + +// #include <nlohmann/detail/meta/identity_tag.hpp> + +// #include <nlohmann/detail/meta/type_traits.hpp> + + +namespace nlohmann +{ + +/// @sa https://json.nlohmann.me/api/adl_serializer/ +template<typename ValueType, typename> +struct adl_serializer +{ + /// @brief convert a JSON value to any value type + /// @sa https://json.nlohmann.me/api/adl_serializer/from_json/ + template<typename BasicJsonType, typename TargetType = ValueType> + static auto from_json(BasicJsonType && j, TargetType& val) noexcept( + noexcept(::nlohmann::from_json(std::forward<BasicJsonType>(j), val))) + -> decltype(::nlohmann::from_json(std::forward<BasicJsonType>(j), val), void()) + { + ::nlohmann::from_json(std::forward<BasicJsonType>(j), val); + } + + /// @brief convert a JSON value to any value type + /// @sa https://json.nlohmann.me/api/adl_serializer/from_json/ + template<typename BasicJsonType, typename TargetType = ValueType> + static auto from_json(BasicJsonType && j) noexcept( + noexcept(::nlohmann::from_json(std::forward<BasicJsonType>(j), detail::identity_tag<TargetType> {}))) + -> decltype(::nlohmann::from_json(std::forward<BasicJsonType>(j), detail::identity_tag<TargetType> {})) + { + return ::nlohmann::from_json(std::forward<BasicJsonType>(j), detail::identity_tag<TargetType> {}); + } + + /// @brief convert any value type to a JSON value + /// @sa https://json.nlohmann.me/api/adl_serializer/to_json/ + template<typename BasicJsonType, typename TargetType = ValueType> + static auto to_json(BasicJsonType& j, TargetType && 
val) noexcept( + noexcept(::nlohmann::to_json(j, std::forward<TargetType>(val)))) + -> decltype(::nlohmann::to_json(j, std::forward<TargetType>(val)), void()) + { + ::nlohmann::to_json(j, std::forward<TargetType>(val)); + } +}; +} // namespace nlohmann + +// #include <nlohmann/byte_container_with_subtype.hpp> + + +#include <cstdint> // uint8_t, uint64_t +#include <tuple> // tie +#include <utility> // move + +namespace nlohmann +{ + +/// @brief an internal type for a backed binary type +/// @sa https://json.nlohmann.me/api/byte_container_with_subtype/ +template<typename BinaryType> +class byte_container_with_subtype : public BinaryType +{ + public: + using container_type = BinaryType; + using subtype_type = std::uint64_t; + + /// @sa https://json.nlohmann.me/api/byte_container_with_subtype/byte_container_with_subtype/ + byte_container_with_subtype() noexcept(noexcept(container_type())) + : container_type() + {} + + /// @sa https://json.nlohmann.me/api/byte_container_with_subtype/byte_container_with_subtype/ + byte_container_with_subtype(const container_type& b) noexcept(noexcept(container_type(b))) + : container_type(b) + {} + + /// @sa https://json.nlohmann.me/api/byte_container_with_subtype/byte_container_with_subtype/ + byte_container_with_subtype(container_type&& b) noexcept(noexcept(container_type(std::move(b)))) + : container_type(std::move(b)) + {} + + /// @sa https://json.nlohmann.me/api/byte_container_with_subtype/byte_container_with_subtype/ + byte_container_with_subtype(const container_type& b, subtype_type subtype_) noexcept(noexcept(container_type(b))) + : container_type(b) + , m_subtype(subtype_) + , m_has_subtype(true) + {} + + /// @sa https://json.nlohmann.me/api/byte_container_with_subtype/byte_container_with_subtype/ + byte_container_with_subtype(container_type&& b, subtype_type subtype_) noexcept(noexcept(container_type(std::move(b)))) + : container_type(std::move(b)) + , m_subtype(subtype_) + , m_has_subtype(true) + {} + + bool operator==(const byte_container_with_subtype& rhs) const + { + return std::tie(static_cast<const BinaryType&>(*this), m_subtype, m_has_subtype) == + std::tie(static_cast<const BinaryType&>(rhs), rhs.m_subtype, rhs.m_has_subtype); + } + + bool operator!=(const byte_container_with_subtype& rhs) const + { + return !(rhs == *this); + } + + /// @brief sets the binary subtype + /// @sa https://json.nlohmann.me/api/byte_container_with_subtype/set_subtype/ + void set_subtype(subtype_type subtype_) noexcept + { + m_subtype = subtype_; + m_has_subtype = true; + } + + /// @brief return the binary subtype + /// @sa https://json.nlohmann.me/api/byte_container_with_subtype/subtype/ + constexpr subtype_type subtype() const noexcept + { + return m_has_subtype ? 
m_subtype : static_cast<subtype_type>(-1); + } + + /// @brief return whether the value has a subtype + /// @sa https://json.nlohmann.me/api/byte_container_with_subtype/has_subtype/ + constexpr bool has_subtype() const noexcept + { + return m_has_subtype; + } + + /// @brief clears the binary subtype + /// @sa https://json.nlohmann.me/api/byte_container_with_subtype/clear_subtype/ + void clear_subtype() noexcept + { + m_subtype = 0; + m_has_subtype = false; + } + + private: + subtype_type m_subtype = 0; + bool m_has_subtype = false; +}; + +} // namespace nlohmann + +// #include <nlohmann/detail/conversions/from_json.hpp> + +// #include <nlohmann/detail/conversions/to_json.hpp> + +// #include <nlohmann/detail/exceptions.hpp> + +// #include <nlohmann/detail/hash.hpp> + + +#include <cstdint> // uint8_t +#include <cstddef> // size_t +#include <functional> // hash + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/value_t.hpp> + + +namespace nlohmann +{ +namespace detail +{ + +// boost::hash_combine +inline std::size_t combine(std::size_t seed, std::size_t h) noexcept +{ + seed ^= h + 0x9e3779b9 + (seed << 6U) + (seed >> 2U); + return seed; +} + +/*! +@brief hash a JSON value + +The hash function tries to rely on std::hash where possible. Furthermore, the +type of the JSON value is taken into account to have different hash values for +null, 0, 0U, and false, etc. + +@tparam BasicJsonType basic_json specialization +@param j JSON value to hash +@return hash value of j +*/ +template<typename BasicJsonType> +std::size_t hash(const BasicJsonType& j) +{ + using string_t = typename BasicJsonType::string_t; + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + + const auto type = static_cast<std::size_t>(j.type()); + switch (j.type()) + { + case BasicJsonType::value_t::null: + case BasicJsonType::value_t::discarded: + { + return combine(type, 0); + } + + case BasicJsonType::value_t::object: + { + auto seed = combine(type, j.size()); + for (const auto& element : j.items()) + { + const auto h = std::hash<string_t> {}(element.key()); + seed = combine(seed, h); + seed = combine(seed, hash(element.value())); + } + return seed; + } + + case BasicJsonType::value_t::array: + { + auto seed = combine(type, j.size()); + for (const auto& element : j) + { + seed = combine(seed, hash(element)); + } + return seed; + } + + case BasicJsonType::value_t::string: + { + const auto h = std::hash<string_t> {}(j.template get_ref<const string_t&>()); + return combine(type, h); + } + + case BasicJsonType::value_t::boolean: + { + const auto h = std::hash<bool> {}(j.template get<bool>()); + return combine(type, h); + } + + case BasicJsonType::value_t::number_integer: + { + const auto h = std::hash<number_integer_t> {}(j.template get<number_integer_t>()); + return combine(type, h); + } + + case BasicJsonType::value_t::number_unsigned: + { + const auto h = std::hash<number_unsigned_t> {}(j.template get<number_unsigned_t>()); + return combine(type, h); + } + + case BasicJsonType::value_t::number_float: + { + const auto h = std::hash<number_float_t> {}(j.template get<number_float_t>()); + return combine(type, h); + } + + case BasicJsonType::value_t::binary: + { + auto seed = combine(type, j.get_binary().size()); + const auto h = std::hash<bool> {}(j.get_binary().has_subtype()); + seed = combine(seed, h); + seed = combine(seed, 
static_cast<std::size_t>(j.get_binary().subtype())); + for (const auto byte : j.get_binary()) + { + seed = combine(seed, std::hash<std::uint8_t> {}(byte)); + } + return seed; + } + + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) LCOV_EXCL_LINE + return 0; // LCOV_EXCL_LINE + } +} + +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/input/binary_reader.hpp> + + +#include <algorithm> // generate_n +#include <array> // array +#include <cmath> // ldexp +#include <cstddef> // size_t +#include <cstdint> // uint8_t, uint16_t, uint32_t, uint64_t +#include <cstdio> // snprintf +#include <cstring> // memcpy +#include <iterator> // back_inserter +#include <limits> // numeric_limits +#include <string> // char_traits, string +#include <utility> // make_pair, move +#include <vector> // vector +#include <map> // map + +// #include <nlohmann/detail/exceptions.hpp> + +// #include <nlohmann/detail/input/input_adapters.hpp> + + +#include <array> // array +#include <cstddef> // size_t +#include <cstring> // strlen +#include <iterator> // begin, end, iterator_traits, random_access_iterator_tag, distance, next +#include <memory> // shared_ptr, make_shared, addressof +#include <numeric> // accumulate +#include <string> // string, char_traits +#include <type_traits> // enable_if, is_base_of, is_pointer, is_integral, remove_pointer +#include <utility> // pair, declval + +#ifndef JSON_NO_IO + #include <cstdio> // FILE * + #include <istream> // istream +#endif // JSON_NO_IO + +// #include <nlohmann/detail/iterators/iterator_traits.hpp> + +// #include <nlohmann/detail/macro_scope.hpp> + + +namespace nlohmann +{ +namespace detail +{ +/// the supported input formats +enum class input_format_t { json, cbor, msgpack, ubjson, bson, bjdata }; + +//////////////////// +// input adapters // +//////////////////// + +#ifndef JSON_NO_IO +/*! +Input adapter for stdio file access. This adapter read only 1 byte and do not use any + buffer. This adapter is a very low level adapter. +*/ +class file_input_adapter +{ + public: + using char_type = char; + + JSON_HEDLEY_NON_NULL(2) + explicit file_input_adapter(std::FILE* f) noexcept + : m_file(f) + {} + + // make class move-only + file_input_adapter(const file_input_adapter&) = delete; + file_input_adapter(file_input_adapter&&) noexcept = default; + file_input_adapter& operator=(const file_input_adapter&) = delete; + file_input_adapter& operator=(file_input_adapter&&) = delete; + ~file_input_adapter() = default; + + std::char_traits<char>::int_type get_character() noexcept + { + return std::fgetc(m_file); + } + + private: + /// the file pointer to read from + std::FILE* m_file; +}; + + +/*! +Input adapter for a (caching) istream. Ignores a UFT Byte Order Mark at +beginning of input. Does not support changing the underlying std::streambuf +in mid-input. Maintains underlying std::istream and std::streambuf to support +subsequent use of standard std::istream operations to process any input +characters following those used in parsing the JSON input. Clears the +std::istream flags; any input errors (e.g., EOF) will be detected by the first +subsequent call for input from the std::istream. 
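+
+A brief usage sketch (hedged; it assumes the usual nlohmann::json alias, that
+<fstream>/<sstream> are included, and an illustrative file name). Both calls
+below end up pulling characters through this adapter:
+
+    std::ifstream file("data.json");
+    nlohmann::json j1 = nlohmann::json::parse(file);   // parse directly from an istream
+
+    std::istringstream text(R"({"k": 1})");
+    nlohmann::json j2;
+    text >> j2;                                         // operator>> reads via the same adapter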
+*/ +class input_stream_adapter +{ + public: + using char_type = char; + + ~input_stream_adapter() + { + // clear stream flags; we use underlying streambuf I/O, do not + // maintain ifstream flags, except eof + if (is != nullptr) + { + is->clear(is->rdstate() & std::ios::eofbit); + } + } + + explicit input_stream_adapter(std::istream& i) + : is(&i), sb(i.rdbuf()) + {} + + // delete because of pointer members + input_stream_adapter(const input_stream_adapter&) = delete; + input_stream_adapter& operator=(input_stream_adapter&) = delete; + input_stream_adapter& operator=(input_stream_adapter&&) = delete; + + input_stream_adapter(input_stream_adapter&& rhs) noexcept + : is(rhs.is), sb(rhs.sb) + { + rhs.is = nullptr; + rhs.sb = nullptr; + } + + // std::istream/std::streambuf use std::char_traits<char>::to_int_type, to + // ensure that std::char_traits<char>::eof() and the character 0xFF do not + // end up as the same value, e.g. 0xFFFFFFFF. + std::char_traits<char>::int_type get_character() + { + auto res = sb->sbumpc(); + // set eof manually, as we don't use the istream interface. + if (JSON_HEDLEY_UNLIKELY(res == std::char_traits<char>::eof())) + { + is->clear(is->rdstate() | std::ios::eofbit); + } + return res; + } + + private: + /// the associated input stream + std::istream* is = nullptr; + std::streambuf* sb = nullptr; +}; +#endif // JSON_NO_IO + +// General-purpose iterator-based adapter. It might not be as fast as +// theoretically possible for some containers, but it is extremely versatile. +template<typename IteratorType> +class iterator_input_adapter +{ + public: + using char_type = typename std::iterator_traits<IteratorType>::value_type; + + iterator_input_adapter(IteratorType first, IteratorType last) + : current(std::move(first)), end(std::move(last)) + {} + + typename std::char_traits<char_type>::int_type get_character() + { + if (JSON_HEDLEY_LIKELY(current != end)) + { + auto result = std::char_traits<char_type>::to_int_type(*current); + std::advance(current, 1); + return result; + } + + return std::char_traits<char_type>::eof(); + } + + private: + IteratorType current; + IteratorType end; + + template<typename BaseInputAdapter, size_t T> + friend struct wide_string_input_helper; + + bool empty() const + { + return current == end; + } +}; + + +template<typename BaseInputAdapter, size_t T> +struct wide_string_input_helper; + +template<typename BaseInputAdapter> +struct wide_string_input_helper<BaseInputAdapter, 4> +{ + // UTF-32 + static void fill_buffer(BaseInputAdapter& input, + std::array<std::char_traits<char>::int_type, 4>& utf8_bytes, + size_t& utf8_bytes_index, + size_t& utf8_bytes_filled) + { + utf8_bytes_index = 0; + + if (JSON_HEDLEY_UNLIKELY(input.empty())) + { + utf8_bytes[0] = std::char_traits<char>::eof(); + utf8_bytes_filled = 1; + } + else + { + // get the current character + const auto wc = input.get_character(); + + // UTF-32 to UTF-8 encoding + if (wc < 0x80) + { + utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(wc); + utf8_bytes_filled = 1; + } + else if (wc <= 0x7FF) + { + utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xC0u | ((static_cast<unsigned int>(wc) >> 6u) & 0x1Fu)); + utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | (static_cast<unsigned int>(wc) & 0x3Fu)); + utf8_bytes_filled = 2; + } + else if (wc <= 0xFFFF) + { + utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xE0u | ((static_cast<unsigned int>(wc) >> 12u) & 0x0Fu)); + utf8_bytes[1] = 
static_cast<std::char_traits<char>::int_type>(0x80u | ((static_cast<unsigned int>(wc) >> 6u) & 0x3Fu)); + utf8_bytes[2] = static_cast<std::char_traits<char>::int_type>(0x80u | (static_cast<unsigned int>(wc) & 0x3Fu)); + utf8_bytes_filled = 3; + } + else if (wc <= 0x10FFFF) + { + utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xF0u | ((static_cast<unsigned int>(wc) >> 18u) & 0x07u)); + utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | ((static_cast<unsigned int>(wc) >> 12u) & 0x3Fu)); + utf8_bytes[2] = static_cast<std::char_traits<char>::int_type>(0x80u | ((static_cast<unsigned int>(wc) >> 6u) & 0x3Fu)); + utf8_bytes[3] = static_cast<std::char_traits<char>::int_type>(0x80u | (static_cast<unsigned int>(wc) & 0x3Fu)); + utf8_bytes_filled = 4; + } + else + { + // unknown character + utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(wc); + utf8_bytes_filled = 1; + } + } + } +}; + +template<typename BaseInputAdapter> +struct wide_string_input_helper<BaseInputAdapter, 2> +{ + // UTF-16 + static void fill_buffer(BaseInputAdapter& input, + std::array<std::char_traits<char>::int_type, 4>& utf8_bytes, + size_t& utf8_bytes_index, + size_t& utf8_bytes_filled) + { + utf8_bytes_index = 0; + + if (JSON_HEDLEY_UNLIKELY(input.empty())) + { + utf8_bytes[0] = std::char_traits<char>::eof(); + utf8_bytes_filled = 1; + } + else + { + // get the current character + const auto wc = input.get_character(); + + // UTF-16 to UTF-8 encoding + if (wc < 0x80) + { + utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(wc); + utf8_bytes_filled = 1; + } + else if (wc <= 0x7FF) + { + utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xC0u | ((static_cast<unsigned int>(wc) >> 6u))); + utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | (static_cast<unsigned int>(wc) & 0x3Fu)); + utf8_bytes_filled = 2; + } + else if (0xD800 > wc || wc >= 0xE000) + { + utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xE0u | ((static_cast<unsigned int>(wc) >> 12u))); + utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | ((static_cast<unsigned int>(wc) >> 6u) & 0x3Fu)); + utf8_bytes[2] = static_cast<std::char_traits<char>::int_type>(0x80u | (static_cast<unsigned int>(wc) & 0x3Fu)); + utf8_bytes_filled = 3; + } + else + { + if (JSON_HEDLEY_UNLIKELY(!input.empty())) + { + const auto wc2 = static_cast<unsigned int>(input.get_character()); + const auto charcode = 0x10000u + (((static_cast<unsigned int>(wc) & 0x3FFu) << 10u) | (wc2 & 0x3FFu)); + utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xF0u | (charcode >> 18u)); + utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | ((charcode >> 12u) & 0x3Fu)); + utf8_bytes[2] = static_cast<std::char_traits<char>::int_type>(0x80u | ((charcode >> 6u) & 0x3Fu)); + utf8_bytes[3] = static_cast<std::char_traits<char>::int_type>(0x80u | (charcode & 0x3Fu)); + utf8_bytes_filled = 4; + } + else + { + utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(wc); + utf8_bytes_filled = 1; + } + } + } + } +}; + +// Wraps another input apdater to convert wide character types into individual bytes. 
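+// A hedged usage sketch (assuming the usual nlohmann::json alias): parsing a wide
+// string is routed through this adapter, which re-encodes each wide code unit to
+// UTF-8 before the lexer sees it.
+//
+//     std::wstring wide = L"{\"answer\": 42}";
+//     nlohmann::json j = nlohmann::json::parse(wide);   // wchar_t input, converted per code unit
+//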
+template<typename BaseInputAdapter, typename WideCharType> +class wide_string_input_adapter +{ + public: + using char_type = char; + + wide_string_input_adapter(BaseInputAdapter base) + : base_adapter(base) {} + + typename std::char_traits<char>::int_type get_character() noexcept + { + // check if buffer needs to be filled + if (utf8_bytes_index == utf8_bytes_filled) + { + fill_buffer<sizeof(WideCharType)>(); + + JSON_ASSERT(utf8_bytes_filled > 0); + JSON_ASSERT(utf8_bytes_index == 0); + } + + // use buffer + JSON_ASSERT(utf8_bytes_filled > 0); + JSON_ASSERT(utf8_bytes_index < utf8_bytes_filled); + return utf8_bytes[utf8_bytes_index++]; + } + + private: + BaseInputAdapter base_adapter; + + template<size_t T> + void fill_buffer() + { + wide_string_input_helper<BaseInputAdapter, T>::fill_buffer(base_adapter, utf8_bytes, utf8_bytes_index, utf8_bytes_filled); + } + + /// a buffer for UTF-8 bytes + std::array<std::char_traits<char>::int_type, 4> utf8_bytes = {{0, 0, 0, 0}}; + + /// index to the utf8_codes array for the next valid byte + std::size_t utf8_bytes_index = 0; + /// number of valid bytes in the utf8_codes array + std::size_t utf8_bytes_filled = 0; +}; + + +template<typename IteratorType, typename Enable = void> +struct iterator_input_adapter_factory +{ + using iterator_type = IteratorType; + using char_type = typename std::iterator_traits<iterator_type>::value_type; + using adapter_type = iterator_input_adapter<iterator_type>; + + static adapter_type create(IteratorType first, IteratorType last) + { + return adapter_type(std::move(first), std::move(last)); + } +}; + +template<typename T> +struct is_iterator_of_multibyte +{ + using value_type = typename std::iterator_traits<T>::value_type; + enum + { + value = sizeof(value_type) > 1 + }; +}; + +template<typename IteratorType> +struct iterator_input_adapter_factory<IteratorType, enable_if_t<is_iterator_of_multibyte<IteratorType>::value>> +{ + using iterator_type = IteratorType; + using char_type = typename std::iterator_traits<iterator_type>::value_type; + using base_adapter_type = iterator_input_adapter<iterator_type>; + using adapter_type = wide_string_input_adapter<base_adapter_type, char_type>; + + static adapter_type create(IteratorType first, IteratorType last) + { + return adapter_type(base_adapter_type(std::move(first), std::move(last))); + } +}; + +// General purpose iterator-based input +template<typename IteratorType> +typename iterator_input_adapter_factory<IteratorType>::adapter_type input_adapter(IteratorType first, IteratorType last) +{ + using factory_type = iterator_input_adapter_factory<IteratorType>; + return factory_type::create(first, last); +} + +// Convenience shorthand from container to iterator +// Enables ADL on begin(container) and end(container) +// Encloses the using declarations in namespace for not to leak them to outside scope + +namespace container_input_adapter_factory_impl +{ + +using std::begin; +using std::end; + +template<typename ContainerType, typename Enable = void> +struct container_input_adapter_factory {}; + +template<typename ContainerType> +struct container_input_adapter_factory< ContainerType, + void_t<decltype(begin(std::declval<ContainerType>()), end(std::declval<ContainerType>()))>> + { + using adapter_type = decltype(input_adapter(begin(std::declval<ContainerType>()), end(std::declval<ContainerType>()))); + + static adapter_type create(const ContainerType& container) +{ + return input_adapter(begin(container), end(container)); +} + }; + +} // namespace 
container_input_adapter_factory_impl + +template<typename ContainerType> +typename container_input_adapter_factory_impl::container_input_adapter_factory<ContainerType>::adapter_type input_adapter(const ContainerType& container) +{ + return container_input_adapter_factory_impl::container_input_adapter_factory<ContainerType>::create(container); +} + +#ifndef JSON_NO_IO +// Special cases with fast paths +inline file_input_adapter input_adapter(std::FILE* file) +{ + return file_input_adapter(file); +} + +inline input_stream_adapter input_adapter(std::istream& stream) +{ + return input_stream_adapter(stream); +} + +inline input_stream_adapter input_adapter(std::istream&& stream) +{ + return input_stream_adapter(stream); +} +#endif // JSON_NO_IO + +using contiguous_bytes_input_adapter = decltype(input_adapter(std::declval<const char*>(), std::declval<const char*>())); + +// Null-delimited strings, and the like. +template < typename CharT, + typename std::enable_if < + std::is_pointer<CharT>::value&& + !std::is_array<CharT>::value&& + std::is_integral<typename std::remove_pointer<CharT>::type>::value&& + sizeof(typename std::remove_pointer<CharT>::type) == 1, + int >::type = 0 > +contiguous_bytes_input_adapter input_adapter(CharT b) +{ + auto length = std::strlen(reinterpret_cast<const char*>(b)); + const auto* ptr = reinterpret_cast<const char*>(b); + return input_adapter(ptr, ptr + length); +} + +template<typename T, std::size_t N> +auto input_adapter(T (&array)[N]) -> decltype(input_adapter(array, array + N)) // NOLINT(cppcoreguidelines-avoid-c-arrays,hicpp-avoid-c-arrays,modernize-avoid-c-arrays) +{ + return input_adapter(array, array + N); +} + +// This class only handles inputs of input_buffer_adapter type. +// It's required so that expressions like {ptr, len} can be implicitly cast +// to the correct adapter. +class span_input_adapter +{ + public: + template < typename CharT, + typename std::enable_if < + std::is_pointer<CharT>::value&& + std::is_integral<typename std::remove_pointer<CharT>::type>::value&& + sizeof(typename std::remove_pointer<CharT>::type) == 1, + int >::type = 0 > + span_input_adapter(CharT b, std::size_t l) + : ia(reinterpret_cast<const char*>(b), reinterpret_cast<const char*>(b) + l) {} + + template<class IteratorType, + typename std::enable_if< + std::is_same<typename iterator_traits<IteratorType>::iterator_category, std::random_access_iterator_tag>::value, + int>::type = 0> + span_input_adapter(IteratorType first, IteratorType last) + : ia(input_adapter(first, last)) {} + + contiguous_bytes_input_adapter&& get() + { + return std::move(ia); // NOLINT(hicpp-move-const-arg,performance-move-const-arg) + } + + private: + contiguous_bytes_input_adapter ia; +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/input/json_sax.hpp> + + +#include <cstddef> +#include <string> // string +#include <utility> // move +#include <vector> // vector + +// #include <nlohmann/detail/exceptions.hpp> + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/string_concat.hpp> + + +namespace nlohmann +{ + +/*! +@brief SAX interface + +This class describes the SAX interface used by @ref nlohmann::json::sax_parse. +Each function is called in different situations while the input is parsed. The +boolean return value informs the parser whether to continue processing the +input. 
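+
+A hedged sketch of a custom handler (the class and variable names are illustrative,
+not part of the library; every pure virtual function must be overridden):
+
+    struct counting_sax : nlohmann::json_sax<nlohmann::json>
+    {
+        std::size_t events = 0;
+        bool null() override { ++events; return true; }
+        bool boolean(bool) override { ++events; return true; }
+        bool number_integer(number_integer_t) override { ++events; return true; }
+        bool number_unsigned(number_unsigned_t) override { ++events; return true; }
+        bool number_float(number_float_t, const string_t&) override { ++events; return true; }
+        bool string(string_t&) override { ++events; return true; }
+        bool binary(binary_t&) override { ++events; return true; }
+        bool start_object(std::size_t) override { ++events; return true; }
+        bool key(string_t&) override { ++events; return true; }
+        bool end_object() override { ++events; return true; }
+        bool start_array(std::size_t) override { ++events; return true; }
+        bool end_array() override { ++events; return true; }
+        bool parse_error(std::size_t, const std::string&, const nlohmann::detail::exception&) override { return false; }
+    };
+
+    counting_sax sax;
+    bool ok = nlohmann::json::sax_parse(R"({"a": [1, 2, 3]})", &sax);   // ok == true, sax.events counts the events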
+*/ +template<typename BasicJsonType> +struct json_sax +{ + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using binary_t = typename BasicJsonType::binary_t; + + /*! + @brief a null value was read + @return whether parsing should proceed + */ + virtual bool null() = 0; + + /*! + @brief a boolean value was read + @param[in] val boolean value + @return whether parsing should proceed + */ + virtual bool boolean(bool val) = 0; + + /*! + @brief an integer number was read + @param[in] val integer value + @return whether parsing should proceed + */ + virtual bool number_integer(number_integer_t val) = 0; + + /*! + @brief an unsigned integer number was read + @param[in] val unsigned integer value + @return whether parsing should proceed + */ + virtual bool number_unsigned(number_unsigned_t val) = 0; + + /*! + @brief a floating-point number was read + @param[in] val floating-point value + @param[in] s raw token value + @return whether parsing should proceed + */ + virtual bool number_float(number_float_t val, const string_t& s) = 0; + + /*! + @brief a string value was read + @param[in] val string value + @return whether parsing should proceed + @note It is safe to move the passed string value. + */ + virtual bool string(string_t& val) = 0; + + /*! + @brief a binary value was read + @param[in] val binary value + @return whether parsing should proceed + @note It is safe to move the passed binary value. + */ + virtual bool binary(binary_t& val) = 0; + + /*! + @brief the beginning of an object was read + @param[in] elements number of object elements or -1 if unknown + @return whether parsing should proceed + @note binary formats may report the number of elements + */ + virtual bool start_object(std::size_t elements) = 0; + + /*! + @brief an object key was read + @param[in] val object key + @return whether parsing should proceed + @note It is safe to move the passed string. + */ + virtual bool key(string_t& val) = 0; + + /*! + @brief the end of an object was read + @return whether parsing should proceed + */ + virtual bool end_object() = 0; + + /*! + @brief the beginning of an array was read + @param[in] elements number of array elements or -1 if unknown + @return whether parsing should proceed + @note binary formats may report the number of elements + */ + virtual bool start_array(std::size_t elements) = 0; + + /*! + @brief the end of an array was read + @return whether parsing should proceed + */ + virtual bool end_array() = 0; + + /*! + @brief a parse error occurred + @param[in] position the position in the input where the error occurs + @param[in] last_token the last read token + @param[in] ex an exception object describing the error + @return whether parsing should proceed (must return false) + */ + virtual bool parse_error(std::size_t position, + const std::string& last_token, + const detail::exception& ex) = 0; + + json_sax() = default; + json_sax(const json_sax&) = default; + json_sax(json_sax&&) noexcept = default; + json_sax& operator=(const json_sax&) = default; + json_sax& operator=(json_sax&&) noexcept = default; + virtual ~json_sax() = default; +}; + + +namespace detail +{ +/*! 
+@brief SAX implementation to create a JSON value from SAX events + +This class implements the @ref json_sax interface and processes the SAX events +to create a JSON value which makes it basically a DOM parser. The structure or +hierarchy of the JSON value is managed by the stack `ref_stack` which contains +a pointer to the respective array or object for each recursion depth. + +After successful parsing, the value that is passed by reference to the +constructor contains the parsed value. + +@tparam BasicJsonType the JSON type +*/ +template<typename BasicJsonType> +class json_sax_dom_parser +{ + public: + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using binary_t = typename BasicJsonType::binary_t; + + /*! + @param[in,out] r reference to a JSON value that is manipulated while + parsing + @param[in] allow_exceptions_ whether parse errors yield exceptions + */ + explicit json_sax_dom_parser(BasicJsonType& r, const bool allow_exceptions_ = true) + : root(r), allow_exceptions(allow_exceptions_) + {} + + // make class move-only + json_sax_dom_parser(const json_sax_dom_parser&) = delete; + json_sax_dom_parser(json_sax_dom_parser&&) = default; // NOLINT(hicpp-noexcept-move,performance-noexcept-move-constructor) + json_sax_dom_parser& operator=(const json_sax_dom_parser&) = delete; + json_sax_dom_parser& operator=(json_sax_dom_parser&&) = default; // NOLINT(hicpp-noexcept-move,performance-noexcept-move-constructor) + ~json_sax_dom_parser() = default; + + bool null() + { + handle_value(nullptr); + return true; + } + + bool boolean(bool val) + { + handle_value(val); + return true; + } + + bool number_integer(number_integer_t val) + { + handle_value(val); + return true; + } + + bool number_unsigned(number_unsigned_t val) + { + handle_value(val); + return true; + } + + bool number_float(number_float_t val, const string_t& /*unused*/) + { + handle_value(val); + return true; + } + + bool string(string_t& val) + { + handle_value(val); + return true; + } + + bool binary(binary_t& val) + { + handle_value(std::move(val)); + return true; + } + + bool start_object(std::size_t len) + { + ref_stack.push_back(handle_value(BasicJsonType::value_t::object)); + + if (JSON_HEDLEY_UNLIKELY(len != static_cast<std::size_t>(-1) && len > ref_stack.back()->max_size())) + { + JSON_THROW(out_of_range::create(408, concat("excessive object size: ", std::to_string(len)), ref_stack.back())); + } + + return true; + } + + bool key(string_t& val) + { + JSON_ASSERT(!ref_stack.empty()); + JSON_ASSERT(ref_stack.back()->is_object()); + + // add null at given key and store the reference for later + object_element = &(ref_stack.back()->m_value.object->operator[](val)); + return true; + } + + bool end_object() + { + JSON_ASSERT(!ref_stack.empty()); + JSON_ASSERT(ref_stack.back()->is_object()); + + ref_stack.back()->set_parents(); + ref_stack.pop_back(); + return true; + } + + bool start_array(std::size_t len) + { + ref_stack.push_back(handle_value(BasicJsonType::value_t::array)); + + if (JSON_HEDLEY_UNLIKELY(len != static_cast<std::size_t>(-1) && len > ref_stack.back()->max_size())) + { + JSON_THROW(out_of_range::create(408, concat("excessive array size: ", std::to_string(len)), ref_stack.back())); + } + + return true; + } + + bool end_array() + { + JSON_ASSERT(!ref_stack.empty()); + JSON_ASSERT(ref_stack.back()->is_array()); + + 
ref_stack.back()->set_parents(); + ref_stack.pop_back(); + return true; + } + + template<class Exception> + bool parse_error(std::size_t /*unused*/, const std::string& /*unused*/, + const Exception& ex) + { + errored = true; + static_cast<void>(ex); + if (allow_exceptions) + { + JSON_THROW(ex); + } + return false; + } + + constexpr bool is_errored() const + { + return errored; + } + + private: + /*! + @invariant If the ref stack is empty, then the passed value will be the new + root. + @invariant If the ref stack contains a value, then it is an array or an + object to which we can add elements + */ + template<typename Value> + JSON_HEDLEY_RETURNS_NON_NULL + BasicJsonType* handle_value(Value&& v) + { + if (ref_stack.empty()) + { + root = BasicJsonType(std::forward<Value>(v)); + return &root; + } + + JSON_ASSERT(ref_stack.back()->is_array() || ref_stack.back()->is_object()); + + if (ref_stack.back()->is_array()) + { + ref_stack.back()->m_value.array->emplace_back(std::forward<Value>(v)); + return &(ref_stack.back()->m_value.array->back()); + } + + JSON_ASSERT(ref_stack.back()->is_object()); + JSON_ASSERT(object_element); + *object_element = BasicJsonType(std::forward<Value>(v)); + return object_element; + } + + /// the parsed JSON value + BasicJsonType& root; + /// stack to model hierarchy of values + std::vector<BasicJsonType*> ref_stack {}; + /// helper to hold the reference for the next object element + BasicJsonType* object_element = nullptr; + /// whether a syntax error occurred + bool errored = false; + /// whether to throw exceptions in case of errors + const bool allow_exceptions = true; +}; + +template<typename BasicJsonType> +class json_sax_dom_callback_parser +{ + public: + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using binary_t = typename BasicJsonType::binary_t; + using parser_callback_t = typename BasicJsonType::parser_callback_t; + using parse_event_t = typename BasicJsonType::parse_event_t; + + json_sax_dom_callback_parser(BasicJsonType& r, + const parser_callback_t cb, + const bool allow_exceptions_ = true) + : root(r), callback(cb), allow_exceptions(allow_exceptions_) + { + keep_stack.push_back(true); + } + + // make class move-only + json_sax_dom_callback_parser(const json_sax_dom_callback_parser&) = delete; + json_sax_dom_callback_parser(json_sax_dom_callback_parser&&) = default; // NOLINT(hicpp-noexcept-move,performance-noexcept-move-constructor) + json_sax_dom_callback_parser& operator=(const json_sax_dom_callback_parser&) = delete; + json_sax_dom_callback_parser& operator=(json_sax_dom_callback_parser&&) = default; // NOLINT(hicpp-noexcept-move,performance-noexcept-move-constructor) + ~json_sax_dom_callback_parser() = default; + + bool null() + { + handle_value(nullptr); + return true; + } + + bool boolean(bool val) + { + handle_value(val); + return true; + } + + bool number_integer(number_integer_t val) + { + handle_value(val); + return true; + } + + bool number_unsigned(number_unsigned_t val) + { + handle_value(val); + return true; + } + + bool number_float(number_float_t val, const string_t& /*unused*/) + { + handle_value(val); + return true; + } + + bool string(string_t& val) + { + handle_value(val); + return true; + } + + bool binary(binary_t& val) + { + handle_value(std::move(val)); + return true; + } + + bool start_object(std::size_t len) + { + // 
check callback for object start + const bool keep = callback(static_cast<int>(ref_stack.size()), parse_event_t::object_start, discarded); + keep_stack.push_back(keep); + + auto val = handle_value(BasicJsonType::value_t::object, true); + ref_stack.push_back(val.second); + + // check object limit + if (ref_stack.back() && JSON_HEDLEY_UNLIKELY(len != static_cast<std::size_t>(-1) && len > ref_stack.back()->max_size())) + { + JSON_THROW(out_of_range::create(408, concat("excessive object size: ", std::to_string(len)), ref_stack.back())); + } + + return true; + } + + bool key(string_t& val) + { + BasicJsonType k = BasicJsonType(val); + + // check callback for key + const bool keep = callback(static_cast<int>(ref_stack.size()), parse_event_t::key, k); + key_keep_stack.push_back(keep); + + // add discarded value at given key and store the reference for later + if (keep && ref_stack.back()) + { + object_element = &(ref_stack.back()->m_value.object->operator[](val) = discarded); + } + + return true; + } + + bool end_object() + { + if (ref_stack.back()) + { + if (!callback(static_cast<int>(ref_stack.size()) - 1, parse_event_t::object_end, *ref_stack.back())) + { + // discard object + *ref_stack.back() = discarded; + } + else + { + ref_stack.back()->set_parents(); + } + } + + JSON_ASSERT(!ref_stack.empty()); + JSON_ASSERT(!keep_stack.empty()); + ref_stack.pop_back(); + keep_stack.pop_back(); + + if (!ref_stack.empty() && ref_stack.back() && ref_stack.back()->is_structured()) + { + // remove discarded value + for (auto it = ref_stack.back()->begin(); it != ref_stack.back()->end(); ++it) + { + if (it->is_discarded()) + { + ref_stack.back()->erase(it); + break; + } + } + } + + return true; + } + + bool start_array(std::size_t len) + { + const bool keep = callback(static_cast<int>(ref_stack.size()), parse_event_t::array_start, discarded); + keep_stack.push_back(keep); + + auto val = handle_value(BasicJsonType::value_t::array, true); + ref_stack.push_back(val.second); + + // check array limit + if (ref_stack.back() && JSON_HEDLEY_UNLIKELY(len != static_cast<std::size_t>(-1) && len > ref_stack.back()->max_size())) + { + JSON_THROW(out_of_range::create(408, concat("excessive array size: ", std::to_string(len)), ref_stack.back())); + } + + return true; + } + + bool end_array() + { + bool keep = true; + + if (ref_stack.back()) + { + keep = callback(static_cast<int>(ref_stack.size()) - 1, parse_event_t::array_end, *ref_stack.back()); + if (keep) + { + ref_stack.back()->set_parents(); + } + else + { + // discard array + *ref_stack.back() = discarded; + } + } + + JSON_ASSERT(!ref_stack.empty()); + JSON_ASSERT(!keep_stack.empty()); + ref_stack.pop_back(); + keep_stack.pop_back(); + + // remove discarded value + if (!keep && !ref_stack.empty() && ref_stack.back()->is_array()) + { + ref_stack.back()->m_value.array->pop_back(); + } + + return true; + } + + template<class Exception> + bool parse_error(std::size_t /*unused*/, const std::string& /*unused*/, + const Exception& ex) + { + errored = true; + static_cast<void>(ex); + if (allow_exceptions) + { + JSON_THROW(ex); + } + return false; + } + + constexpr bool is_errored() const + { + return errored; + } + + private: + /*! + @param[in] v value to add to the JSON value we build during parsing + @param[in] skip_callback whether we should skip calling the callback + function; this is required after start_array() and + start_object() SAX events, because otherwise we would call the + callback function with an empty array or object, respectively. 
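+
+    As an added, illustrative sketch of the callback that ultimately drives
+    this keep/discard logic (the lambda below is invented for the example):
+
+    @code{.cpp}
+    // drop every null value while parsing
+    auto cb = [](int, nlohmann::json::parse_event_t event, nlohmann::json& parsed)
+    {
+        return !(event == nlohmann::json::parse_event_t::value && parsed.is_null());
+    };
+    auto j = nlohmann::json::parse(R"({"a": 1, "b": null})", cb);  // j == {"a": 1}
+    @endcode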
+ + @invariant If the ref stack is empty, then the passed value will be the new + root. + @invariant If the ref stack contains a value, then it is an array or an + object to which we can add elements + + @return pair of boolean (whether value should be kept) and pointer (to the + passed value in the ref_stack hierarchy; nullptr if not kept) + */ + template<typename Value> + std::pair<bool, BasicJsonType*> handle_value(Value&& v, const bool skip_callback = false) + { + JSON_ASSERT(!keep_stack.empty()); + + // do not handle this value if we know it would be added to a discarded + // container + if (!keep_stack.back()) + { + return {false, nullptr}; + } + + // create value + auto value = BasicJsonType(std::forward<Value>(v)); + + // check callback + const bool keep = skip_callback || callback(static_cast<int>(ref_stack.size()), parse_event_t::value, value); + + // do not handle this value if we just learnt it shall be discarded + if (!keep) + { + return {false, nullptr}; + } + + if (ref_stack.empty()) + { + root = std::move(value); + return {true, &root}; + } + + // skip this value if we already decided to skip the parent + // (https://github.com/nlohmann/json/issues/971#issuecomment-413678360) + if (!ref_stack.back()) + { + return {false, nullptr}; + } + + // we now only expect arrays and objects + JSON_ASSERT(ref_stack.back()->is_array() || ref_stack.back()->is_object()); + + // array + if (ref_stack.back()->is_array()) + { + ref_stack.back()->m_value.array->emplace_back(std::move(value)); + return {true, &(ref_stack.back()->m_value.array->back())}; + } + + // object + JSON_ASSERT(ref_stack.back()->is_object()); + // check if we should store an element for the current key + JSON_ASSERT(!key_keep_stack.empty()); + const bool store_element = key_keep_stack.back(); + key_keep_stack.pop_back(); + + if (!store_element) + { + return {false, nullptr}; + } + + JSON_ASSERT(object_element); + *object_element = std::move(value); + return {true, object_element}; + } + + /// the parsed JSON value + BasicJsonType& root; + /// stack to model hierarchy of values + std::vector<BasicJsonType*> ref_stack {}; + /// stack to manage which values to keep + std::vector<bool> keep_stack {}; + /// stack to manage which object keys to keep + std::vector<bool> key_keep_stack {}; + /// helper to hold the reference for the next object element + BasicJsonType* object_element = nullptr; + /// whether a syntax error occurred + bool errored = false; + /// callback function + const parser_callback_t callback = nullptr; + /// whether to throw exceptions in case of errors + const bool allow_exceptions = true; + /// a discarded value for the callback + BasicJsonType discarded = BasicJsonType::value_t::discarded; +}; + +template<typename BasicJsonType> +class json_sax_acceptor +{ + public: + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using binary_t = typename BasicJsonType::binary_t; + + bool null() + { + return true; + } + + bool boolean(bool /*unused*/) + { + return true; + } + + bool number_integer(number_integer_t /*unused*/) + { + return true; + } + + bool number_unsigned(number_unsigned_t /*unused*/) + { + return true; + } + + bool number_float(number_float_t /*unused*/, const string_t& /*unused*/) + { + return true; + } + + bool string(string_t& /*unused*/) + { + return true; + } + + bool binary(binary_t& 
/*unused*/) + { + return true; + } + + bool start_object(std::size_t /*unused*/ = static_cast<std::size_t>(-1)) + { + return true; + } + + bool key(string_t& /*unused*/) + { + return true; + } + + bool end_object() + { + return true; + } + + bool start_array(std::size_t /*unused*/ = static_cast<std::size_t>(-1)) + { + return true; + } + + bool end_array() + { + return true; + } + + bool parse_error(std::size_t /*unused*/, const std::string& /*unused*/, const detail::exception& /*unused*/) + { + return false; + } +}; +} // namespace detail + +} // namespace nlohmann + +// #include <nlohmann/detail/input/lexer.hpp> + + +#include <array> // array +#include <clocale> // localeconv +#include <cstddef> // size_t +#include <cstdio> // snprintf +#include <cstdlib> // strtof, strtod, strtold, strtoll, strtoull +#include <initializer_list> // initializer_list +#include <string> // char_traits, string +#include <utility> // move +#include <vector> // vector + +// #include <nlohmann/detail/input/input_adapters.hpp> + +// #include <nlohmann/detail/input/position_t.hpp> + +// #include <nlohmann/detail/macro_scope.hpp> + + +namespace nlohmann +{ +namespace detail +{ +/////////// +// lexer // +/////////// + +template<typename BasicJsonType> +class lexer_base +{ + public: + /// token types for the parser + enum class token_type + { + uninitialized, ///< indicating the scanner is uninitialized + literal_true, ///< the `true` literal + literal_false, ///< the `false` literal + literal_null, ///< the `null` literal + value_string, ///< a string -- use get_string() for actual value + value_unsigned, ///< an unsigned integer -- use get_number_unsigned() for actual value + value_integer, ///< a signed integer -- use get_number_integer() for actual value + value_float, ///< an floating point number -- use get_number_float() for actual value + begin_array, ///< the character for array begin `[` + begin_object, ///< the character for object begin `{` + end_array, ///< the character for array end `]` + end_object, ///< the character for object end `}` + name_separator, ///< the name separator `:` + value_separator, ///< the value separator `,` + parse_error, ///< indicating a parse error + end_of_input, ///< indicating the end of the input buffer + literal_or_value ///< a literal or the begin of a value (only for diagnostics) + }; + + /// return name of values of type token_type (only used for errors) + JSON_HEDLEY_RETURNS_NON_NULL + JSON_HEDLEY_CONST + static const char* token_type_name(const token_type t) noexcept + { + switch (t) + { + case token_type::uninitialized: + return "<uninitialized>"; + case token_type::literal_true: + return "true literal"; + case token_type::literal_false: + return "false literal"; + case token_type::literal_null: + return "null literal"; + case token_type::value_string: + return "string literal"; + case token_type::value_unsigned: + case token_type::value_integer: + case token_type::value_float: + return "number literal"; + case token_type::begin_array: + return "'['"; + case token_type::begin_object: + return "'{'"; + case token_type::end_array: + return "']'"; + case token_type::end_object: + return "'}'"; + case token_type::name_separator: + return "':'"; + case token_type::value_separator: + return "','"; + case token_type::parse_error: + return "<parse error>"; + case token_type::end_of_input: + return "end of input"; + case token_type::literal_or_value: + return "'[', '{', or a literal"; + // LCOV_EXCL_START + default: // catch non-enum values + return "unknown token"; + // 
LCOV_EXCL_STOP + } + } +}; +/*! +@brief lexical analysis + +This class organizes the lexical analysis during JSON deserialization. +*/ +template<typename BasicJsonType, typename InputAdapterType> +class lexer : public lexer_base<BasicJsonType> +{ + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using char_type = typename InputAdapterType::char_type; + using char_int_type = typename std::char_traits<char_type>::int_type; + + public: + using token_type = typename lexer_base<BasicJsonType>::token_type; + + explicit lexer(InputAdapterType&& adapter, bool ignore_comments_ = false) noexcept + : ia(std::move(adapter)) + , ignore_comments(ignore_comments_) + , decimal_point_char(static_cast<char_int_type>(get_decimal_point())) + {} + + // delete because of pointer members + lexer(const lexer&) = delete; + lexer(lexer&&) = default; // NOLINT(hicpp-noexcept-move,performance-noexcept-move-constructor) + lexer& operator=(lexer&) = delete; + lexer& operator=(lexer&&) = default; // NOLINT(hicpp-noexcept-move,performance-noexcept-move-constructor) + ~lexer() = default; + + private: + ///////////////////// + // locales + ///////////////////// + + /// return the locale-dependent decimal point + JSON_HEDLEY_PURE + static char get_decimal_point() noexcept + { + const auto* loc = localeconv(); + JSON_ASSERT(loc != nullptr); + return (loc->decimal_point == nullptr) ? '.' : *(loc->decimal_point); + } + + ///////////////////// + // scan functions + ///////////////////// + + /*! + @brief get codepoint from 4 hex characters following `\u` + + For input "\u c1 c2 c3 c4" the codepoint is: + (c1 * 0x1000) + (c2 * 0x0100) + (c3 * 0x0010) + c4 + = (c1 << 12) + (c2 << 8) + (c3 << 4) + (c4 << 0) + + Furthermore, the possible characters '0'..'9', 'A'..'F', and 'a'..'f' + must be converted to the integers 0x0..0x9, 0xA..0xF, 0xA..0xF, resp. The + conversion is done by subtracting the offset (0x30, 0x37, and 0x57) + between the ASCII value of the character and the desired integer value. + + @return codepoint (0x0000..0xFFFF) or -1 in case of an error (e.g. EOF or + non-hex character) + */ + int get_codepoint() + { + // this function only makes sense after reading `\u` + JSON_ASSERT(current == 'u'); + int codepoint = 0; + + const auto factors = { 12u, 8u, 4u, 0u }; + for (const auto factor : factors) + { + get(); + + if (current >= '0' && current <= '9') + { + codepoint += static_cast<int>((static_cast<unsigned int>(current) - 0x30u) << factor); + } + else if (current >= 'A' && current <= 'F') + { + codepoint += static_cast<int>((static_cast<unsigned int>(current) - 0x37u) << factor); + } + else if (current >= 'a' && current <= 'f') + { + codepoint += static_cast<int>((static_cast<unsigned int>(current) - 0x57u) << factor); + } + else + { + return -1; + } + } + + JSON_ASSERT(0x0000 <= codepoint && codepoint <= 0xFFFF); + return codepoint; + } + + /*! + @brief check if the next byte(s) are inside a given range + + Adds the current byte and, for each passed range, reads a new byte and + checks if it is inside the range. If a violation was detected, set up an + error message and return false. Otherwise, return true. 
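+
+    For example (added for illustration), the two continuation bytes of a
+    three-byte sequence introduced by 0xE0 are checked further below with
+    `next_byte_in_range({0xA0, 0xBF, 0x80, 0xBF})`: the first continuation
+    byte must lie in A0..BF, the second in 80..BF.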
+ + @param[in] ranges list of integers; interpreted as list of pairs of + inclusive lower and upper bound, respectively + + @pre The passed list @a ranges must have 2, 4, or 6 elements; that is, + 1, 2, or 3 pairs. This precondition is enforced by an assertion. + + @return true if and only if no range violation was detected + */ + bool next_byte_in_range(std::initializer_list<char_int_type> ranges) + { + JSON_ASSERT(ranges.size() == 2 || ranges.size() == 4 || ranges.size() == 6); + add(current); + + for (auto range = ranges.begin(); range != ranges.end(); ++range) + { + get(); + if (JSON_HEDLEY_LIKELY(*range <= current && current <= *(++range))) + { + add(current); + } + else + { + error_message = "invalid string: ill-formed UTF-8 byte"; + return false; + } + } + + return true; + } + + /*! + @brief scan a string literal + + This function scans a string according to Sect. 7 of RFC 8259. While + scanning, bytes are escaped and copied into buffer token_buffer. Then the + function returns successfully, token_buffer is *not* null-terminated (as it + may contain \0 bytes), and token_buffer.size() is the number of bytes in the + string. + + @return token_type::value_string if string could be successfully scanned, + token_type::parse_error otherwise + + @note In case of errors, variable error_message contains a textual + description. + */ + token_type scan_string() + { + // reset token_buffer (ignore opening quote) + reset(); + + // we entered the function by reading an open quote + JSON_ASSERT(current == '\"'); + + while (true) + { + // get next character + switch (get()) + { + // end of file while parsing string + case std::char_traits<char_type>::eof(): + { + error_message = "invalid string: missing closing quote"; + return token_type::parse_error; + } + + // closing quote + case '\"': + { + return token_type::value_string; + } + + // escapes + case '\\': + { + switch (get()) + { + // quotation mark + case '\"': + add('\"'); + break; + // reverse solidus + case '\\': + add('\\'); + break; + // solidus + case '/': + add('/'); + break; + // backspace + case 'b': + add('\b'); + break; + // form feed + case 'f': + add('\f'); + break; + // line feed + case 'n': + add('\n'); + break; + // carriage return + case 'r': + add('\r'); + break; + // tab + case 't': + add('\t'); + break; + + // unicode escapes + case 'u': + { + const int codepoint1 = get_codepoint(); + int codepoint = codepoint1; // start with codepoint1 + + if (JSON_HEDLEY_UNLIKELY(codepoint1 == -1)) + { + error_message = "invalid string: '\\u' must be followed by 4 hex digits"; + return token_type::parse_error; + } + + // check if code point is a high surrogate + if (0xD800 <= codepoint1 && codepoint1 <= 0xDBFF) + { + // expect next \uxxxx entry + if (JSON_HEDLEY_LIKELY(get() == '\\' && get() == 'u')) + { + const int codepoint2 = get_codepoint(); + + if (JSON_HEDLEY_UNLIKELY(codepoint2 == -1)) + { + error_message = "invalid string: '\\u' must be followed by 4 hex digits"; + return token_type::parse_error; + } + + // check if codepoint2 is a low surrogate + if (JSON_HEDLEY_LIKELY(0xDC00 <= codepoint2 && codepoint2 <= 0xDFFF)) + { + // overwrite codepoint + codepoint = static_cast<int>( + // high surrogate occupies the most significant 22 bits + (static_cast<unsigned int>(codepoint1) << 10u) + // low surrogate occupies the least significant 15 bits + + static_cast<unsigned int>(codepoint2) + // there is still the 0xD800, 0xDC00 and 0x10000 noise + // in the result, so we have to subtract with: + // (0xD800 << 10) + DC00 - 0x10000 = 0x35FDC00 
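+                                            // worked example (added for illustration): for the pair
+                                            // \uD83D \uDE00 (U+1F600):
+                                            // (0xD83D << 10) + 0xDE00 - 0x35FDC00
+                                            //   = 0x360F400 + 0xDE00 - 0x35FDC00 = 0x1F600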
+ - 0x35FDC00u); + } + else + { + error_message = "invalid string: surrogate U+D800..U+DBFF must be followed by U+DC00..U+DFFF"; + return token_type::parse_error; + } + } + else + { + error_message = "invalid string: surrogate U+D800..U+DBFF must be followed by U+DC00..U+DFFF"; + return token_type::parse_error; + } + } + else + { + if (JSON_HEDLEY_UNLIKELY(0xDC00 <= codepoint1 && codepoint1 <= 0xDFFF)) + { + error_message = "invalid string: surrogate U+DC00..U+DFFF must follow U+D800..U+DBFF"; + return token_type::parse_error; + } + } + + // result of the above calculation yields a proper codepoint + JSON_ASSERT(0x00 <= codepoint && codepoint <= 0x10FFFF); + + // translate codepoint into bytes + if (codepoint < 0x80) + { + // 1-byte characters: 0xxxxxxx (ASCII) + add(static_cast<char_int_type>(codepoint)); + } + else if (codepoint <= 0x7FF) + { + // 2-byte characters: 110xxxxx 10xxxxxx + add(static_cast<char_int_type>(0xC0u | (static_cast<unsigned int>(codepoint) >> 6u))); + add(static_cast<char_int_type>(0x80u | (static_cast<unsigned int>(codepoint) & 0x3Fu))); + } + else if (codepoint <= 0xFFFF) + { + // 3-byte characters: 1110xxxx 10xxxxxx 10xxxxxx + add(static_cast<char_int_type>(0xE0u | (static_cast<unsigned int>(codepoint) >> 12u))); + add(static_cast<char_int_type>(0x80u | ((static_cast<unsigned int>(codepoint) >> 6u) & 0x3Fu))); + add(static_cast<char_int_type>(0x80u | (static_cast<unsigned int>(codepoint) & 0x3Fu))); + } + else + { + // 4-byte characters: 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx + add(static_cast<char_int_type>(0xF0u | (static_cast<unsigned int>(codepoint) >> 18u))); + add(static_cast<char_int_type>(0x80u | ((static_cast<unsigned int>(codepoint) >> 12u) & 0x3Fu))); + add(static_cast<char_int_type>(0x80u | ((static_cast<unsigned int>(codepoint) >> 6u) & 0x3Fu))); + add(static_cast<char_int_type>(0x80u | (static_cast<unsigned int>(codepoint) & 0x3Fu))); + } + + break; + } + + // other characters after escape + default: + error_message = "invalid string: forbidden character after backslash"; + return token_type::parse_error; + } + + break; + } + + // invalid control characters + case 0x00: + { + error_message = "invalid string: control character U+0000 (NUL) must be escaped to \\u0000"; + return token_type::parse_error; + } + + case 0x01: + { + error_message = "invalid string: control character U+0001 (SOH) must be escaped to \\u0001"; + return token_type::parse_error; + } + + case 0x02: + { + error_message = "invalid string: control character U+0002 (STX) must be escaped to \\u0002"; + return token_type::parse_error; + } + + case 0x03: + { + error_message = "invalid string: control character U+0003 (ETX) must be escaped to \\u0003"; + return token_type::parse_error; + } + + case 0x04: + { + error_message = "invalid string: control character U+0004 (EOT) must be escaped to \\u0004"; + return token_type::parse_error; + } + + case 0x05: + { + error_message = "invalid string: control character U+0005 (ENQ) must be escaped to \\u0005"; + return token_type::parse_error; + } + + case 0x06: + { + error_message = "invalid string: control character U+0006 (ACK) must be escaped to \\u0006"; + return token_type::parse_error; + } + + case 0x07: + { + error_message = "invalid string: control character U+0007 (BEL) must be escaped to \\u0007"; + return token_type::parse_error; + } + + case 0x08: + { + error_message = "invalid string: control character U+0008 (BS) must be escaped to \\u0008 or \\b"; + return token_type::parse_error; + } + + case 0x09: + { + error_message = "invalid 
string: control character U+0009 (HT) must be escaped to \\u0009 or \\t"; + return token_type::parse_error; + } + + case 0x0A: + { + error_message = "invalid string: control character U+000A (LF) must be escaped to \\u000A or \\n"; + return token_type::parse_error; + } + + case 0x0B: + { + error_message = "invalid string: control character U+000B (VT) must be escaped to \\u000B"; + return token_type::parse_error; + } + + case 0x0C: + { + error_message = "invalid string: control character U+000C (FF) must be escaped to \\u000C or \\f"; + return token_type::parse_error; + } + + case 0x0D: + { + error_message = "invalid string: control character U+000D (CR) must be escaped to \\u000D or \\r"; + return token_type::parse_error; + } + + case 0x0E: + { + error_message = "invalid string: control character U+000E (SO) must be escaped to \\u000E"; + return token_type::parse_error; + } + + case 0x0F: + { + error_message = "invalid string: control character U+000F (SI) must be escaped to \\u000F"; + return token_type::parse_error; + } + + case 0x10: + { + error_message = "invalid string: control character U+0010 (DLE) must be escaped to \\u0010"; + return token_type::parse_error; + } + + case 0x11: + { + error_message = "invalid string: control character U+0011 (DC1) must be escaped to \\u0011"; + return token_type::parse_error; + } + + case 0x12: + { + error_message = "invalid string: control character U+0012 (DC2) must be escaped to \\u0012"; + return token_type::parse_error; + } + + case 0x13: + { + error_message = "invalid string: control character U+0013 (DC3) must be escaped to \\u0013"; + return token_type::parse_error; + } + + case 0x14: + { + error_message = "invalid string: control character U+0014 (DC4) must be escaped to \\u0014"; + return token_type::parse_error; + } + + case 0x15: + { + error_message = "invalid string: control character U+0015 (NAK) must be escaped to \\u0015"; + return token_type::parse_error; + } + + case 0x16: + { + error_message = "invalid string: control character U+0016 (SYN) must be escaped to \\u0016"; + return token_type::parse_error; + } + + case 0x17: + { + error_message = "invalid string: control character U+0017 (ETB) must be escaped to \\u0017"; + return token_type::parse_error; + } + + case 0x18: + { + error_message = "invalid string: control character U+0018 (CAN) must be escaped to \\u0018"; + return token_type::parse_error; + } + + case 0x19: + { + error_message = "invalid string: control character U+0019 (EM) must be escaped to \\u0019"; + return token_type::parse_error; + } + + case 0x1A: + { + error_message = "invalid string: control character U+001A (SUB) must be escaped to \\u001A"; + return token_type::parse_error; + } + + case 0x1B: + { + error_message = "invalid string: control character U+001B (ESC) must be escaped to \\u001B"; + return token_type::parse_error; + } + + case 0x1C: + { + error_message = "invalid string: control character U+001C (FS) must be escaped to \\u001C"; + return token_type::parse_error; + } + + case 0x1D: + { + error_message = "invalid string: control character U+001D (GS) must be escaped to \\u001D"; + return token_type::parse_error; + } + + case 0x1E: + { + error_message = "invalid string: control character U+001E (RS) must be escaped to \\u001E"; + return token_type::parse_error; + } + + case 0x1F: + { + error_message = "invalid string: control character U+001F (US) must be escaped to \\u001F"; + return token_type::parse_error; + } + + // U+0020..U+007F (except U+0022 (quote) and U+005C (backspace)) + case 0x20: + case 
0x21: + case 0x23: + case 0x24: + case 0x25: + case 0x26: + case 0x27: + case 0x28: + case 0x29: + case 0x2A: + case 0x2B: + case 0x2C: + case 0x2D: + case 0x2E: + case 0x2F: + case 0x30: + case 0x31: + case 0x32: + case 0x33: + case 0x34: + case 0x35: + case 0x36: + case 0x37: + case 0x38: + case 0x39: + case 0x3A: + case 0x3B: + case 0x3C: + case 0x3D: + case 0x3E: + case 0x3F: + case 0x40: + case 0x41: + case 0x42: + case 0x43: + case 0x44: + case 0x45: + case 0x46: + case 0x47: + case 0x48: + case 0x49: + case 0x4A: + case 0x4B: + case 0x4C: + case 0x4D: + case 0x4E: + case 0x4F: + case 0x50: + case 0x51: + case 0x52: + case 0x53: + case 0x54: + case 0x55: + case 0x56: + case 0x57: + case 0x58: + case 0x59: + case 0x5A: + case 0x5B: + case 0x5D: + case 0x5E: + case 0x5F: + case 0x60: + case 0x61: + case 0x62: + case 0x63: + case 0x64: + case 0x65: + case 0x66: + case 0x67: + case 0x68: + case 0x69: + case 0x6A: + case 0x6B: + case 0x6C: + case 0x6D: + case 0x6E: + case 0x6F: + case 0x70: + case 0x71: + case 0x72: + case 0x73: + case 0x74: + case 0x75: + case 0x76: + case 0x77: + case 0x78: + case 0x79: + case 0x7A: + case 0x7B: + case 0x7C: + case 0x7D: + case 0x7E: + case 0x7F: + { + add(current); + break; + } + + // U+0080..U+07FF: bytes C2..DF 80..BF + case 0xC2: + case 0xC3: + case 0xC4: + case 0xC5: + case 0xC6: + case 0xC7: + case 0xC8: + case 0xC9: + case 0xCA: + case 0xCB: + case 0xCC: + case 0xCD: + case 0xCE: + case 0xCF: + case 0xD0: + case 0xD1: + case 0xD2: + case 0xD3: + case 0xD4: + case 0xD5: + case 0xD6: + case 0xD7: + case 0xD8: + case 0xD9: + case 0xDA: + case 0xDB: + case 0xDC: + case 0xDD: + case 0xDE: + case 0xDF: + { + if (JSON_HEDLEY_UNLIKELY(!next_byte_in_range({0x80, 0xBF}))) + { + return token_type::parse_error; + } + break; + } + + // U+0800..U+0FFF: bytes E0 A0..BF 80..BF + case 0xE0: + { + if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0xA0, 0xBF, 0x80, 0xBF})))) + { + return token_type::parse_error; + } + break; + } + + // U+1000..U+CFFF: bytes E1..EC 80..BF 80..BF + // U+E000..U+FFFF: bytes EE..EF 80..BF 80..BF + case 0xE1: + case 0xE2: + case 0xE3: + case 0xE4: + case 0xE5: + case 0xE6: + case 0xE7: + case 0xE8: + case 0xE9: + case 0xEA: + case 0xEB: + case 0xEC: + case 0xEE: + case 0xEF: + { + if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0x80, 0xBF, 0x80, 0xBF})))) + { + return token_type::parse_error; + } + break; + } + + // U+D000..U+D7FF: bytes ED 80..9F 80..BF + case 0xED: + { + if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0x80, 0x9F, 0x80, 0xBF})))) + { + return token_type::parse_error; + } + break; + } + + // U+10000..U+3FFFF F0 90..BF 80..BF 80..BF + case 0xF0: + { + if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0x90, 0xBF, 0x80, 0xBF, 0x80, 0xBF})))) + { + return token_type::parse_error; + } + break; + } + + // U+40000..U+FFFFF F1..F3 80..BF 80..BF 80..BF + case 0xF1: + case 0xF2: + case 0xF3: + { + if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0x80, 0xBF, 0x80, 0xBF, 0x80, 0xBF})))) + { + return token_type::parse_error; + } + break; + } + + // U+100000..U+10FFFF F4 80..8F 80..BF 80..BF + case 0xF4: + { + if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0x80, 0x8F, 0x80, 0xBF, 0x80, 0xBF})))) + { + return token_type::parse_error; + } + break; + } + + // remaining bytes (80..C1 and F5..FF) are ill-formed + default: + { + error_message = "invalid string: ill-formed UTF-8 byte"; + return token_type::parse_error; + } + } + } + } + + /*! 
+ * @brief scan a comment + * @return whether comment could be scanned successfully + */ + bool scan_comment() + { + switch (get()) + { + // single-line comments skip input until a newline or EOF is read + case '/': + { + while (true) + { + switch (get()) + { + case '\n': + case '\r': + case std::char_traits<char_type>::eof(): + case '\0': + return true; + + default: + break; + } + } + } + + // multi-line comments skip input until */ is read + case '*': + { + while (true) + { + switch (get()) + { + case std::char_traits<char_type>::eof(): + case '\0': + { + error_message = "invalid comment; missing closing '*/'"; + return false; + } + + case '*': + { + switch (get()) + { + case '/': + return true; + + default: + { + unget(); + continue; + } + } + } + + default: + continue; + } + } + } + + // unexpected character after reading '/' + default: + { + error_message = "invalid comment; expecting '/' or '*' after '/'"; + return false; + } + } + } + + JSON_HEDLEY_NON_NULL(2) + static void strtof(float& f, const char* str, char** endptr) noexcept + { + f = std::strtof(str, endptr); + } + + JSON_HEDLEY_NON_NULL(2) + static void strtof(double& f, const char* str, char** endptr) noexcept + { + f = std::strtod(str, endptr); + } + + JSON_HEDLEY_NON_NULL(2) + static void strtof(long double& f, const char* str, char** endptr) noexcept + { + f = std::strtold(str, endptr); + } + + /*! + @brief scan a number literal + + This function scans a string according to Sect. 6 of RFC 8259. + + The function is realized with a deterministic finite state machine derived + from the grammar described in RFC 8259. Starting in state "init", the + input is read and used to determined the next state. Only state "done" + accepts the number. State "error" is a trap state to model errors. In the + table below, "anything" means any character but the ones listed before. + + state | 0 | 1-9 | e E | + | - | . | anything + ---------|----------|----------|----------|---------|---------|----------|----------- + init | zero | any1 | [error] | [error] | minus | [error] | [error] + minus | zero | any1 | [error] | [error] | [error] | [error] | [error] + zero | done | done | exponent | done | done | decimal1 | done + any1 | any1 | any1 | exponent | done | done | decimal1 | done + decimal1 | decimal2 | decimal2 | [error] | [error] | [error] | [error] | [error] + decimal2 | decimal2 | decimal2 | exponent | done | done | done | done + exponent | any2 | any2 | [error] | sign | sign | [error] | [error] + sign | any2 | any2 | [error] | [error] | [error] | [error] | [error] + any2 | any2 | any2 | done | done | done | done | done + + The state machine is realized with one label per state (prefixed with + "scan_number_") and `goto` statements between them. The state machine + contains cycles, but any cycle can be left when EOF is read. Therefore, + the function is guaranteed to terminate. + + During scanning, the read bytes are stored in token_buffer. This string is + then converted to a signed integer, an unsigned integer, or a + floating-point number. + + @return token_type::value_unsigned, token_type::value_integer, or + token_type::value_float if number could be successfully scanned, + token_type::parse_error otherwise + + @note The scanner is independent of the current locale. Internally, the + locale's decimal point is used instead of `.` to work with the + locale-dependent converters. 
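+
+    A few illustrative inputs and the tokens they yield under this grammar
+    (added example, not exhaustive): "42" gives value_unsigned, "-42" and
+    "-0" give value_integer, "3.14" and "1e-3" give value_float. A leading
+    '+' or a lone '.' never reaches this function (scan() rejects them as
+    invalid literals), and "01" is scanned as the number 0 followed by a
+    separate token for "1".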
+ */ + token_type scan_number() // lgtm [cpp/use-of-goto] + { + // reset token_buffer to store the number's bytes + reset(); + + // the type of the parsed number; initially set to unsigned; will be + // changed if minus sign, decimal point or exponent is read + token_type number_type = token_type::value_unsigned; + + // state (init): we just found out we need to scan a number + switch (current) + { + case '-': + { + add(current); + goto scan_number_minus; + } + + case '0': + { + add(current); + goto scan_number_zero; + } + + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto scan_number_any1; + } + + // all other characters are rejected outside scan_number() + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) LCOV_EXCL_LINE + } + +scan_number_minus: + // state: we just parsed a leading minus sign + number_type = token_type::value_integer; + switch (get()) + { + case '0': + { + add(current); + goto scan_number_zero; + } + + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto scan_number_any1; + } + + default: + { + error_message = "invalid number; expected digit after '-'"; + return token_type::parse_error; + } + } + +scan_number_zero: + // state: we just parse a zero (maybe with a leading minus sign) + switch (get()) + { + case '.': + { + add(decimal_point_char); + goto scan_number_decimal1; + } + + case 'e': + case 'E': + { + add(current); + goto scan_number_exponent; + } + + default: + goto scan_number_done; + } + +scan_number_any1: + // state: we just parsed a number 0-9 (maybe with a leading minus sign) + switch (get()) + { + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto scan_number_any1; + } + + case '.': + { + add(decimal_point_char); + goto scan_number_decimal1; + } + + case 'e': + case 'E': + { + add(current); + goto scan_number_exponent; + } + + default: + goto scan_number_done; + } + +scan_number_decimal1: + // state: we just parsed a decimal point + number_type = token_type::value_float; + switch (get()) + { + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto scan_number_decimal2; + } + + default: + { + error_message = "invalid number; expected digit after '.'"; + return token_type::parse_error; + } + } + +scan_number_decimal2: + // we just parsed at least one number after a decimal point + switch (get()) + { + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto scan_number_decimal2; + } + + case 'e': + case 'E': + { + add(current); + goto scan_number_exponent; + } + + default: + goto scan_number_done; + } + +scan_number_exponent: + // we just parsed an exponent + number_type = token_type::value_float; + switch (get()) + { + case '+': + case '-': + { + add(current); + goto scan_number_sign; + } + + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto scan_number_any2; + } + + default: + { + error_message = + "invalid number; expected '+', '-', or digit after exponent"; + return token_type::parse_error; + } + } + +scan_number_sign: + // we just parsed an exponent sign + switch (get()) + 
{ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto scan_number_any2; + } + + default: + { + error_message = "invalid number; expected digit after exponent sign"; + return token_type::parse_error; + } + } + +scan_number_any2: + // we just parsed a number after the exponent or exponent sign + switch (get()) + { + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto scan_number_any2; + } + + default: + goto scan_number_done; + } + +scan_number_done: + // unget the character after the number (we only read it to know that + // we are done scanning a number) + unget(); + + char* endptr = nullptr; // NOLINT(cppcoreguidelines-pro-type-vararg,hicpp-vararg) + errno = 0; + + // try to parse integers first and fall back to floats + if (number_type == token_type::value_unsigned) + { + const auto x = std::strtoull(token_buffer.data(), &endptr, 10); + + // we checked the number format before + JSON_ASSERT(endptr == token_buffer.data() + token_buffer.size()); + + if (errno == 0) + { + value_unsigned = static_cast<number_unsigned_t>(x); + if (value_unsigned == x) + { + return token_type::value_unsigned; + } + } + } + else if (number_type == token_type::value_integer) + { + const auto x = std::strtoll(token_buffer.data(), &endptr, 10); + + // we checked the number format before + JSON_ASSERT(endptr == token_buffer.data() + token_buffer.size()); + + if (errno == 0) + { + value_integer = static_cast<number_integer_t>(x); + if (value_integer == x) + { + return token_type::value_integer; + } + } + } + + // this code is reached if we parse a floating-point number or if an + // integer conversion above failed + strtof(value_float, token_buffer.data(), &endptr); + + // we checked the number format before + JSON_ASSERT(endptr == token_buffer.data() + token_buffer.size()); + + return token_type::value_float; + } + + /*! + @param[in] literal_text the literal text to expect + @param[in] length the length of the passed literal text + @param[in] return_type the token type to return on success + */ + JSON_HEDLEY_NON_NULL(2) + token_type scan_literal(const char_type* literal_text, const std::size_t length, + token_type return_type) + { + JSON_ASSERT(std::char_traits<char_type>::to_char_type(current) == literal_text[0]); + for (std::size_t i = 1; i < length; ++i) + { + if (JSON_HEDLEY_UNLIKELY(std::char_traits<char_type>::to_char_type(get()) != literal_text[i])) + { + error_message = "invalid literal"; + return token_type::parse_error; + } + } + return return_type; + } + + ///////////////////// + // input management + ///////////////////// + + /// reset token_buffer; current character is beginning of token + void reset() noexcept + { + token_buffer.clear(); + token_string.clear(); + token_string.push_back(std::char_traits<char_type>::to_char_type(current)); + } + + /* + @brief get next character from the input + + This function provides the interface to the used input adapter. It does + not throw in case the input reached EOF, but returns a + `std::char_traits<char>::eof()` in that case. Stores the scanned characters + for use in error messages. 
+ + @return character read from the input + */ + char_int_type get() + { + ++position.chars_read_total; + ++position.chars_read_current_line; + + if (next_unget) + { + // just reset the next_unget variable and work with current + next_unget = false; + } + else + { + current = ia.get_character(); + } + + if (JSON_HEDLEY_LIKELY(current != std::char_traits<char_type>::eof())) + { + token_string.push_back(std::char_traits<char_type>::to_char_type(current)); + } + + if (current == '\n') + { + ++position.lines_read; + position.chars_read_current_line = 0; + } + + return current; + } + + /*! + @brief unget current character (read it again on next get) + + We implement unget by setting variable next_unget to true. The input is not + changed - we just simulate ungetting by modifying chars_read_total, + chars_read_current_line, and token_string. The next call to get() will + behave as if the unget character is read again. + */ + void unget() + { + next_unget = true; + + --position.chars_read_total; + + // in case we "unget" a newline, we have to also decrement the lines_read + if (position.chars_read_current_line == 0) + { + if (position.lines_read > 0) + { + --position.lines_read; + } + } + else + { + --position.chars_read_current_line; + } + + if (JSON_HEDLEY_LIKELY(current != std::char_traits<char_type>::eof())) + { + JSON_ASSERT(!token_string.empty()); + token_string.pop_back(); + } + } + + /// add a character to token_buffer + void add(char_int_type c) + { + token_buffer.push_back(static_cast<typename string_t::value_type>(c)); + } + + public: + ///////////////////// + // value getters + ///////////////////// + + /// return integer value + constexpr number_integer_t get_number_integer() const noexcept + { + return value_integer; + } + + /// return unsigned integer value + constexpr number_unsigned_t get_number_unsigned() const noexcept + { + return value_unsigned; + } + + /// return floating-point value + constexpr number_float_t get_number_float() const noexcept + { + return value_float; + } + + /// return current string value (implicitly resets the token; useful only once) + string_t& get_string() + { + return token_buffer; + } + + ///////////////////// + // diagnostics + ///////////////////// + + /// return position of last read token + constexpr position_t get_position() const noexcept + { + return position; + } + + /// return the last read token (for errors only). Will never contain EOF + /// (an arbitrary value that is not a valid char value, often -1), because + /// 255 may legitimately occur. May contain NUL, which should be escaped. + std::string get_token_string() const + { + // escape control characters + std::string result; + for (const auto c : token_string) + { + if (static_cast<unsigned char>(c) <= '\x1F') + { + // escape control characters + std::array<char, 9> cs{{}}; + static_cast<void>((std::snprintf)(cs.data(), cs.size(), "<U+%.4X>", static_cast<unsigned char>(c))); // NOLINT(cppcoreguidelines-pro-type-vararg,hicpp-vararg) + result += cs.data(); + } + else + { + // add character as is + result.push_back(static_cast<std::string::value_type>(c)); + } + } + + return result; + } + + /// return syntax error message + JSON_HEDLEY_RETURNS_NON_NULL + constexpr const char* get_error_message() const noexcept + { + return error_message; + } + + ///////////////////// + // actual scanner + ///////////////////// + + /*! 
+ @brief skip the UTF-8 byte order mark + @return true iff there is no BOM or the correct BOM has been skipped + */ + bool skip_bom() + { + if (get() == 0xEF) + { + // check if we completely parse the BOM + return get() == 0xBB && get() == 0xBF; + } + + // the first character is not the beginning of the BOM; unget it to + // process is later + unget(); + return true; + } + + void skip_whitespace() + { + do + { + get(); + } + while (current == ' ' || current == '\t' || current == '\n' || current == '\r'); + } + + token_type scan() + { + // initially, skip the BOM + if (position.chars_read_total == 0 && !skip_bom()) + { + error_message = "invalid BOM; must be 0xEF 0xBB 0xBF if given"; + return token_type::parse_error; + } + + // read next character and ignore whitespace + skip_whitespace(); + + // ignore comments + while (ignore_comments && current == '/') + { + if (!scan_comment()) + { + return token_type::parse_error; + } + + // skip following whitespace + skip_whitespace(); + } + + switch (current) + { + // structural characters + case '[': + return token_type::begin_array; + case ']': + return token_type::end_array; + case '{': + return token_type::begin_object; + case '}': + return token_type::end_object; + case ':': + return token_type::name_separator; + case ',': + return token_type::value_separator; + + // literals + case 't': + { + std::array<char_type, 4> true_literal = {{static_cast<char_type>('t'), static_cast<char_type>('r'), static_cast<char_type>('u'), static_cast<char_type>('e')}}; + return scan_literal(true_literal.data(), true_literal.size(), token_type::literal_true); + } + case 'f': + { + std::array<char_type, 5> false_literal = {{static_cast<char_type>('f'), static_cast<char_type>('a'), static_cast<char_type>('l'), static_cast<char_type>('s'), static_cast<char_type>('e')}}; + return scan_literal(false_literal.data(), false_literal.size(), token_type::literal_false); + } + case 'n': + { + std::array<char_type, 4> null_literal = {{static_cast<char_type>('n'), static_cast<char_type>('u'), static_cast<char_type>('l'), static_cast<char_type>('l')}}; + return scan_literal(null_literal.data(), null_literal.size(), token_type::literal_null); + } + + // string + case '\"': + return scan_string(); + + // number + case '-': + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + return scan_number(); + + // end of input (the null byte is needed when parsing from + // string literals) + case '\0': + case std::char_traits<char_type>::eof(): + return token_type::end_of_input; + + // error + default: + error_message = "invalid literal"; + return token_type::parse_error; + } + } + + private: + /// input adapter + InputAdapterType ia; + + /// whether comments should be ignored (true) or signaled as errors (false) + const bool ignore_comments = false; + + /// the current character + char_int_type current = std::char_traits<char_type>::eof(); + + /// whether the next get() call should just return current + bool next_unget = false; + + /// the start position of the current token + position_t position {}; + + /// raw input token string (for error messages) + std::vector<char_type> token_string {}; + + /// buffer for variable-length tokens (numbers, strings) + string_t token_buffer {}; + + /// a description of occurred lexer errors + const char* error_message = ""; + + // number values + number_integer_t value_integer = 0; + number_unsigned_t value_unsigned = 0; + number_float_t value_float = 0; + + /// the decimal point + 
const char_int_type decimal_point_char = '.'; +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/meta/is_sax.hpp> + + +#include <cstdint> // size_t +#include <utility> // declval +#include <string> // string + +// #include <nlohmann/detail/meta/detected.hpp> + +// #include <nlohmann/detail/meta/type_traits.hpp> + + +namespace nlohmann +{ +namespace detail +{ +template<typename T> +using null_function_t = decltype(std::declval<T&>().null()); + +template<typename T> +using boolean_function_t = + decltype(std::declval<T&>().boolean(std::declval<bool>())); + +template<typename T, typename Integer> +using number_integer_function_t = + decltype(std::declval<T&>().number_integer(std::declval<Integer>())); + +template<typename T, typename Unsigned> +using number_unsigned_function_t = + decltype(std::declval<T&>().number_unsigned(std::declval<Unsigned>())); + +template<typename T, typename Float, typename String> +using number_float_function_t = decltype(std::declval<T&>().number_float( + std::declval<Float>(), std::declval<const String&>())); + +template<typename T, typename String> +using string_function_t = + decltype(std::declval<T&>().string(std::declval<String&>())); + +template<typename T, typename Binary> +using binary_function_t = + decltype(std::declval<T&>().binary(std::declval<Binary&>())); + +template<typename T> +using start_object_function_t = + decltype(std::declval<T&>().start_object(std::declval<std::size_t>())); + +template<typename T, typename String> +using key_function_t = + decltype(std::declval<T&>().key(std::declval<String&>())); + +template<typename T> +using end_object_function_t = decltype(std::declval<T&>().end_object()); + +template<typename T> +using start_array_function_t = + decltype(std::declval<T&>().start_array(std::declval<std::size_t>())); + +template<typename T> +using end_array_function_t = decltype(std::declval<T&>().end_array()); + +template<typename T, typename Exception> +using parse_error_function_t = decltype(std::declval<T&>().parse_error( + std::declval<std::size_t>(), std::declval<const std::string&>(), + std::declval<const Exception&>())); + +template<typename SAX, typename BasicJsonType> +struct is_sax +{ + private: + static_assert(is_basic_json<BasicJsonType>::value, + "BasicJsonType must be of type basic_json<...>"); + + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using binary_t = typename BasicJsonType::binary_t; + using exception_t = typename BasicJsonType::exception; + + public: + static constexpr bool value = + is_detected_exact<bool, null_function_t, SAX>::value && + is_detected_exact<bool, boolean_function_t, SAX>::value && + is_detected_exact<bool, number_integer_function_t, SAX, number_integer_t>::value && + is_detected_exact<bool, number_unsigned_function_t, SAX, number_unsigned_t>::value && + is_detected_exact<bool, number_float_function_t, SAX, number_float_t, string_t>::value && + is_detected_exact<bool, string_function_t, SAX, string_t>::value && + is_detected_exact<bool, binary_function_t, SAX, binary_t>::value && + is_detected_exact<bool, start_object_function_t, SAX>::value && + is_detected_exact<bool, key_function_t, SAX, string_t>::value && + is_detected_exact<bool, end_object_function_t, SAX>::value && + is_detected_exact<bool, 
start_array_function_t, SAX>::value && + is_detected_exact<bool, end_array_function_t, SAX>::value && + is_detected_exact<bool, parse_error_function_t, SAX, exception_t>::value; +}; + +template<typename SAX, typename BasicJsonType> +struct is_sax_static_asserts +{ + private: + static_assert(is_basic_json<BasicJsonType>::value, + "BasicJsonType must be of type basic_json<...>"); + + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using binary_t = typename BasicJsonType::binary_t; + using exception_t = typename BasicJsonType::exception; + + public: + static_assert(is_detected_exact<bool, null_function_t, SAX>::value, + "Missing/invalid function: bool null()"); + static_assert(is_detected_exact<bool, boolean_function_t, SAX>::value, + "Missing/invalid function: bool boolean(bool)"); + static_assert(is_detected_exact<bool, boolean_function_t, SAX>::value, + "Missing/invalid function: bool boolean(bool)"); + static_assert( + is_detected_exact<bool, number_integer_function_t, SAX, + number_integer_t>::value, + "Missing/invalid function: bool number_integer(number_integer_t)"); + static_assert( + is_detected_exact<bool, number_unsigned_function_t, SAX, + number_unsigned_t>::value, + "Missing/invalid function: bool number_unsigned(number_unsigned_t)"); + static_assert(is_detected_exact<bool, number_float_function_t, SAX, + number_float_t, string_t>::value, + "Missing/invalid function: bool number_float(number_float_t, const string_t&)"); + static_assert( + is_detected_exact<bool, string_function_t, SAX, string_t>::value, + "Missing/invalid function: bool string(string_t&)"); + static_assert( + is_detected_exact<bool, binary_function_t, SAX, binary_t>::value, + "Missing/invalid function: bool binary(binary_t&)"); + static_assert(is_detected_exact<bool, start_object_function_t, SAX>::value, + "Missing/invalid function: bool start_object(std::size_t)"); + static_assert(is_detected_exact<bool, key_function_t, SAX, string_t>::value, + "Missing/invalid function: bool key(string_t&)"); + static_assert(is_detected_exact<bool, end_object_function_t, SAX>::value, + "Missing/invalid function: bool end_object()"); + static_assert(is_detected_exact<bool, start_array_function_t, SAX>::value, + "Missing/invalid function: bool start_array(std::size_t)"); + static_assert(is_detected_exact<bool, end_array_function_t, SAX>::value, + "Missing/invalid function: bool end_array()"); + static_assert( + is_detected_exact<bool, parse_error_function_t, SAX, exception_t>::value, + "Missing/invalid function: bool parse_error(std::size_t, const " + "std::string&, const exception&)"); +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/meta/type_traits.hpp> + +// #include <nlohmann/detail/string_concat.hpp> + +// #include <nlohmann/detail/value_t.hpp> + + +namespace nlohmann +{ +namespace detail +{ + +/// how to treat CBOR tags +enum class cbor_tag_handler_t +{ + error, ///< throw a parse_error exception in case of a tag + ignore, ///< ignore tags + store ///< store tags as binary type +}; + +/*! 
+@brief determine system byte order + +@return true if and only if system's byte order is little endian + +@note from https://stackoverflow.com/a/1001328/266378 +*/ +static inline bool little_endianness(int num = 1) noexcept +{ + return *reinterpret_cast<char*>(&num) == 1; +} + + +/////////////////// +// binary reader // +/////////////////// + +/*! +@brief deserialization of CBOR, MessagePack, and UBJSON values +*/ +template<typename BasicJsonType, typename InputAdapterType, typename SAX = json_sax_dom_parser<BasicJsonType>> +class binary_reader +{ + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using binary_t = typename BasicJsonType::binary_t; + using json_sax_t = SAX; + using char_type = typename InputAdapterType::char_type; + using char_int_type = typename std::char_traits<char_type>::int_type; + + public: + /*! + @brief create a binary reader + + @param[in] adapter input adapter to read from + */ + explicit binary_reader(InputAdapterType&& adapter, const input_format_t format = input_format_t::json) noexcept : ia(std::move(adapter)), input_format(format) + { + (void)detail::is_sax_static_asserts<SAX, BasicJsonType> {}; + } + + // make class move-only + binary_reader(const binary_reader&) = delete; + binary_reader(binary_reader&&) = default; // NOLINT(hicpp-noexcept-move,performance-noexcept-move-constructor) + binary_reader& operator=(const binary_reader&) = delete; + binary_reader& operator=(binary_reader&&) = default; // NOLINT(hicpp-noexcept-move,performance-noexcept-move-constructor) + ~binary_reader() = default; + + /*! + @param[in] format the binary format to parse + @param[in] sax_ a SAX event processor + @param[in] strict whether to expect the input to be consumed completed + @param[in] tag_handler how to treat CBOR tags + + @return whether parsing was successful + */ + JSON_HEDLEY_NON_NULL(3) + bool sax_parse(const input_format_t format, + json_sax_t* sax_, + const bool strict = true, + const cbor_tag_handler_t tag_handler = cbor_tag_handler_t::error) + { + sax = sax_; + bool result = false; + + switch (format) + { + case input_format_t::bson: + result = parse_bson_internal(); + break; + + case input_format_t::cbor: + result = parse_cbor_internal(true, tag_handler); + break; + + case input_format_t::msgpack: + result = parse_msgpack_internal(); + break; + + case input_format_t::ubjson: + case input_format_t::bjdata: + result = parse_ubjson_internal(); + break; + + case input_format_t::json: // LCOV_EXCL_LINE + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) LCOV_EXCL_LINE + } + + // strict mode: next byte must be EOF + if (result && strict) + { + if (input_format == input_format_t::ubjson || input_format == input_format_t::bjdata) + { + get_ignore_noop(); + } + else + { + get(); + } + + if (JSON_HEDLEY_UNLIKELY(current != std::char_traits<char_type>::eof())) + { + return sax->parse_error(chars_read, get_token_string(), parse_error::create(110, chars_read, + exception_message(input_format, concat("expected end of input; last byte: 0x", get_token_string()), "value"), nullptr)); + } + } + + return result; + } + + private: + ////////// + // BSON // + ////////// + + /*! + @brief Reads in a BSON-object and passes it to the SAX-parser. 
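// Illustrative byte-level view (not part of this patch) of the BSON framing
// consumed here: a little-endian int32 total size, a list of typed elements,
// and a terminating 0x00 byte. For the document {"a": 1}:
//
//   0x0C 0x00 0x00 0x00   // document size = 12 bytes, including this field
//   0x10                  // element type: int32
//   0x61 0x00             // cstring key "a"
//   0x01 0x00 0x00 0x00   // value 1, little endian
//   0x00                  // end of document
//
// A hedged round-trip check through the public API:
#include <cassert>
#include <cstdint>
#include <vector>
#include <nlohmann/json.hpp>

inline void bson_roundtrip_example()
{
    const std::vector<std::uint8_t> bytes =
        {0x0C, 0x00, 0x00, 0x00, 0x10, 0x61, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00};
    const nlohmann::json j = nlohmann::json::from_bson(bytes);
    assert(j.at("a") == 1);
}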
+ @return whether a valid BSON-value was passed to the SAX parser + */ + bool parse_bson_internal() + { + std::int32_t document_size{}; + get_number<std::int32_t, true>(input_format_t::bson, document_size); + + if (JSON_HEDLEY_UNLIKELY(!sax->start_object(static_cast<std::size_t>(-1)))) + { + return false; + } + + if (JSON_HEDLEY_UNLIKELY(!parse_bson_element_list(/*is_array*/false))) + { + return false; + } + + return sax->end_object(); + } + + /*! + @brief Parses a C-style string from the BSON input. + @param[in,out] result A reference to the string variable where the read + string is to be stored. + @return `true` if the \x00-byte indicating the end of the string was + encountered before the EOF; false` indicates an unexpected EOF. + */ + bool get_bson_cstr(string_t& result) + { + auto out = std::back_inserter(result); + while (true) + { + get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::bson, "cstring"))) + { + return false; + } + if (current == 0x00) + { + return true; + } + *out++ = static_cast<typename string_t::value_type>(current); + } + } + + /*! + @brief Parses a zero-terminated string of length @a len from the BSON + input. + @param[in] len The length (including the zero-byte at the end) of the + string to be read. + @param[in,out] result A reference to the string variable where the read + string is to be stored. + @tparam NumberType The type of the length @a len + @pre len >= 1 + @return `true` if the string was successfully parsed + */ + template<typename NumberType> + bool get_bson_string(const NumberType len, string_t& result) + { + if (JSON_HEDLEY_UNLIKELY(len < 1)) + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, + exception_message(input_format_t::bson, concat("string length must be at least 1, is ", std::to_string(len)), "string"), nullptr)); + } + + return get_string(input_format_t::bson, len - static_cast<NumberType>(1), result) && get() != std::char_traits<char_type>::eof(); + } + + /*! + @brief Parses a byte array input of length @a len from the BSON input. + @param[in] len The length of the byte array to be read. + @param[in,out] result A reference to the binary variable where the read + array is to be stored. + @tparam NumberType The type of the length @a len + @pre len >= 0 + @return `true` if the byte array was successfully parsed + */ + template<typename NumberType> + bool get_bson_binary(const NumberType len, binary_t& result) + { + if (JSON_HEDLEY_UNLIKELY(len < 0)) + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, + exception_message(input_format_t::bson, concat("byte array length cannot be negative, is ", std::to_string(len)), "binary"), nullptr)); + } + + // All BSON binary values have a subtype + std::uint8_t subtype{}; + get_number<std::uint8_t>(input_format_t::bson, subtype); + result.set_subtype(subtype); + + return get_binary(input_format_t::bson, len, result); + } + + /*! + @brief Read a BSON document element of the given @a element_type. + @param[in] element_type The BSON element type, c.f. http://bsonspec.org/spec.html + @param[in] element_type_parse_position The position in the input stream, + where the `element_type` was read. + @warning Not all BSON element types are supported yet. An unsupported + @a element_type will give rise to a parse_error.114: + Unsupported BSON record type 0x... 
+ @return whether a valid BSON-object/array was passed to the SAX parser + */ + bool parse_bson_element_internal(const char_int_type element_type, + const std::size_t element_type_parse_position) + { + switch (element_type) + { + case 0x01: // double + { + double number{}; + return get_number<double, true>(input_format_t::bson, number) && sax->number_float(static_cast<number_float_t>(number), ""); + } + + case 0x02: // string + { + std::int32_t len{}; + string_t value; + return get_number<std::int32_t, true>(input_format_t::bson, len) && get_bson_string(len, value) && sax->string(value); + } + + case 0x03: // object + { + return parse_bson_internal(); + } + + case 0x04: // array + { + return parse_bson_array(); + } + + case 0x05: // binary + { + std::int32_t len{}; + binary_t value; + return get_number<std::int32_t, true>(input_format_t::bson, len) && get_bson_binary(len, value) && sax->binary(value); + } + + case 0x08: // boolean + { + return sax->boolean(get() != 0); + } + + case 0x0A: // null + { + return sax->null(); + } + + case 0x10: // int32 + { + std::int32_t value{}; + return get_number<std::int32_t, true>(input_format_t::bson, value) && sax->number_integer(value); + } + + case 0x12: // int64 + { + std::int64_t value{}; + return get_number<std::int64_t, true>(input_format_t::bson, value) && sax->number_integer(value); + } + + default: // anything else not supported (yet) + { + std::array<char, 3> cr{{}}; + static_cast<void>((std::snprintf)(cr.data(), cr.size(), "%.2hhX", static_cast<unsigned char>(element_type))); // NOLINT(cppcoreguidelines-pro-type-vararg,hicpp-vararg) + std::string cr_str{cr.data()}; + return sax->parse_error(element_type_parse_position, cr_str, + parse_error::create(114, element_type_parse_position, concat("Unsupported BSON record type 0x", cr_str), nullptr)); + } + } + } + + /*! + @brief Read a BSON element list (as specified in the BSON-spec) + + The same binary layout is used for objects and arrays, hence it must be + indicated with the argument @a is_array which one is expected + (true --> array, false --> object). + + @param[in] is_array Determines if the element list being read is to be + treated as an object (@a is_array == false), or as an + array (@a is_array == true). + @return whether a valid BSON-object/array was passed to the SAX parser + */ + bool parse_bson_element_list(const bool is_array) + { + string_t key; + + while (auto element_type = get()) + { + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::bson, "element list"))) + { + return false; + } + + const std::size_t element_type_parse_position = chars_read; + if (JSON_HEDLEY_UNLIKELY(!get_bson_cstr(key))) + { + return false; + } + + if (!is_array && !sax->key(key)) + { + return false; + } + + if (JSON_HEDLEY_UNLIKELY(!parse_bson_element_internal(element_type, element_type_parse_position))) + { + return false; + } + + // get_bson_cstr only appends + key.clear(); + } + + return true; + } + + /*! + @brief Reads an array from the BSON input and passes it to the SAX-parser. + @return whether a valid BSON-array was passed to the SAX parser + */ + bool parse_bson_array() + { + std::int32_t document_size{}; + get_number<std::int32_t, true>(input_format_t::bson, document_size); + + if (JSON_HEDLEY_UNLIKELY(!sax->start_array(static_cast<std::size_t>(-1)))) + { + return false; + } + + if (JSON_HEDLEY_UNLIKELY(!parse_bson_element_list(/*is_array*/true))) + { + return false; + } + + return sax->end_array(); + } + + ////////// + // CBOR // + ////////// + + /*! 
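// Illustrative note, not part of this patch: the large switch below follows
// CBOR's initial-byte layout (RFC 8949) -- the high 3 bits select the major
// type and the low 5 bits carry the additional information (0..23 inline
// value, 24..27 a following 1/2/4/8-byte argument, 31 indefinite length).
// A small sketch of that split, with names chosen here for illustration only:
#include <cstdint>

struct cbor_initial_byte
{
    std::uint8_t major_type;      // 0 unsigned, 1 negative, 2 byte string, 3 text string,
                                  // 4 array, 5 map, 6 tag, 7 simple/float
    std::uint8_t additional_info; // 0..23 inline, 24..27 = 1/2/4/8-byte argument, 31 indefinite
};

inline cbor_initial_byte split_cbor_initial_byte(std::uint8_t byte)
{
    return {static_cast<std::uint8_t>(byte >> 5u),
            static_cast<std::uint8_t>(byte & 0x1Fu)};
}
// e.g. 0x98 ("array, one-byte length follows") splits into {4, 24}.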
+ @param[in] get_char whether a new character should be retrieved from the + input (true) or whether the last read character should + be considered instead (false) + @param[in] tag_handler how CBOR tags should be treated + + @return whether a valid CBOR value was passed to the SAX parser + */ + bool parse_cbor_internal(const bool get_char, + const cbor_tag_handler_t tag_handler) + { + switch (get_char ? get() : current) + { + // EOF + case std::char_traits<char_type>::eof(): + return unexpect_eof(input_format_t::cbor, "value"); + + // Integer 0x00..0x17 (0..23) + case 0x00: + case 0x01: + case 0x02: + case 0x03: + case 0x04: + case 0x05: + case 0x06: + case 0x07: + case 0x08: + case 0x09: + case 0x0A: + case 0x0B: + case 0x0C: + case 0x0D: + case 0x0E: + case 0x0F: + case 0x10: + case 0x11: + case 0x12: + case 0x13: + case 0x14: + case 0x15: + case 0x16: + case 0x17: + return sax->number_unsigned(static_cast<number_unsigned_t>(current)); + + case 0x18: // Unsigned integer (one-byte uint8_t follows) + { + std::uint8_t number{}; + return get_number(input_format_t::cbor, number) && sax->number_unsigned(number); + } + + case 0x19: // Unsigned integer (two-byte uint16_t follows) + { + std::uint16_t number{}; + return get_number(input_format_t::cbor, number) && sax->number_unsigned(number); + } + + case 0x1A: // Unsigned integer (four-byte uint32_t follows) + { + std::uint32_t number{}; + return get_number(input_format_t::cbor, number) && sax->number_unsigned(number); + } + + case 0x1B: // Unsigned integer (eight-byte uint64_t follows) + { + std::uint64_t number{}; + return get_number(input_format_t::cbor, number) && sax->number_unsigned(number); + } + + // Negative integer -1-0x00..-1-0x17 (-1..-24) + case 0x20: + case 0x21: + case 0x22: + case 0x23: + case 0x24: + case 0x25: + case 0x26: + case 0x27: + case 0x28: + case 0x29: + case 0x2A: + case 0x2B: + case 0x2C: + case 0x2D: + case 0x2E: + case 0x2F: + case 0x30: + case 0x31: + case 0x32: + case 0x33: + case 0x34: + case 0x35: + case 0x36: + case 0x37: + return sax->number_integer(static_cast<std::int8_t>(0x20 - 1 - current)); + + case 0x38: // Negative integer (one-byte uint8_t follows) + { + std::uint8_t number{}; + return get_number(input_format_t::cbor, number) && sax->number_integer(static_cast<number_integer_t>(-1) - number); + } + + case 0x39: // Negative integer -1-n (two-byte uint16_t follows) + { + std::uint16_t number{}; + return get_number(input_format_t::cbor, number) && sax->number_integer(static_cast<number_integer_t>(-1) - number); + } + + case 0x3A: // Negative integer -1-n (four-byte uint32_t follows) + { + std::uint32_t number{}; + return get_number(input_format_t::cbor, number) && sax->number_integer(static_cast<number_integer_t>(-1) - number); + } + + case 0x3B: // Negative integer -1-n (eight-byte uint64_t follows) + { + std::uint64_t number{}; + return get_number(input_format_t::cbor, number) && sax->number_integer(static_cast<number_integer_t>(-1) + - static_cast<number_integer_t>(number)); + } + + // Binary data (0x00..0x17 bytes follow) + case 0x40: + case 0x41: + case 0x42: + case 0x43: + case 0x44: + case 0x45: + case 0x46: + case 0x47: + case 0x48: + case 0x49: + case 0x4A: + case 0x4B: + case 0x4C: + case 0x4D: + case 0x4E: + case 0x4F: + case 0x50: + case 0x51: + case 0x52: + case 0x53: + case 0x54: + case 0x55: + case 0x56: + case 0x57: + case 0x58: // Binary data (one-byte uint8_t for n follows) + case 0x59: // Binary data (two-byte uint16_t for n follow) + case 0x5A: // Binary data (four-byte uint32_t for n 
follow) + case 0x5B: // Binary data (eight-byte uint64_t for n follow) + case 0x5F: // Binary data (indefinite length) + { + binary_t b; + return get_cbor_binary(b) && sax->binary(b); + } + + // UTF-8 string (0x00..0x17 bytes follow) + case 0x60: + case 0x61: + case 0x62: + case 0x63: + case 0x64: + case 0x65: + case 0x66: + case 0x67: + case 0x68: + case 0x69: + case 0x6A: + case 0x6B: + case 0x6C: + case 0x6D: + case 0x6E: + case 0x6F: + case 0x70: + case 0x71: + case 0x72: + case 0x73: + case 0x74: + case 0x75: + case 0x76: + case 0x77: + case 0x78: // UTF-8 string (one-byte uint8_t for n follows) + case 0x79: // UTF-8 string (two-byte uint16_t for n follow) + case 0x7A: // UTF-8 string (four-byte uint32_t for n follow) + case 0x7B: // UTF-8 string (eight-byte uint64_t for n follow) + case 0x7F: // UTF-8 string (indefinite length) + { + string_t s; + return get_cbor_string(s) && sax->string(s); + } + + // array (0x00..0x17 data items follow) + case 0x80: + case 0x81: + case 0x82: + case 0x83: + case 0x84: + case 0x85: + case 0x86: + case 0x87: + case 0x88: + case 0x89: + case 0x8A: + case 0x8B: + case 0x8C: + case 0x8D: + case 0x8E: + case 0x8F: + case 0x90: + case 0x91: + case 0x92: + case 0x93: + case 0x94: + case 0x95: + case 0x96: + case 0x97: + return get_cbor_array(static_cast<std::size_t>(static_cast<unsigned int>(current) & 0x1Fu), tag_handler); + + case 0x98: // array (one-byte uint8_t for n follows) + { + std::uint8_t len{}; + return get_number(input_format_t::cbor, len) && get_cbor_array(static_cast<std::size_t>(len), tag_handler); + } + + case 0x99: // array (two-byte uint16_t for n follow) + { + std::uint16_t len{}; + return get_number(input_format_t::cbor, len) && get_cbor_array(static_cast<std::size_t>(len), tag_handler); + } + + case 0x9A: // array (four-byte uint32_t for n follow) + { + std::uint32_t len{}; + return get_number(input_format_t::cbor, len) && get_cbor_array(static_cast<std::size_t>(len), tag_handler); + } + + case 0x9B: // array (eight-byte uint64_t for n follow) + { + std::uint64_t len{}; + return get_number(input_format_t::cbor, len) && get_cbor_array(detail::conditional_static_cast<std::size_t>(len), tag_handler); + } + + case 0x9F: // array (indefinite length) + return get_cbor_array(static_cast<std::size_t>(-1), tag_handler); + + // map (0x00..0x17 pairs of data items follow) + case 0xA0: + case 0xA1: + case 0xA2: + case 0xA3: + case 0xA4: + case 0xA5: + case 0xA6: + case 0xA7: + case 0xA8: + case 0xA9: + case 0xAA: + case 0xAB: + case 0xAC: + case 0xAD: + case 0xAE: + case 0xAF: + case 0xB0: + case 0xB1: + case 0xB2: + case 0xB3: + case 0xB4: + case 0xB5: + case 0xB6: + case 0xB7: + return get_cbor_object(static_cast<std::size_t>(static_cast<unsigned int>(current) & 0x1Fu), tag_handler); + + case 0xB8: // map (one-byte uint8_t for n follows) + { + std::uint8_t len{}; + return get_number(input_format_t::cbor, len) && get_cbor_object(static_cast<std::size_t>(len), tag_handler); + } + + case 0xB9: // map (two-byte uint16_t for n follow) + { + std::uint16_t len{}; + return get_number(input_format_t::cbor, len) && get_cbor_object(static_cast<std::size_t>(len), tag_handler); + } + + case 0xBA: // map (four-byte uint32_t for n follow) + { + std::uint32_t len{}; + return get_number(input_format_t::cbor, len) && get_cbor_object(static_cast<std::size_t>(len), tag_handler); + } + + case 0xBB: // map (eight-byte uint64_t for n follow) + { + std::uint64_t len{}; + return get_number(input_format_t::cbor, len) && 
get_cbor_object(detail::conditional_static_cast<std::size_t>(len), tag_handler); + } + + case 0xBF: // map (indefinite length) + return get_cbor_object(static_cast<std::size_t>(-1), tag_handler); + + case 0xC6: // tagged item + case 0xC7: + case 0xC8: + case 0xC9: + case 0xCA: + case 0xCB: + case 0xCC: + case 0xCD: + case 0xCE: + case 0xCF: + case 0xD0: + case 0xD1: + case 0xD2: + case 0xD3: + case 0xD4: + case 0xD8: // tagged item (1 bytes follow) + case 0xD9: // tagged item (2 bytes follow) + case 0xDA: // tagged item (4 bytes follow) + case 0xDB: // tagged item (8 bytes follow) + { + switch (tag_handler) + { + case cbor_tag_handler_t::error: + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, + exception_message(input_format_t::cbor, concat("invalid byte: 0x", last_token), "value"), nullptr)); + } + + case cbor_tag_handler_t::ignore: + { + // ignore binary subtype + switch (current) + { + case 0xD8: + { + std::uint8_t subtype_to_ignore{}; + get_number(input_format_t::cbor, subtype_to_ignore); + break; + } + case 0xD9: + { + std::uint16_t subtype_to_ignore{}; + get_number(input_format_t::cbor, subtype_to_ignore); + break; + } + case 0xDA: + { + std::uint32_t subtype_to_ignore{}; + get_number(input_format_t::cbor, subtype_to_ignore); + break; + } + case 0xDB: + { + std::uint64_t subtype_to_ignore{}; + get_number(input_format_t::cbor, subtype_to_ignore); + break; + } + default: + break; + } + return parse_cbor_internal(true, tag_handler); + } + + case cbor_tag_handler_t::store: + { + binary_t b; + // use binary subtype and store in binary container + switch (current) + { + case 0xD8: + { + std::uint8_t subtype{}; + get_number(input_format_t::cbor, subtype); + b.set_subtype(detail::conditional_static_cast<typename binary_t::subtype_type>(subtype)); + break; + } + case 0xD9: + { + std::uint16_t subtype{}; + get_number(input_format_t::cbor, subtype); + b.set_subtype(detail::conditional_static_cast<typename binary_t::subtype_type>(subtype)); + break; + } + case 0xDA: + { + std::uint32_t subtype{}; + get_number(input_format_t::cbor, subtype); + b.set_subtype(detail::conditional_static_cast<typename binary_t::subtype_type>(subtype)); + break; + } + case 0xDB: + { + std::uint64_t subtype{}; + get_number(input_format_t::cbor, subtype); + b.set_subtype(detail::conditional_static_cast<typename binary_t::subtype_type>(subtype)); + break; + } + default: + return parse_cbor_internal(true, tag_handler); + } + get(); + return get_cbor_binary(b) && sax->binary(b); + } + + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) LCOV_EXCL_LINE + return false; // LCOV_EXCL_LINE + } + } + + case 0xF4: // false + return sax->boolean(false); + + case 0xF5: // true + return sax->boolean(true); + + case 0xF6: // null + return sax->null(); + + case 0xF9: // Half-Precision Float (two-byte IEEE 754) + { + const auto byte1_raw = get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::cbor, "number"))) + { + return false; + } + const auto byte2_raw = get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::cbor, "number"))) + { + return false; + } + + const auto byte1 = static_cast<unsigned char>(byte1_raw); + const auto byte2 = static_cast<unsigned char>(byte2_raw); + + // code from RFC 7049, Appendix D, Figure 3: + // As half-precision floating-point numbers were only added + // to IEEE 754 in 2008, today's programming platforms often + // still only have limited 
support for them. It is very + // easy to include at least decoding support for them even + // without such support. An example of a small decoder for + // half-precision floating-point numbers in the C language + // is shown in Fig. 3. + const auto half = static_cast<unsigned int>((byte1 << 8u) + byte2); + const double val = [&half] + { + const int exp = (half >> 10u) & 0x1Fu; + const unsigned int mant = half & 0x3FFu; + JSON_ASSERT(0 <= exp&& exp <= 32); + JSON_ASSERT(mant <= 1024); + switch (exp) + { + case 0: + return std::ldexp(mant, -24); + case 31: + return (mant == 0) + ? std::numeric_limits<double>::infinity() + : std::numeric_limits<double>::quiet_NaN(); + default: + return std::ldexp(mant + 1024, exp - 25); + } + }(); + return sax->number_float((half & 0x8000u) != 0 + ? static_cast<number_float_t>(-val) + : static_cast<number_float_t>(val), ""); + } + + case 0xFA: // Single-Precision Float (four-byte IEEE 754) + { + float number{}; + return get_number(input_format_t::cbor, number) && sax->number_float(static_cast<number_float_t>(number), ""); + } + + case 0xFB: // Double-Precision Float (eight-byte IEEE 754) + { + double number{}; + return get_number(input_format_t::cbor, number) && sax->number_float(static_cast<number_float_t>(number), ""); + } + + default: // anything else (0xFF is handled inside the other types) + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, + exception_message(input_format_t::cbor, concat("invalid byte: 0x", last_token), "value"), nullptr)); + } + } + } + + /*! + @brief reads a CBOR string + + This function first reads starting bytes to determine the expected + string length and then copies this number of bytes into a string. + Additionally, CBOR's strings with indefinite lengths are supported. 
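// Illustrative sketch, not part of this patch: an indefinite-length text
// string is the 0x7F marker, a run of definite-length chunks, and a 0xFF
// "break" byte, which is exactly what the 0x7F branch below reassembles:
//
//   0x7F  0x62 'h' 'e'  0x63 'l' 'l' 'o'  0xFF   // decodes to "hello"
//
// A hedged check through the public API:
#include <cassert>
#include <cstdint>
#include <vector>
#include <nlohmann/json.hpp>

inline void cbor_indefinite_string_example()
{
    const std::vector<std::uint8_t> bytes =
        {0x7F, 0x62, 'h', 'e', 0x63, 'l', 'l', 'o', 0xFF};
    assert(nlohmann::json::from_cbor(bytes) == "hello");
}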
+ + @param[out] result created string + + @return whether string creation completed + */ + bool get_cbor_string(string_t& result) + { + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::cbor, "string"))) + { + return false; + } + + switch (current) + { + // UTF-8 string (0x00..0x17 bytes follow) + case 0x60: + case 0x61: + case 0x62: + case 0x63: + case 0x64: + case 0x65: + case 0x66: + case 0x67: + case 0x68: + case 0x69: + case 0x6A: + case 0x6B: + case 0x6C: + case 0x6D: + case 0x6E: + case 0x6F: + case 0x70: + case 0x71: + case 0x72: + case 0x73: + case 0x74: + case 0x75: + case 0x76: + case 0x77: + { + return get_string(input_format_t::cbor, static_cast<unsigned int>(current) & 0x1Fu, result); + } + + case 0x78: // UTF-8 string (one-byte uint8_t for n follows) + { + std::uint8_t len{}; + return get_number(input_format_t::cbor, len) && get_string(input_format_t::cbor, len, result); + } + + case 0x79: // UTF-8 string (two-byte uint16_t for n follow) + { + std::uint16_t len{}; + return get_number(input_format_t::cbor, len) && get_string(input_format_t::cbor, len, result); + } + + case 0x7A: // UTF-8 string (four-byte uint32_t for n follow) + { + std::uint32_t len{}; + return get_number(input_format_t::cbor, len) && get_string(input_format_t::cbor, len, result); + } + + case 0x7B: // UTF-8 string (eight-byte uint64_t for n follow) + { + std::uint64_t len{}; + return get_number(input_format_t::cbor, len) && get_string(input_format_t::cbor, len, result); + } + + case 0x7F: // UTF-8 string (indefinite length) + { + while (get() != 0xFF) + { + string_t chunk; + if (!get_cbor_string(chunk)) + { + return false; + } + result.append(chunk); + } + return true; + } + + default: + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(113, chars_read, + exception_message(input_format_t::cbor, concat("expected length specification (0x60-0x7B) or indefinite string type (0x7F); last byte: 0x", last_token), "string"), nullptr)); + } + } + } + + /*! + @brief reads a CBOR byte array + + This function first reads starting bytes to determine the expected + byte array length and then copies this number of bytes into the byte array. + Additionally, CBOR's byte arrays with indefinite lengths are supported. 
+ + @param[out] result created byte array + + @return whether byte array creation completed + */ + bool get_cbor_binary(binary_t& result) + { + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::cbor, "binary"))) + { + return false; + } + + switch (current) + { + // Binary data (0x00..0x17 bytes follow) + case 0x40: + case 0x41: + case 0x42: + case 0x43: + case 0x44: + case 0x45: + case 0x46: + case 0x47: + case 0x48: + case 0x49: + case 0x4A: + case 0x4B: + case 0x4C: + case 0x4D: + case 0x4E: + case 0x4F: + case 0x50: + case 0x51: + case 0x52: + case 0x53: + case 0x54: + case 0x55: + case 0x56: + case 0x57: + { + return get_binary(input_format_t::cbor, static_cast<unsigned int>(current) & 0x1Fu, result); + } + + case 0x58: // Binary data (one-byte uint8_t for n follows) + { + std::uint8_t len{}; + return get_number(input_format_t::cbor, len) && + get_binary(input_format_t::cbor, len, result); + } + + case 0x59: // Binary data (two-byte uint16_t for n follow) + { + std::uint16_t len{}; + return get_number(input_format_t::cbor, len) && + get_binary(input_format_t::cbor, len, result); + } + + case 0x5A: // Binary data (four-byte uint32_t for n follow) + { + std::uint32_t len{}; + return get_number(input_format_t::cbor, len) && + get_binary(input_format_t::cbor, len, result); + } + + case 0x5B: // Binary data (eight-byte uint64_t for n follow) + { + std::uint64_t len{}; + return get_number(input_format_t::cbor, len) && + get_binary(input_format_t::cbor, len, result); + } + + case 0x5F: // Binary data (indefinite length) + { + while (get() != 0xFF) + { + binary_t chunk; + if (!get_cbor_binary(chunk)) + { + return false; + } + result.insert(result.end(), chunk.begin(), chunk.end()); + } + return true; + } + + default: + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(113, chars_read, + exception_message(input_format_t::cbor, concat("expected length specification (0x40-0x5B) or indefinite binary array type (0x5F); last byte: 0x", last_token), "binary"), nullptr)); + } + } + } + + /*! + @param[in] len the length of the array or static_cast<std::size_t>(-1) for an + array of indefinite size + @param[in] tag_handler how CBOR tags should be treated + @return whether array creation completed + */ + bool get_cbor_array(const std::size_t len, + const cbor_tag_handler_t tag_handler) + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_array(len))) + { + return false; + } + + if (len != static_cast<std::size_t>(-1)) + { + for (std::size_t i = 0; i < len; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!parse_cbor_internal(true, tag_handler))) + { + return false; + } + } + } + else + { + while (get() != 0xFF) + { + if (JSON_HEDLEY_UNLIKELY(!parse_cbor_internal(false, tag_handler))) + { + return false; + } + } + } + + return sax->end_array(); + } + + /*! 
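// Illustrative note, not part of this patch: a definite-length CBOR map
// announces its pair count up front, so {"a": 1} is only three tokens:
//
//   0xA1        // map, 1 key/value pair
//   0x61 'a'    // text string of length 1, the key
//   0x01        // unsigned integer 1, the value
//
// Indefinite maps (0xBF ... 0xFF) rely on the break byte instead, matching the
// while (get() != 0xFF) loop below.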
+ @param[in] len the length of the object or static_cast<std::size_t>(-1) for an + object of indefinite size + @param[in] tag_handler how CBOR tags should be treated + @return whether object creation completed + */ + bool get_cbor_object(const std::size_t len, + const cbor_tag_handler_t tag_handler) + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_object(len))) + { + return false; + } + + if (len != 0) + { + string_t key; + if (len != static_cast<std::size_t>(-1)) + { + for (std::size_t i = 0; i < len; ++i) + { + get(); + if (JSON_HEDLEY_UNLIKELY(!get_cbor_string(key) || !sax->key(key))) + { + return false; + } + + if (JSON_HEDLEY_UNLIKELY(!parse_cbor_internal(true, tag_handler))) + { + return false; + } + key.clear(); + } + } + else + { + while (get() != 0xFF) + { + if (JSON_HEDLEY_UNLIKELY(!get_cbor_string(key) || !sax->key(key))) + { + return false; + } + + if (JSON_HEDLEY_UNLIKELY(!parse_cbor_internal(true, tag_handler))) + { + return false; + } + key.clear(); + } + } + } + + return sax->end_object(); + } + + ///////////// + // MsgPack // + ///////////// + + /*! + @return whether a valid MessagePack value was passed to the SAX parser + */ + bool parse_msgpack_internal() + { + switch (get()) + { + // EOF + case std::char_traits<char_type>::eof(): + return unexpect_eof(input_format_t::msgpack, "value"); + + // positive fixint + case 0x00: + case 0x01: + case 0x02: + case 0x03: + case 0x04: + case 0x05: + case 0x06: + case 0x07: + case 0x08: + case 0x09: + case 0x0A: + case 0x0B: + case 0x0C: + case 0x0D: + case 0x0E: + case 0x0F: + case 0x10: + case 0x11: + case 0x12: + case 0x13: + case 0x14: + case 0x15: + case 0x16: + case 0x17: + case 0x18: + case 0x19: + case 0x1A: + case 0x1B: + case 0x1C: + case 0x1D: + case 0x1E: + case 0x1F: + case 0x20: + case 0x21: + case 0x22: + case 0x23: + case 0x24: + case 0x25: + case 0x26: + case 0x27: + case 0x28: + case 0x29: + case 0x2A: + case 0x2B: + case 0x2C: + case 0x2D: + case 0x2E: + case 0x2F: + case 0x30: + case 0x31: + case 0x32: + case 0x33: + case 0x34: + case 0x35: + case 0x36: + case 0x37: + case 0x38: + case 0x39: + case 0x3A: + case 0x3B: + case 0x3C: + case 0x3D: + case 0x3E: + case 0x3F: + case 0x40: + case 0x41: + case 0x42: + case 0x43: + case 0x44: + case 0x45: + case 0x46: + case 0x47: + case 0x48: + case 0x49: + case 0x4A: + case 0x4B: + case 0x4C: + case 0x4D: + case 0x4E: + case 0x4F: + case 0x50: + case 0x51: + case 0x52: + case 0x53: + case 0x54: + case 0x55: + case 0x56: + case 0x57: + case 0x58: + case 0x59: + case 0x5A: + case 0x5B: + case 0x5C: + case 0x5D: + case 0x5E: + case 0x5F: + case 0x60: + case 0x61: + case 0x62: + case 0x63: + case 0x64: + case 0x65: + case 0x66: + case 0x67: + case 0x68: + case 0x69: + case 0x6A: + case 0x6B: + case 0x6C: + case 0x6D: + case 0x6E: + case 0x6F: + case 0x70: + case 0x71: + case 0x72: + case 0x73: + case 0x74: + case 0x75: + case 0x76: + case 0x77: + case 0x78: + case 0x79: + case 0x7A: + case 0x7B: + case 0x7C: + case 0x7D: + case 0x7E: + case 0x7F: + return sax->number_unsigned(static_cast<number_unsigned_t>(current)); + + // fixmap + case 0x80: + case 0x81: + case 0x82: + case 0x83: + case 0x84: + case 0x85: + case 0x86: + case 0x87: + case 0x88: + case 0x89: + case 0x8A: + case 0x8B: + case 0x8C: + case 0x8D: + case 0x8E: + case 0x8F: + return get_msgpack_object(static_cast<std::size_t>(static_cast<unsigned int>(current) & 0x0Fu)); + + // fixarray + case 0x90: + case 0x91: + case 0x92: + case 0x93: + case 0x94: + case 0x95: + case 0x96: + case 0x97: + case 0x98: + case 0x99: + case 
0x9A: + case 0x9B: + case 0x9C: + case 0x9D: + case 0x9E: + case 0x9F: + return get_msgpack_array(static_cast<std::size_t>(static_cast<unsigned int>(current) & 0x0Fu)); + + // fixstr + case 0xA0: + case 0xA1: + case 0xA2: + case 0xA3: + case 0xA4: + case 0xA5: + case 0xA6: + case 0xA7: + case 0xA8: + case 0xA9: + case 0xAA: + case 0xAB: + case 0xAC: + case 0xAD: + case 0xAE: + case 0xAF: + case 0xB0: + case 0xB1: + case 0xB2: + case 0xB3: + case 0xB4: + case 0xB5: + case 0xB6: + case 0xB7: + case 0xB8: + case 0xB9: + case 0xBA: + case 0xBB: + case 0xBC: + case 0xBD: + case 0xBE: + case 0xBF: + case 0xD9: // str 8 + case 0xDA: // str 16 + case 0xDB: // str 32 + { + string_t s; + return get_msgpack_string(s) && sax->string(s); + } + + case 0xC0: // nil + return sax->null(); + + case 0xC2: // false + return sax->boolean(false); + + case 0xC3: // true + return sax->boolean(true); + + case 0xC4: // bin 8 + case 0xC5: // bin 16 + case 0xC6: // bin 32 + case 0xC7: // ext 8 + case 0xC8: // ext 16 + case 0xC9: // ext 32 + case 0xD4: // fixext 1 + case 0xD5: // fixext 2 + case 0xD6: // fixext 4 + case 0xD7: // fixext 8 + case 0xD8: // fixext 16 + { + binary_t b; + return get_msgpack_binary(b) && sax->binary(b); + } + + case 0xCA: // float 32 + { + float number{}; + return get_number(input_format_t::msgpack, number) && sax->number_float(static_cast<number_float_t>(number), ""); + } + + case 0xCB: // float 64 + { + double number{}; + return get_number(input_format_t::msgpack, number) && sax->number_float(static_cast<number_float_t>(number), ""); + } + + case 0xCC: // uint 8 + { + std::uint8_t number{}; + return get_number(input_format_t::msgpack, number) && sax->number_unsigned(number); + } + + case 0xCD: // uint 16 + { + std::uint16_t number{}; + return get_number(input_format_t::msgpack, number) && sax->number_unsigned(number); + } + + case 0xCE: // uint 32 + { + std::uint32_t number{}; + return get_number(input_format_t::msgpack, number) && sax->number_unsigned(number); + } + + case 0xCF: // uint 64 + { + std::uint64_t number{}; + return get_number(input_format_t::msgpack, number) && sax->number_unsigned(number); + } + + case 0xD0: // int 8 + { + std::int8_t number{}; + return get_number(input_format_t::msgpack, number) && sax->number_integer(number); + } + + case 0xD1: // int 16 + { + std::int16_t number{}; + return get_number(input_format_t::msgpack, number) && sax->number_integer(number); + } + + case 0xD2: // int 32 + { + std::int32_t number{}; + return get_number(input_format_t::msgpack, number) && sax->number_integer(number); + } + + case 0xD3: // int 64 + { + std::int64_t number{}; + return get_number(input_format_t::msgpack, number) && sax->number_integer(number); + } + + case 0xDC: // array 16 + { + std::uint16_t len{}; + return get_number(input_format_t::msgpack, len) && get_msgpack_array(static_cast<std::size_t>(len)); + } + + case 0xDD: // array 32 + { + std::uint32_t len{}; + return get_number(input_format_t::msgpack, len) && get_msgpack_array(static_cast<std::size_t>(len)); + } + + case 0xDE: // map 16 + { + std::uint16_t len{}; + return get_number(input_format_t::msgpack, len) && get_msgpack_object(static_cast<std::size_t>(len)); + } + + case 0xDF: // map 32 + { + std::uint32_t len{}; + return get_number(input_format_t::msgpack, len) && get_msgpack_object(static_cast<std::size_t>(len)); + } + + // negative fixint + case 0xE0: + case 0xE1: + case 0xE2: + case 0xE3: + case 0xE4: + case 0xE5: + case 0xE6: + case 0xE7: + case 0xE8: + case 0xE9: + case 0xEA: + case 0xEB: + case 0xEC: + 
case 0xED: + case 0xEE: + case 0xEF: + case 0xF0: + case 0xF1: + case 0xF2: + case 0xF3: + case 0xF4: + case 0xF5: + case 0xF6: + case 0xF7: + case 0xF8: + case 0xF9: + case 0xFA: + case 0xFB: + case 0xFC: + case 0xFD: + case 0xFE: + case 0xFF: + return sax->number_integer(static_cast<std::int8_t>(current)); + + default: // anything else + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, + exception_message(input_format_t::msgpack, concat("invalid byte: 0x", last_token), "value"), nullptr)); + } + } + } + + /*! + @brief reads a MessagePack string + + This function first reads starting bytes to determine the expected + string length and then copies this number of bytes into a string. + + @param[out] result created string + + @return whether string creation completed + */ + bool get_msgpack_string(string_t& result) + { + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::msgpack, "string"))) + { + return false; + } + + switch (current) + { + // fixstr + case 0xA0: + case 0xA1: + case 0xA2: + case 0xA3: + case 0xA4: + case 0xA5: + case 0xA6: + case 0xA7: + case 0xA8: + case 0xA9: + case 0xAA: + case 0xAB: + case 0xAC: + case 0xAD: + case 0xAE: + case 0xAF: + case 0xB0: + case 0xB1: + case 0xB2: + case 0xB3: + case 0xB4: + case 0xB5: + case 0xB6: + case 0xB7: + case 0xB8: + case 0xB9: + case 0xBA: + case 0xBB: + case 0xBC: + case 0xBD: + case 0xBE: + case 0xBF: + { + return get_string(input_format_t::msgpack, static_cast<unsigned int>(current) & 0x1Fu, result); + } + + case 0xD9: // str 8 + { + std::uint8_t len{}; + return get_number(input_format_t::msgpack, len) && get_string(input_format_t::msgpack, len, result); + } + + case 0xDA: // str 16 + { + std::uint16_t len{}; + return get_number(input_format_t::msgpack, len) && get_string(input_format_t::msgpack, len, result); + } + + case 0xDB: // str 32 + { + std::uint32_t len{}; + return get_number(input_format_t::msgpack, len) && get_string(input_format_t::msgpack, len, result); + } + + default: + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(113, chars_read, + exception_message(input_format_t::msgpack, concat("expected length specification (0xA0-0xBF, 0xD9-0xDB); last byte: 0x", last_token), "string"), nullptr)); + } + } + } + + /*! + @brief reads a MessagePack byte array + + This function first reads starting bytes to determine the expected + byte array length and then copies this number of bytes into a byte array. 
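// Illustrative sketch, not part of this patch: the ext/fixext cases below
// carry a signed one-byte subtype in front of the payload, which the helper
// lambda stores via binary_t::set_subtype(). For example, fixext 4 with
// subtype 42 and payload DE AD BE EF is:
//
//   0xD6 0x2A 0xDE 0xAD 0xBE 0xEF
//
// A hedged check through the public API:
#include <cassert>
#include <cstdint>
#include <vector>
#include <nlohmann/json.hpp>

inline void msgpack_fixext_example()
{
    const std::vector<std::uint8_t> bytes = {0xD6, 0x2A, 0xDE, 0xAD, 0xBE, 0xEF};
    const nlohmann::json j = nlohmann::json::from_msgpack(bytes);
    assert(j.is_binary());
    assert(j.get_binary().subtype() == 42);
    assert(j.get_binary().size() == 4);
}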
+ + @param[out] result created byte array + + @return whether byte array creation completed + */ + bool get_msgpack_binary(binary_t& result) + { + // helper function to set the subtype + auto assign_and_return_true = [&result](std::int8_t subtype) + { + result.set_subtype(static_cast<std::uint8_t>(subtype)); + return true; + }; + + switch (current) + { + case 0xC4: // bin 8 + { + std::uint8_t len{}; + return get_number(input_format_t::msgpack, len) && + get_binary(input_format_t::msgpack, len, result); + } + + case 0xC5: // bin 16 + { + std::uint16_t len{}; + return get_number(input_format_t::msgpack, len) && + get_binary(input_format_t::msgpack, len, result); + } + + case 0xC6: // bin 32 + { + std::uint32_t len{}; + return get_number(input_format_t::msgpack, len) && + get_binary(input_format_t::msgpack, len, result); + } + + case 0xC7: // ext 8 + { + std::uint8_t len{}; + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, len) && + get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, len, result) && + assign_and_return_true(subtype); + } + + case 0xC8: // ext 16 + { + std::uint16_t len{}; + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, len) && + get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, len, result) && + assign_and_return_true(subtype); + } + + case 0xC9: // ext 32 + { + std::uint32_t len{}; + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, len) && + get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, len, result) && + assign_and_return_true(subtype); + } + + case 0xD4: // fixext 1 + { + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, 1, result) && + assign_and_return_true(subtype); + } + + case 0xD5: // fixext 2 + { + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, 2, result) && + assign_and_return_true(subtype); + } + + case 0xD6: // fixext 4 + { + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, 4, result) && + assign_and_return_true(subtype); + } + + case 0xD7: // fixext 8 + { + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, 8, result) && + assign_and_return_true(subtype); + } + + case 0xD8: // fixext 16 + { + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, 16, result) && + assign_and_return_true(subtype); + } + + default: // LCOV_EXCL_LINE + return false; // LCOV_EXCL_LINE + } + } + + /*! + @param[in] len the length of the array + @return whether array creation completed + */ + bool get_msgpack_array(const std::size_t len) + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_array(len))) + { + return false; + } + + for (std::size_t i = 0; i < len; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!parse_msgpack_internal())) + { + return false; + } + } + + return sax->end_array(); + } + + /*! 
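// Illustrative note, not part of this patch: a MessagePack map likewise
// announces its pair count first; {"a": 1} fits entirely into the fix formats:
//
//   0x81        // fixmap, 1 key/value pair
//   0xA1 'a'    // fixstr of length 1, the key
//   0x01        // positive fixint 1, the value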
+ @param[in] len the length of the object + @return whether object creation completed + */ + bool get_msgpack_object(const std::size_t len) + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_object(len))) + { + return false; + } + + string_t key; + for (std::size_t i = 0; i < len; ++i) + { + get(); + if (JSON_HEDLEY_UNLIKELY(!get_msgpack_string(key) || !sax->key(key))) + { + return false; + } + + if (JSON_HEDLEY_UNLIKELY(!parse_msgpack_internal())) + { + return false; + } + key.clear(); + } + + return sax->end_object(); + } + + //////////// + // UBJSON // + //////////// + + /*! + @param[in] get_char whether a new character should be retrieved from the + input (true, default) or whether the last read + character should be considered instead + + @return whether a valid UBJSON value was passed to the SAX parser + */ + bool parse_ubjson_internal(const bool get_char = true) + { + return get_ubjson_value(get_char ? get_ignore_noop() : current); + } + + /*! + @brief reads a UBJSON string + + This function is either called after reading the 'S' byte explicitly + indicating a string, or in case of an object key where the 'S' byte can be + left out. + + @param[out] result created string + @param[in] get_char whether a new character should be retrieved from the + input (true, default) or whether the last read + character should be considered instead + + @return whether string creation completed + */ + bool get_ubjson_string(string_t& result, const bool get_char = true) + { + if (get_char) + { + get(); // TODO(niels): may we ignore N here? + } + + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format, "value"))) + { + return false; + } + + switch (current) + { + case 'U': + { + std::uint8_t len{}; + return get_number(input_format, len) && get_string(input_format, len, result); + } + + case 'i': + { + std::int8_t len{}; + return get_number(input_format, len) && get_string(input_format, len, result); + } + + case 'I': + { + std::int16_t len{}; + return get_number(input_format, len) && get_string(input_format, len, result); + } + + case 'l': + { + std::int32_t len{}; + return get_number(input_format, len) && get_string(input_format, len, result); + } + + case 'L': + { + std::int64_t len{}; + return get_number(input_format, len) && get_string(input_format, len, result); + } + + case 'u': + { + if (input_format != input_format_t::bjdata) + { + break; + } + std::uint16_t len{}; + return get_number(input_format, len) && get_string(input_format, len, result); + } + + case 'm': + { + if (input_format != input_format_t::bjdata) + { + break; + } + std::uint32_t len{}; + return get_number(input_format, len) && get_string(input_format, len, result); + } + + case 'M': + { + if (input_format != input_format_t::bjdata) + { + break; + } + std::uint64_t len{}; + return get_number(input_format, len) && get_string(input_format, len, result); + } + + default: + break; + } + auto last_token = get_token_string(); + std::string message; + + if (input_format != input_format_t::bjdata) + { + message = "expected length type specification (U, i, I, l, L); last byte: 0x" + last_token; + } + else + { + message = "expected length type specification (U, i, u, I, m, l, M, L); last byte: 0x" + last_token; + } + return sax->parse_error(chars_read, last_token, parse_error::create(113, chars_read, exception_message(input_format, message, "string"), nullptr)); + } + + /*! 
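// Illustrative note, not part of this patch: in BJData an optimized ND-array
// header supplies the dimensions as a nested size vector after '#'. A 2x3
// uint8 array can, for instance, be announced as
//
//   '[' '$' 'U' '#' '[' 'i' 2 'i' 3 ']'   // then 2*3 = 6 payload bytes follow
//
// and is surfaced by this reader as a JData-style annotated object with the
// keys "_ArraySize_" ([2, 3]), "_ArrayType_" ("uint8") and "_ArrayData_".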
+ @param[out] dim an integer vector storing the ND array dimensions + @return whether reading ND array size vector is successful + */ + bool get_ubjson_ndarray_size(std::vector<size_t>& dim) + { + std::pair<std::size_t, char_int_type> size_and_type; + size_t dimlen = 0; + bool no_ndarray = true; + + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_size_type(size_and_type, no_ndarray))) + { + return false; + } + + if (size_and_type.first != string_t::npos) + { + if (size_and_type.second != 0) + { + if (size_and_type.second != 'N') + { + for (std::size_t i = 0; i < size_and_type.first; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_size_value(dimlen, no_ndarray, size_and_type.second))) + { + return false; + } + dim.push_back(dimlen); + } + } + } + else + { + for (std::size_t i = 0; i < size_and_type.first; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_size_value(dimlen, no_ndarray))) + { + return false; + } + dim.push_back(dimlen); + } + } + } + else + { + while (current != ']') + { + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_size_value(dimlen, no_ndarray, current))) + { + return false; + } + dim.push_back(dimlen); + get_ignore_noop(); + } + } + return true; + } + + /*! + @param[out] result determined size + @param[in,out] is_ndarray for input, `true` means already inside an ndarray vector + or ndarray dimension is not allowed; `false` means ndarray + is allowed; for output, `true` means an ndarray is found; + is_ndarray can only return `true` when its initial value + is `false` + @param[in] prefix type marker if already read, otherwise set to 0 + + @return whether size determination completed + */ + bool get_ubjson_size_value(std::size_t& result, bool& is_ndarray, char_int_type prefix = 0) + { + if (prefix == 0) + { + prefix = get_ignore_noop(); + } + + switch (prefix) + { + case 'U': + { + std::uint8_t number{}; + if (JSON_HEDLEY_UNLIKELY(!get_number(input_format, number))) + { + return false; + } + result = static_cast<std::size_t>(number); + return true; + } + + case 'i': + { + std::int8_t number{}; + if (JSON_HEDLEY_UNLIKELY(!get_number(input_format, number))) + { + return false; + } + if (number < 0) + { + return sax->parse_error(chars_read, get_token_string(), parse_error::create(113, chars_read, + exception_message(input_format, "count in an optimized container must be positive", "size"), nullptr)); + } + result = static_cast<std::size_t>(number); // NOLINT(bugprone-signed-char-misuse,cert-str34-c): number is not a char + return true; + } + + case 'I': + { + std::int16_t number{}; + if (JSON_HEDLEY_UNLIKELY(!get_number(input_format, number))) + { + return false; + } + if (number < 0) + { + return sax->parse_error(chars_read, get_token_string(), parse_error::create(113, chars_read, + exception_message(input_format, "count in an optimized container must be positive", "size"), nullptr)); + } + result = static_cast<std::size_t>(number); + return true; + } + + case 'l': + { + std::int32_t number{}; + if (JSON_HEDLEY_UNLIKELY(!get_number(input_format, number))) + { + return false; + } + if (number < 0) + { + return sax->parse_error(chars_read, get_token_string(), parse_error::create(113, chars_read, + exception_message(input_format, "count in an optimized container must be positive", "size"), nullptr)); + } + result = static_cast<std::size_t>(number); + return true; + } + + case 'L': + { + std::int64_t number{}; + if (JSON_HEDLEY_UNLIKELY(!get_number(input_format, number))) + { + return false; + } + if (number < 0) + { + return sax->parse_error(chars_read, get_token_string(), parse_error::create(113, 
chars_read, + exception_message(input_format, "count in an optimized container must be positive", "size"), nullptr)); + } + result = static_cast<std::size_t>(number); + return true; + } + + case 'u': + { + if (input_format != input_format_t::bjdata) + { + break; + } + std::uint16_t number{}; + if (JSON_HEDLEY_UNLIKELY(!get_number(input_format, number))) + { + return false; + } + result = static_cast<std::size_t>(number); + return true; + } + + case 'm': + { + if (input_format != input_format_t::bjdata) + { + break; + } + std::uint32_t number{}; + if (JSON_HEDLEY_UNLIKELY(!get_number(input_format, number))) + { + return false; + } + result = static_cast<std::size_t>(number); + return true; + } + + case 'M': + { + if (input_format != input_format_t::bjdata) + { + break; + } + std::uint64_t number{}; + if (JSON_HEDLEY_UNLIKELY(!get_number(input_format, number))) + { + return false; + } + result = detail::conditional_static_cast<std::size_t>(number); + return true; + } + + case '[': + { + if (input_format != input_format_t::bjdata) + { + break; + } + if (is_ndarray) // ndarray dimensional vector can only contain integers, and can not embed another array + { + return sax->parse_error(chars_read, get_token_string(), parse_error::create(113, chars_read, exception_message(input_format, "ndarray dimentional vector is not allowed", "size"), nullptr)); + } + std::vector<size_t> dim; + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_ndarray_size(dim))) + { + return false; + } + if (dim.size() == 1 || (dim.size() == 2 && dim.at(0) == 1)) // return normal array size if 1D row vector + { + result = dim.at(dim.size() - 1); + return true; + } + if (!dim.empty()) // if ndarray, convert to an object in JData annotated array format + { + for (auto i : dim) // test if any dimension in an ndarray is 0, if so, return a 1D empty container + { + if ( i == 0 ) + { + result = 0; + return true; + } + } + + string_t key = "_ArraySize_"; + if (JSON_HEDLEY_UNLIKELY(!sax->start_object(3) || !sax->key(key) || !sax->start_array(dim.size()))) + { + return false; + } + result = 1; + for (auto i : dim) + { + result *= i; + if (JSON_HEDLEY_UNLIKELY(!sax->number_integer(static_cast<number_integer_t>(i)))) + { + return false; + } + } + is_ndarray = true; + return sax->end_array(); + } + result = 0; + return true; + } + + default: + break; + } + auto last_token = get_token_string(); + std::string message; + + if (input_format != input_format_t::bjdata) + { + message = "expected length type specification (U, i, I, l, L) after '#'; last byte: 0x" + last_token; + } + else + { + message = "expected length type specification (U, i, u, I, m, l, M, L) after '#'; last byte: 0x" + last_token; + } + return sax->parse_error(chars_read, last_token, parse_error::create(113, chars_read, exception_message(input_format, message, "size"), nullptr)); + } + + /*! + @brief determine the type and size for a container + + In the optimized UBJSON format, a type and a size can be provided to allow + for a more compact representation. 
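// Illustrative sketch, not part of this patch: '$' fixes a single element
// type and '#' fixes the element count, so the elements themselves are stored
// without per-value markers. The optimized int8 array [1, 2, 3] is:
//
//   '[' '$' 'i' '#' 'i' 0x03  0x01 0x02 0x03
//
// A hedged check through the public API:
#include <cassert>
#include <cstdint>
#include <vector>
#include <nlohmann/json.hpp>

inline void ubjson_optimized_array_example()
{
    const std::vector<std::uint8_t> bytes =
        {'[', '$', 'i', '#', 'i', 0x03, 0x01, 0x02, 0x03};
    const nlohmann::json j = nlohmann::json::from_ubjson(bytes);
    assert(j == nlohmann::json({1, 2, 3}));
}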
+ + @param[out] result pair of the size and the type + @param[in] inside_ndarray whether the parser is parsing an ND array dimensional vector + + @return whether pair creation completed + */ + bool get_ubjson_size_type(std::pair<std::size_t, char_int_type>& result, bool inside_ndarray = false) + { + result.first = string_t::npos; // size + result.second = 0; // type + bool is_ndarray = false; + + get_ignore_noop(); + + if (current == '$') + { + std::vector<char_int_type> bjdx = {'[', '{', 'S', 'H', 'T', 'F', 'N', 'Z'}; // excluded markers in bjdata optimized type + + result.second = get(); // must not ignore 'N', because 'N' maybe the type + if (JSON_HEDLEY_UNLIKELY( input_format == input_format_t::bjdata && std::find(bjdx.begin(), bjdx.end(), result.second) != bjdx.end() )) + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, + exception_message(input_format, concat("marker 0x", last_token, " is not a permitted optimized array type"), "type"), nullptr)); + } + + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format, "type"))) + { + return false; + } + + get_ignore_noop(); + if (JSON_HEDLEY_UNLIKELY(current != '#')) + { + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format, "value"))) + { + return false; + } + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, + exception_message(input_format, concat("expected '#' after type information; last byte: 0x", last_token), "size"), nullptr)); + } + + bool is_error = get_ubjson_size_value(result.first, is_ndarray); + if (input_format == input_format_t::bjdata && is_ndarray) + { + if (inside_ndarray) + { + return sax->parse_error(chars_read, get_token_string(), parse_error::create(112, chars_read, + exception_message(input_format, "ndarray can not be recursive", "size"), nullptr)); + } + result.second |= (1 << 8); // use bit 8 to indicate ndarray, all UBJSON and BJData markers should be ASCII letters + } + return is_error; + } + + if (current == '#') + { + bool is_error = get_ubjson_size_value(result.first, is_ndarray); + if (input_format == input_format_t::bjdata && is_ndarray) + { + return sax->parse_error(chars_read, get_token_string(), parse_error::create(112, chars_read, + exception_message(input_format, "ndarray requires both type and size", "size"), nullptr)); + } + return is_error; + } + + return true; + } + + /*! 
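// Illustrative note, not part of this patch: the prefix dispatched on below is
// a single ASCII type marker. The UBJSON markers handled here are
//   'Z' null, 'T'/'F' booleans, 'i'/'I'/'l'/'L' signed integers, 'U' uint8,
//   'd'/'D' float32/float64, 'H' high-precision number, 'C' char, 'S' string,
//   '[' array, '{' object,
// and BJData additionally accepts 'u'/'m'/'M' (uint16/32/64) and 'h' (float16).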
+ @param prefix the previously read or set type prefix + @return whether value creation completed + */ + bool get_ubjson_value(const char_int_type prefix) + { + switch (prefix) + { + case std::char_traits<char_type>::eof(): // EOF + return unexpect_eof(input_format, "value"); + + case 'T': // true + return sax->boolean(true); + case 'F': // false + return sax->boolean(false); + + case 'Z': // null + return sax->null(); + + case 'U': + { + std::uint8_t number{}; + return get_number(input_format, number) && sax->number_unsigned(number); + } + + case 'i': + { + std::int8_t number{}; + return get_number(input_format, number) && sax->number_integer(number); + } + + case 'I': + { + std::int16_t number{}; + return get_number(input_format, number) && sax->number_integer(number); + } + + case 'l': + { + std::int32_t number{}; + return get_number(input_format, number) && sax->number_integer(number); + } + + case 'L': + { + std::int64_t number{}; + return get_number(input_format, number) && sax->number_integer(number); + } + + case 'u': + { + if (input_format != input_format_t::bjdata) + { + break; + } + std::uint16_t number{}; + return get_number(input_format, number) && sax->number_unsigned(number); + } + + case 'm': + { + if (input_format != input_format_t::bjdata) + { + break; + } + std::uint32_t number{}; + return get_number(input_format, number) && sax->number_unsigned(number); + } + + case 'M': + { + if (input_format != input_format_t::bjdata) + { + break; + } + std::uint64_t number{}; + return get_number(input_format, number) && sax->number_unsigned(number); + } + + case 'h': + { + if (input_format != input_format_t::bjdata) + { + break; + } + const auto byte1_raw = get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format, "number"))) + { + return false; + } + const auto byte2_raw = get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format, "number"))) + { + return false; + } + + const auto byte1 = static_cast<unsigned char>(byte1_raw); + const auto byte2 = static_cast<unsigned char>(byte2_raw); + + // code from RFC 7049, Appendix D, Figure 3: + // As half-precision floating-point numbers were only added + // to IEEE 754 in 2008, today's programming platforms often + // still only have limited support for them. It is very + // easy to include at least decoding support for them even + // without such support. An example of a small decoder for + // half-precision floating-point numbers in the C language + // is shown in Fig. 3. + const auto half = static_cast<unsigned int>((byte2 << 8u) + byte1); + const double val = [&half] + { + const int exp = (half >> 10u) & 0x1Fu; + const unsigned int mant = half & 0x3FFu; + JSON_ASSERT(0 <= exp&& exp <= 32); + JSON_ASSERT(mant <= 1024); + switch (exp) + { + case 0: + return std::ldexp(mant, -24); + case 31: + return (mant == 0) + ? std::numeric_limits<double>::infinity() + : std::numeric_limits<double>::quiet_NaN(); + default: + return std::ldexp(mant + 1024, exp - 25); + } + }(); + return sax->number_float((half & 0x8000u) != 0 + ? 
static_cast<number_float_t>(-val) + : static_cast<number_float_t>(val), ""); + } + + case 'd': + { + float number{}; + return get_number(input_format, number) && sax->number_float(static_cast<number_float_t>(number), ""); + } + + case 'D': + { + double number{}; + return get_number(input_format, number) && sax->number_float(static_cast<number_float_t>(number), ""); + } + + case 'H': + { + return get_ubjson_high_precision_number(); + } + + case 'C': // char + { + get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format, "char"))) + { + return false; + } + if (JSON_HEDLEY_UNLIKELY(current > 127)) + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(113, chars_read, + exception_message(input_format, concat("byte after 'C' must be in range 0x00..0x7F; last byte: 0x", last_token), "char"), nullptr)); + } + string_t s(1, static_cast<typename string_t::value_type>(current)); + return sax->string(s); + } + + case 'S': // string + { + string_t s; + return get_ubjson_string(s) && sax->string(s); + } + + case '[': // array + return get_ubjson_array(); + + case '{': // object + return get_ubjson_object(); + + default: // anything else + break; + } + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, exception_message(input_format, "invalid byte: 0x" + last_token, "value"), nullptr)); + } + + /*! + @return whether array creation completed + */ + bool get_ubjson_array() + { + std::pair<std::size_t, char_int_type> size_and_type; + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_size_type(size_and_type))) + { + return false; + } + + // if bit-8 of size_and_type.second is set to 1, encode bjdata ndarray as an object in JData annotated array format (https://github.com/NeuroJSON/jdata): + // {"_ArrayType_" : "typeid", "_ArraySize_" : [n1, n2, ...], "_ArrayData_" : [v1, v2, ...]} + + if (input_format == input_format_t::bjdata && size_and_type.first != string_t::npos && (size_and_type.second & (1 << 8)) != 0) + { + std::map<char_int_type, string_t> bjdtype = {{'U', "uint8"}, {'i', "int8"}, {'u', "uint16"}, {'I', "int16"}, + {'m', "uint32"}, {'l', "int32"}, {'M', "uint64"}, {'L', "int64"}, {'d', "single"}, {'D', "double"}, {'C', "char"} + }; + + size_and_type.second &= ~(static_cast<char_int_type>(1) << 8); // use bit 8 to indicate ndarray, here we remove the bit to restore the type marker + + string_t key = "_ArrayType_"; + if (JSON_HEDLEY_UNLIKELY(bjdtype.count(size_and_type.second) == 0)) + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, + exception_message(input_format, "invalid byte: 0x" + last_token, "type"), nullptr)); + } + + if (JSON_HEDLEY_UNLIKELY(!sax->key(key) || !sax->string(bjdtype[size_and_type.second]) )) + { + return false; + } + + if (size_and_type.second == 'C') + { + size_and_type.second = 'U'; + } + + key = "_ArrayData_"; + if (JSON_HEDLEY_UNLIKELY(!sax->key(key) || !sax->start_array(size_and_type.first) )) + { + return false; + } + + for (std::size_t i = 0; i < size_and_type.first; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_value(size_and_type.second))) + { + return false; + } + } + + return (sax->end_array() && sax->end_object()); + } + + if (size_and_type.first != string_t::npos) + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_array(size_and_type.first))) + { + return false; + } + + if (size_and_type.second != 0) + { + if (size_and_type.second != 'N') + { + for (std::size_t i = 0; 
i < size_and_type.first; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_value(size_and_type.second))) + { + return false; + } + } + } + } + else + { + for (std::size_t i = 0; i < size_and_type.first; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!parse_ubjson_internal())) + { + return false; + } + } + } + } + else + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_array(static_cast<std::size_t>(-1)))) + { + return false; + } + + while (current != ']') + { + if (JSON_HEDLEY_UNLIKELY(!parse_ubjson_internal(false))) + { + return false; + } + get_ignore_noop(); + } + } + + return sax->end_array(); + } + + /*! + @return whether object creation completed + */ + bool get_ubjson_object() + { + std::pair<std::size_t, char_int_type> size_and_type; + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_size_type(size_and_type))) + { + return false; + } + + // do not accept ND-array size in objects in BJData + if (input_format == input_format_t::bjdata && size_and_type.first != string_t::npos && (size_and_type.second & (1 << 8)) != 0) + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, + exception_message(input_format, "BJData object does not support ND-array size in optimized format", "object"), nullptr)); + } + + string_t key; + if (size_and_type.first != string_t::npos) + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_object(size_and_type.first))) + { + return false; + } + + if (size_and_type.second != 0) + { + for (std::size_t i = 0; i < size_and_type.first; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_string(key) || !sax->key(key))) + { + return false; + } + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_value(size_and_type.second))) + { + return false; + } + key.clear(); + } + } + else + { + for (std::size_t i = 0; i < size_and_type.first; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_string(key) || !sax->key(key))) + { + return false; + } + if (JSON_HEDLEY_UNLIKELY(!parse_ubjson_internal())) + { + return false; + } + key.clear(); + } + } + } + else + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_object(static_cast<std::size_t>(-1)))) + { + return false; + } + + while (current != '}') + { + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_string(key, false) || !sax->key(key))) + { + return false; + } + if (JSON_HEDLEY_UNLIKELY(!parse_ubjson_internal())) + { + return false; + } + get_ignore_noop(); + key.clear(); + } + } + + return sax->end_object(); + } + + // Note, no reader for UBJSON binary types is implemented because they do + // not exist + + bool get_ubjson_high_precision_number() + { + // get size of following number string + std::size_t size{}; + bool no_ndarray = true; + auto res = get_ubjson_size_value(size, no_ndarray); + if (JSON_HEDLEY_UNLIKELY(!res)) + { + return res; + } + + // get number string + std::vector<char> number_vector; + for (std::size_t i = 0; i < size; ++i) + { + get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format, "number"))) + { + return false; + } + number_vector.push_back(static_cast<char>(current)); + } + + // parse number string + using ia_type = decltype(detail::input_adapter(number_vector)); + auto number_lexer = detail::lexer<BasicJsonType, ia_type>(detail::input_adapter(number_vector), false); + const auto result_number = number_lexer.scan(); + const auto number_string = number_lexer.get_token_string(); + const auto result_remainder = number_lexer.scan(); + + using token_type = typename detail::lexer_base<BasicJsonType>::token_type; + + if (JSON_HEDLEY_UNLIKELY(result_remainder != token_type::end_of_input)) + { + return 
sax->parse_error(chars_read, number_string, parse_error::create(115, chars_read, + exception_message(input_format, concat("invalid number text: ", number_lexer.get_token_string()), "high-precision number"), nullptr)); + } + + switch (result_number) + { + case token_type::value_integer: + return sax->number_integer(number_lexer.get_number_integer()); + case token_type::value_unsigned: + return sax->number_unsigned(number_lexer.get_number_unsigned()); + case token_type::value_float: + return sax->number_float(number_lexer.get_number_float(), std::move(number_string)); + case token_type::uninitialized: + case token_type::literal_true: + case token_type::literal_false: + case token_type::literal_null: + case token_type::value_string: + case token_type::begin_array: + case token_type::begin_object: + case token_type::end_array: + case token_type::end_object: + case token_type::name_separator: + case token_type::value_separator: + case token_type::parse_error: + case token_type::end_of_input: + case token_type::literal_or_value: + default: + return sax->parse_error(chars_read, number_string, parse_error::create(115, chars_read, + exception_message(input_format, concat("invalid number text: ", number_lexer.get_token_string()), "high-precision number"), nullptr)); + } + } + + /////////////////////// + // Utility functions // + /////////////////////// + + /*! + @brief get next character from the input + + This function provides the interface to the used input adapter. It does + not throw in case the input reached EOF, but returns a -'ve valued + `std::char_traits<char_type>::eof()` in that case. + + @return character read from the input + */ + char_int_type get() + { + ++chars_read; + return current = ia.get_character(); + } + + /*! + @return character read from the input after ignoring all 'N' entries + */ + char_int_type get_ignore_noop() + { + do + { + get(); + } + while (current == 'N'); + + return current; + } + + /* + @brief read a number from the input + + @tparam NumberType the type of the number + @param[in] format the current format (for diagnostics) + @param[out] result number of type @a NumberType + + @return whether conversion completed + + @note This function needs to respect the system's endianness, because + bytes in CBOR, MessagePack, and UBJSON are stored in network order + (big endian) and therefore need reordering on little endian systems. + On the other hand, BSON and BJData use little endian and should reorder + on big endian systems. + */ + template<typename NumberType, bool InputIsLittleEndian = false> + bool get_number(const input_format_t format, NumberType& result) + { + // step 1: read input into array with system's byte order + std::array<std::uint8_t, sizeof(NumberType)> vec{}; + for (std::size_t i = 0; i < sizeof(NumberType); ++i) + { + get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(format, "number"))) + { + return false; + } + + // reverse byte order prior to conversion if necessary + if (is_little_endian != (InputIsLittleEndian || format == input_format_t::bjdata)) + { + vec[sizeof(NumberType) - i - 1] = static_cast<std::uint8_t>(current); + } + else + { + vec[i] = static_cast<std::uint8_t>(current); // LCOV_EXCL_LINE + } + } + + // step 2: convert array into number of type T and return + std::memcpy(&result, vec.data(), sizeof(NumberType)); + return true; + } + + /*! 
+ @brief create a string by reading characters from the input + + @tparam NumberType the type of the number + @param[in] format the current format (for diagnostics) + @param[in] len number of characters to read + @param[out] result string created by reading @a len bytes + + @return whether string creation completed + + @note We can not reserve @a len bytes for the result, because @a len + may be too large. Usually, @ref unexpect_eof() detects the end of + the input before we run out of string memory. + */ + template<typename NumberType> + bool get_string(const input_format_t format, + const NumberType len, + string_t& result) + { + bool success = true; + for (NumberType i = 0; i < len; i++) + { + get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(format, "string"))) + { + success = false; + break; + } + result.push_back(static_cast<typename string_t::value_type>(current)); + } + return success; + } + + /*! + @brief create a byte array by reading bytes from the input + + @tparam NumberType the type of the number + @param[in] format the current format (for diagnostics) + @param[in] len number of bytes to read + @param[out] result byte array created by reading @a len bytes + + @return whether byte array creation completed + + @note We can not reserve @a len bytes for the result, because @a len + may be too large. Usually, @ref unexpect_eof() detects the end of + the input before we run out of memory. + */ + template<typename NumberType> + bool get_binary(const input_format_t format, + const NumberType len, + binary_t& result) + { + bool success = true; + for (NumberType i = 0; i < len; i++) + { + get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(format, "binary"))) + { + success = false; + break; + } + result.push_back(static_cast<std::uint8_t>(current)); + } + return success; + } + + /*! + @param[in] format the current format (for diagnostics) + @param[in] context further context information (for diagnostics) + @return whether the last read character is not EOF + */ + JSON_HEDLEY_NON_NULL(3) + bool unexpect_eof(const input_format_t format, const char* context) const + { + if (JSON_HEDLEY_UNLIKELY(current == std::char_traits<char_type>::eof())) + { + return sax->parse_error(chars_read, "<end of file>", + parse_error::create(110, chars_read, exception_message(format, "unexpected end of input", context), nullptr)); + } + return true; + } + + /*! + @return a string representation of the last read byte + */ + std::string get_token_string() const + { + std::array<char, 3> cr{{}}; + static_cast<void>((std::snprintf)(cr.data(), cr.size(), "%.2hhX", static_cast<unsigned char>(current))); // NOLINT(cppcoreguidelines-pro-type-vararg,hicpp-vararg) + return std::string{cr.data()}; + } + + /*! 
+ @param[in] format the current format + @param[in] detail a detailed error message + @param[in] context further context information + @return a message string to use in the parse_error exceptions + */ + std::string exception_message(const input_format_t format, + const std::string& detail, + const std::string& context) const + { + std::string error_msg = "syntax error while parsing "; + + switch (format) + { + case input_format_t::cbor: + error_msg += "CBOR"; + break; + + case input_format_t::msgpack: + error_msg += "MessagePack"; + break; + + case input_format_t::ubjson: + error_msg += "UBJSON"; + break; + + case input_format_t::bson: + error_msg += "BSON"; + break; + + case input_format_t::bjdata: + error_msg += "BJData"; + break; + + case input_format_t::json: // LCOV_EXCL_LINE + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) LCOV_EXCL_LINE + } + + return concat(error_msg, ' ', context, ": ", detail); + } + + private: + /// input adapter + InputAdapterType ia; + + /// the current character + char_int_type current = std::char_traits<char_type>::eof(); + + /// the number of characters read + std::size_t chars_read = 0; + + /// whether we can assume little endianness + const bool is_little_endian = little_endianness(); + + /// input format + const input_format_t input_format = input_format_t::json; + + /// the SAX parser + json_sax_t* sax = nullptr; +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/input/input_adapters.hpp> + +// #include <nlohmann/detail/input/lexer.hpp> + +// #include <nlohmann/detail/input/parser.hpp> + + +#include <cmath> // isfinite +#include <cstdint> // uint8_t +#include <functional> // function +#include <string> // string +#include <utility> // move +#include <vector> // vector + +// #include <nlohmann/detail/exceptions.hpp> + +// #include <nlohmann/detail/input/input_adapters.hpp> + +// #include <nlohmann/detail/input/json_sax.hpp> + +// #include <nlohmann/detail/input/lexer.hpp> + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/meta/is_sax.hpp> + +// #include <nlohmann/detail/string_concat.hpp> + +// #include <nlohmann/detail/value_t.hpp> + + +namespace nlohmann +{ +namespace detail +{ +//////////// +// parser // +//////////// + +enum class parse_event_t : std::uint8_t +{ + /// the parser read `{` and started to process a JSON object + object_start, + /// the parser read `}` and finished processing a JSON object + object_end, + /// the parser read `[` and started to process a JSON array + array_start, + /// the parser read `]` and finished processing a JSON array + array_end, + /// the parser read a key of a value in an object + key, + /// the parser finished reading a JSON value + value +}; + +template<typename BasicJsonType> +using parser_callback_t = + std::function<bool(int /*depth*/, parse_event_t /*event*/, BasicJsonType& /*parsed*/)>; + +/*! +@brief syntax analysis + +This class implements a recursive descent parser. 
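+
+A hedged usage sketch (illustrative only, not part of the vendored header):
+user code normally does not instantiate detail::parser directly but drives it
+through the public basic_json entry points, roughly as follows.
+
+@code
+#include <nlohmann/json.hpp>
+
+// DOM parsing: basic_json::parse constructs a parser and calls parse()
+nlohmann::json j = nlohmann::json::parse(R"({"pi": 3.141, "happy": true})");
+
+// validation only: basic_json::accept constructs a parser and calls accept()
+bool ok = nlohmann::json::accept(R"([1, 2, 3])");
+@endcode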
+*/ +template<typename BasicJsonType, typename InputAdapterType> +class parser +{ + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using lexer_t = lexer<BasicJsonType, InputAdapterType>; + using token_type = typename lexer_t::token_type; + + public: + /// a parser reading from an input adapter + explicit parser(InputAdapterType&& adapter, + const parser_callback_t<BasicJsonType> cb = nullptr, + const bool allow_exceptions_ = true, + const bool skip_comments = false) + : callback(cb) + , m_lexer(std::move(adapter), skip_comments) + , allow_exceptions(allow_exceptions_) + { + // read first token + get_token(); + } + + /*! + @brief public parser interface + + @param[in] strict whether to expect the last token to be EOF + @param[in,out] result parsed JSON value + + @throw parse_error.101 in case of an unexpected token + @throw parse_error.102 if to_unicode fails or surrogate error + @throw parse_error.103 if to_unicode fails + */ + void parse(const bool strict, BasicJsonType& result) + { + if (callback) + { + json_sax_dom_callback_parser<BasicJsonType> sdp(result, callback, allow_exceptions); + sax_parse_internal(&sdp); + + // in strict mode, input must be completely read + if (strict && (get_token() != token_type::end_of_input)) + { + sdp.parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), + exception_message(token_type::end_of_input, "value"), nullptr)); + } + + // in case of an error, return discarded value + if (sdp.is_errored()) + { + result = value_t::discarded; + return; + } + + // set top-level value to null if it was discarded by the callback + // function + if (result.is_discarded()) + { + result = nullptr; + } + } + else + { + json_sax_dom_parser<BasicJsonType> sdp(result, allow_exceptions); + sax_parse_internal(&sdp); + + // in strict mode, input must be completely read + if (strict && (get_token() != token_type::end_of_input)) + { + sdp.parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_of_input, "value"), nullptr)); + } + + // in case of an error, return discarded value + if (sdp.is_errored()) + { + result = value_t::discarded; + return; + } + } + + result.assert_invariant(); + } + + /*! 
+ @brief public accept interface + + @param[in] strict whether to expect the last token to be EOF + @return whether the input is a proper JSON text + */ + bool accept(const bool strict = true) + { + json_sax_acceptor<BasicJsonType> sax_acceptor; + return sax_parse(&sax_acceptor, strict); + } + + template<typename SAX> + JSON_HEDLEY_NON_NULL(2) + bool sax_parse(SAX* sax, const bool strict = true) + { + (void)detail::is_sax_static_asserts<SAX, BasicJsonType> {}; + const bool result = sax_parse_internal(sax); + + // strict mode: next byte must be EOF + if (result && strict && (get_token() != token_type::end_of_input)) + { + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_of_input, "value"), nullptr)); + } + + return result; + } + + private: + template<typename SAX> + JSON_HEDLEY_NON_NULL(2) + bool sax_parse_internal(SAX* sax) + { + // stack to remember the hierarchy of structured values we are parsing + // true = array; false = object + std::vector<bool> states; + // value to avoid a goto (see comment where set to true) + bool skip_to_state_evaluation = false; + + while (true) + { + if (!skip_to_state_evaluation) + { + // invariant: get_token() was called before each iteration + switch (last_token) + { + case token_type::begin_object: + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_object(static_cast<std::size_t>(-1)))) + { + return false; + } + + // closing } -> we are done + if (get_token() == token_type::end_object) + { + if (JSON_HEDLEY_UNLIKELY(!sax->end_object())) + { + return false; + } + break; + } + + // parse key + if (JSON_HEDLEY_UNLIKELY(last_token != token_type::value_string)) + { + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), exception_message(token_type::value_string, "object key"), nullptr)); + } + if (JSON_HEDLEY_UNLIKELY(!sax->key(m_lexer.get_string()))) + { + return false; + } + + // parse separator (:) + if (JSON_HEDLEY_UNLIKELY(get_token() != token_type::name_separator)) + { + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), exception_message(token_type::name_separator, "object separator"), nullptr)); + } + + // remember we are now inside an object + states.push_back(false); + + // parse values + get_token(); + continue; + } + + case token_type::begin_array: + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_array(static_cast<std::size_t>(-1)))) + { + return false; + } + + // closing ] -> we are done + if (get_token() == token_type::end_array) + { + if (JSON_HEDLEY_UNLIKELY(!sax->end_array())) + { + return false; + } + break; + } + + // remember we are now inside an array + states.push_back(true); + + // parse values (no need to call get_token) + continue; + } + + case token_type::value_float: + { + const auto res = m_lexer.get_number_float(); + + if (JSON_HEDLEY_UNLIKELY(!std::isfinite(res))) + { + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + out_of_range::create(406, concat("number overflow parsing '", m_lexer.get_token_string(), '\''), nullptr)); + } + + if (JSON_HEDLEY_UNLIKELY(!sax->number_float(res, m_lexer.get_string()))) + { + return false; + } + + break; + } + + case token_type::literal_false: + { + if (JSON_HEDLEY_UNLIKELY(!sax->boolean(false))) + { + return false; + } + break; + } + + case token_type::literal_null: + { + if (JSON_HEDLEY_UNLIKELY(!sax->null())) + { + 
return false; + } + break; + } + + case token_type::literal_true: + { + if (JSON_HEDLEY_UNLIKELY(!sax->boolean(true))) + { + return false; + } + break; + } + + case token_type::value_integer: + { + if (JSON_HEDLEY_UNLIKELY(!sax->number_integer(m_lexer.get_number_integer()))) + { + return false; + } + break; + } + + case token_type::value_string: + { + if (JSON_HEDLEY_UNLIKELY(!sax->string(m_lexer.get_string()))) + { + return false; + } + break; + } + + case token_type::value_unsigned: + { + if (JSON_HEDLEY_UNLIKELY(!sax->number_unsigned(m_lexer.get_number_unsigned()))) + { + return false; + } + break; + } + + case token_type::parse_error: + { + // using "uninitialized" to avoid "expected" message + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), exception_message(token_type::uninitialized, "value"), nullptr)); + } + + case token_type::uninitialized: + case token_type::end_array: + case token_type::end_object: + case token_type::name_separator: + case token_type::value_separator: + case token_type::end_of_input: + case token_type::literal_or_value: + default: // the last token was unexpected + { + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), exception_message(token_type::literal_or_value, "value"), nullptr)); + } + } + } + else + { + skip_to_state_evaluation = false; + } + + // we reached this line after we successfully parsed a value + if (states.empty()) + { + // empty stack: we reached the end of the hierarchy: done + return true; + } + + if (states.back()) // array + { + // comma -> next value + if (get_token() == token_type::value_separator) + { + // parse a new value + get_token(); + continue; + } + + // closing ] + if (JSON_HEDLEY_LIKELY(last_token == token_type::end_array)) + { + if (JSON_HEDLEY_UNLIKELY(!sax->end_array())) + { + return false; + } + + // We are done with this array. Before we can parse a + // new value, we need to evaluate the new state first. + // By setting skip_to_state_evaluation to false, we + // are effectively jumping to the beginning of this if. + JSON_ASSERT(!states.empty()); + states.pop_back(); + skip_to_state_evaluation = true; + continue; + } + + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_array, "array"), nullptr)); + } + + // states.back() is false -> object + + // comma -> next value + if (get_token() == token_type::value_separator) + { + // parse key + if (JSON_HEDLEY_UNLIKELY(get_token() != token_type::value_string)) + { + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), exception_message(token_type::value_string, "object key"), nullptr)); + } + + if (JSON_HEDLEY_UNLIKELY(!sax->key(m_lexer.get_string()))) + { + return false; + } + + // parse separator (:) + if (JSON_HEDLEY_UNLIKELY(get_token() != token_type::name_separator)) + { + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), exception_message(token_type::name_separator, "object separator"), nullptr)); + } + + // parse values + get_token(); + continue; + } + + // closing } + if (JSON_HEDLEY_LIKELY(last_token == token_type::end_object)) + { + if (JSON_HEDLEY_UNLIKELY(!sax->end_object())) + { + return false; + } + + // We are done with this object. 
Before we can parse a + // new value, we need to evaluate the new state first. + // By setting skip_to_state_evaluation to false, we + // are effectively jumping to the beginning of this if. + JSON_ASSERT(!states.empty()); + states.pop_back(); + skip_to_state_evaluation = true; + continue; + } + + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_object, "object"), nullptr)); + } + } + + /// get next token from lexer + token_type get_token() + { + return last_token = m_lexer.scan(); + } + + std::string exception_message(const token_type expected, const std::string& context) + { + std::string error_msg = "syntax error "; + + if (!context.empty()) + { + error_msg += concat("while parsing ", context, ' '); + } + + error_msg += "- "; + + if (last_token == token_type::parse_error) + { + error_msg += concat(m_lexer.get_error_message(), "; last read: '", + m_lexer.get_token_string(), '\''); + } + else + { + error_msg += concat("unexpected ", lexer_t::token_type_name(last_token)); + } + + if (expected != token_type::uninitialized) + { + error_msg += concat("; expected ", lexer_t::token_type_name(expected)); + } + + return error_msg; + } + + private: + /// callback function + const parser_callback_t<BasicJsonType> callback = nullptr; + /// the type of the last read token + token_type last_token = token_type::uninitialized; + /// the lexer + lexer_t m_lexer; + /// whether to throw exceptions in case of errors + const bool allow_exceptions = true; +}; + +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/iterators/internal_iterator.hpp> + + +// #include <nlohmann/detail/iterators/primitive_iterator.hpp> + + +#include <cstddef> // ptrdiff_t +#include <limits> // numeric_limits + +// #include <nlohmann/detail/macro_scope.hpp> + + +namespace nlohmann +{ +namespace detail +{ +/* +@brief an iterator for primitive JSON types + +This class models an iterator for primitive JSON types (boolean, number, +string). It's only purpose is to allow the iterator/const_iterator classes +to "iterate" over primitive values. Internally, the iterator is modeled by +a `difference_type` variable. Value begin_value (`0`) models the begin, +end_value (`1`) models past the end. 
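+
+@note For a null JSON value, iter_impl::set_begin below calls set_end() on
+this iterator, so begin() == end() holds and iterating over a null value
+visits no elements.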
+*/ +class primitive_iterator_t +{ + private: + using difference_type = std::ptrdiff_t; + static constexpr difference_type begin_value = 0; + static constexpr difference_type end_value = begin_value + 1; + + JSON_PRIVATE_UNLESS_TESTED: + /// iterator as signed integer type + difference_type m_it = (std::numeric_limits<std::ptrdiff_t>::min)(); + + public: + constexpr difference_type get_value() const noexcept + { + return m_it; + } + + /// set iterator to a defined beginning + void set_begin() noexcept + { + m_it = begin_value; + } + + /// set iterator to a defined past the end + void set_end() noexcept + { + m_it = end_value; + } + + /// return whether the iterator can be dereferenced + constexpr bool is_begin() const noexcept + { + return m_it == begin_value; + } + + /// return whether the iterator is at end + constexpr bool is_end() const noexcept + { + return m_it == end_value; + } + + friend constexpr bool operator==(primitive_iterator_t lhs, primitive_iterator_t rhs) noexcept + { + return lhs.m_it == rhs.m_it; + } + + friend constexpr bool operator<(primitive_iterator_t lhs, primitive_iterator_t rhs) noexcept + { + return lhs.m_it < rhs.m_it; + } + + primitive_iterator_t operator+(difference_type n) noexcept + { + auto result = *this; + result += n; + return result; + } + + friend constexpr difference_type operator-(primitive_iterator_t lhs, primitive_iterator_t rhs) noexcept + { + return lhs.m_it - rhs.m_it; + } + + primitive_iterator_t& operator++() noexcept + { + ++m_it; + return *this; + } + + primitive_iterator_t operator++(int)& noexcept // NOLINT(cert-dcl21-cpp) + { + auto result = *this; + ++m_it; + return result; + } + + primitive_iterator_t& operator--() noexcept + { + --m_it; + return *this; + } + + primitive_iterator_t operator--(int)& noexcept // NOLINT(cert-dcl21-cpp) + { + auto result = *this; + --m_it; + return result; + } + + primitive_iterator_t& operator+=(difference_type n) noexcept + { + m_it += n; + return *this; + } + + primitive_iterator_t& operator-=(difference_type n) noexcept + { + m_it -= n; + return *this; + } +}; +} // namespace detail +} // namespace nlohmann + + +namespace nlohmann +{ +namespace detail +{ +/*! +@brief an iterator value + +@note This structure could easily be a union, but MSVC currently does not allow +unions members with complex constructors, see https://github.com/nlohmann/json/pull/105. 
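+
+@note Only the member that matches the type of the associated JSON value is
+meaningful at any given time; iter_impl selects it by switching on
+m_object->m_type.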
+*/ +template<typename BasicJsonType> struct internal_iterator +{ + /// iterator for JSON objects + typename BasicJsonType::object_t::iterator object_iterator {}; + /// iterator for JSON arrays + typename BasicJsonType::array_t::iterator array_iterator {}; + /// generic iterator for all other types + primitive_iterator_t primitive_iterator {}; +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/iterators/iter_impl.hpp> + + +#include <iterator> // iterator, random_access_iterator_tag, bidirectional_iterator_tag, advance, next +#include <type_traits> // conditional, is_const, remove_const + +// #include <nlohmann/detail/exceptions.hpp> + +// #include <nlohmann/detail/iterators/internal_iterator.hpp> + +// #include <nlohmann/detail/iterators/primitive_iterator.hpp> + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/meta/cpp_future.hpp> + +// #include <nlohmann/detail/meta/type_traits.hpp> + +// #include <nlohmann/detail/value_t.hpp> + + +namespace nlohmann +{ +namespace detail +{ +// forward declare, to be able to friend it later on +template<typename IteratorType> class iteration_proxy; +template<typename IteratorType> class iteration_proxy_value; + +/*! +@brief a template for a bidirectional iterator for the @ref basic_json class +This class implements a both iterators (iterator and const_iterator) for the +@ref basic_json class. +@note An iterator is called *initialized* when a pointer to a JSON value has + been set (e.g., by a constructor or a copy assignment). If the iterator is + default-constructed, it is *uninitialized* and most methods are undefined. + **The library uses assertions to detect calls on uninitialized iterators.** +@requirement The class satisfies the following concept requirements: +- +[BidirectionalIterator](https://en.cppreference.com/w/cpp/named_req/BidirectionalIterator): + The iterator that can be moved can be moved in both directions (i.e. + incremented and decremented). +@since version 1.0.0, simplified in version 2.0.9, change to bidirectional + iterators in version 3.0.0 (see https://github.com/nlohmann/json/issues/593) +*/ +template<typename BasicJsonType> +class iter_impl // NOLINT(cppcoreguidelines-special-member-functions,hicpp-special-member-functions) +{ + /// the iterator with BasicJsonType of different const-ness + using other_iter_impl = iter_impl<typename std::conditional<std::is_const<BasicJsonType>::value, typename std::remove_const<BasicJsonType>::type, const BasicJsonType>::type>; + /// allow basic_json to access private members + friend other_iter_impl; + friend BasicJsonType; + friend iteration_proxy<iter_impl>; + friend iteration_proxy_value<iter_impl>; + + using object_t = typename BasicJsonType::object_t; + using array_t = typename BasicJsonType::array_t; + // make sure BasicJsonType is basic_json or const basic_json + static_assert(is_basic_json<typename std::remove_const<BasicJsonType>::type>::value, + "iter_impl only accepts (const) basic_json"); + // superficial check for the LegacyBidirectionalIterator named requirement + static_assert(std::is_base_of<std::bidirectional_iterator_tag, std::bidirectional_iterator_tag>::value + && std::is_base_of<std::bidirectional_iterator_tag, typename array_t::iterator::iterator_category>::value, + "basic_json iterator assumes array and object type iterators satisfy the LegacyBidirectionalIterator named requirement."); + + public: + /// The std::iterator class template (used as a base class to provide typedefs) is deprecated in C++17. 
+ /// The C++ Standard has never required user-defined iterators to derive from std::iterator. + /// A user-defined iterator should provide publicly accessible typedefs named + /// iterator_category, value_type, difference_type, pointer, and reference. + /// Note that value_type is required to be non-const, even for constant iterators. + using iterator_category = std::bidirectional_iterator_tag; + + /// the type of the values when the iterator is dereferenced + using value_type = typename BasicJsonType::value_type; + /// a type to represent differences between iterators + using difference_type = typename BasicJsonType::difference_type; + /// defines a pointer to the type iterated over (value_type) + using pointer = typename std::conditional<std::is_const<BasicJsonType>::value, + typename BasicJsonType::const_pointer, + typename BasicJsonType::pointer>::type; + /// defines a reference to the type iterated over (value_type) + using reference = + typename std::conditional<std::is_const<BasicJsonType>::value, + typename BasicJsonType::const_reference, + typename BasicJsonType::reference>::type; + + iter_impl() = default; + ~iter_impl() = default; + iter_impl(iter_impl&&) noexcept = default; + iter_impl& operator=(iter_impl&&) noexcept = default; + + /*! + @brief constructor for a given JSON instance + @param[in] object pointer to a JSON object for this iterator + @pre object != nullptr + @post The iterator is initialized; i.e. `m_object != nullptr`. + */ + explicit iter_impl(pointer object) noexcept : m_object(object) + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + { + m_it.object_iterator = typename object_t::iterator(); + break; + } + + case value_t::array: + { + m_it.array_iterator = typename array_t::iterator(); + break; + } + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + m_it.primitive_iterator = primitive_iterator_t(); + break; + } + } + } + + /*! + @note The conventional copy constructor and copy assignment are implicitly + defined. Combined with the following converting constructor and + assignment, they support: (1) copy from iterator to iterator, (2) + copy from const iterator to const iterator, and (3) conversion from + iterator to const iterator. However conversion from const iterator + to iterator is not defined. + */ + + /*! + @brief const copy constructor + @param[in] other const iterator to copy from + @note This copy constructor had to be defined explicitly to circumvent a bug + occurring on msvc v19.0 compiler (VS 2015) debug build. For more + information refer to: https://github.com/nlohmann/json/issues/1608 + */ + iter_impl(const iter_impl<const BasicJsonType>& other) noexcept + : m_object(other.m_object), m_it(other.m_it) + {} + + /*! + @brief converting assignment + @param[in] other const iterator to copy from + @return const/non-const iterator + @note It is not checked whether @a other is initialized. + */ + iter_impl& operator=(const iter_impl<const BasicJsonType>& other) noexcept + { + if (&other != this) + { + m_object = other.m_object; + m_it = other.m_it; + } + return *this; + } + + /*! + @brief converting constructor + @param[in] other non-const iterator to copy from + @note It is not checked whether @a other is initialized. 
+ */ + iter_impl(const iter_impl<typename std::remove_const<BasicJsonType>::type>& other) noexcept + : m_object(other.m_object), m_it(other.m_it) + {} + + /*! + @brief converting assignment + @param[in] other non-const iterator to copy from + @return const/non-const iterator + @note It is not checked whether @a other is initialized. + */ + iter_impl& operator=(const iter_impl<typename std::remove_const<BasicJsonType>::type>& other) noexcept // NOLINT(cert-oop54-cpp) + { + m_object = other.m_object; + m_it = other.m_it; + return *this; + } + + JSON_PRIVATE_UNLESS_TESTED: + /*! + @brief set the iterator to the first value + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + void set_begin() noexcept + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + { + m_it.object_iterator = m_object->m_value.object->begin(); + break; + } + + case value_t::array: + { + m_it.array_iterator = m_object->m_value.array->begin(); + break; + } + + case value_t::null: + { + // set to end so begin()==end() is true: null is empty + m_it.primitive_iterator.set_end(); + break; + } + + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + m_it.primitive_iterator.set_begin(); + break; + } + } + } + + /*! + @brief set the iterator past the last value + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + void set_end() noexcept + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + { + m_it.object_iterator = m_object->m_value.object->end(); + break; + } + + case value_t::array: + { + m_it.array_iterator = m_object->m_value.array->end(); + break; + } + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + m_it.primitive_iterator.set_end(); + break; + } + } + } + + public: + /*! + @brief return a reference to the value pointed to by the iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + reference operator*() const + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + { + JSON_ASSERT(m_it.object_iterator != m_object->m_value.object->end()); + return m_it.object_iterator->second; + } + + case value_t::array: + { + JSON_ASSERT(m_it.array_iterator != m_object->m_value.array->end()); + return *m_it.array_iterator; + } + + case value_t::null: + JSON_THROW(invalid_iterator::create(214, "cannot get value", m_object)); + + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + if (JSON_HEDLEY_LIKELY(m_it.primitive_iterator.is_begin())) + { + return *m_object; + } + + JSON_THROW(invalid_iterator::create(214, "cannot get value", m_object)); + } + } + } + + /*! + @brief dereference the iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. 
+ */ + pointer operator->() const + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + { + JSON_ASSERT(m_it.object_iterator != m_object->m_value.object->end()); + return &(m_it.object_iterator->second); + } + + case value_t::array: + { + JSON_ASSERT(m_it.array_iterator != m_object->m_value.array->end()); + return &*m_it.array_iterator; + } + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + if (JSON_HEDLEY_LIKELY(m_it.primitive_iterator.is_begin())) + { + return m_object; + } + + JSON_THROW(invalid_iterator::create(214, "cannot get value", m_object)); + } + } + } + + /*! + @brief post-increment (it++) + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + iter_impl operator++(int)& // NOLINT(cert-dcl21-cpp) + { + auto result = *this; + ++(*this); + return result; + } + + /*! + @brief pre-increment (++it) + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + iter_impl& operator++() + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + { + std::advance(m_it.object_iterator, 1); + break; + } + + case value_t::array: + { + std::advance(m_it.array_iterator, 1); + break; + } + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + ++m_it.primitive_iterator; + break; + } + } + + return *this; + } + + /*! + @brief post-decrement (it--) + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + iter_impl operator--(int)& // NOLINT(cert-dcl21-cpp) + { + auto result = *this; + --(*this); + return result; + } + + /*! + @brief pre-decrement (--it) + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + iter_impl& operator--() + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + { + std::advance(m_it.object_iterator, -1); + break; + } + + case value_t::array: + { + std::advance(m_it.array_iterator, -1); + break; + } + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + --m_it.primitive_iterator; + break; + } + } + + return *this; + } + + /*! + @brief comparison: equal + @pre The iterator is initialized; i.e. `m_object != nullptr`. 
+ */ + template < typename IterImpl, detail::enable_if_t < (std::is_same<IterImpl, iter_impl>::value || std::is_same<IterImpl, other_iter_impl>::value), std::nullptr_t > = nullptr > + bool operator==(const IterImpl& other) const + { + // if objects are not the same, the comparison is undefined + if (JSON_HEDLEY_UNLIKELY(m_object != other.m_object)) + { + JSON_THROW(invalid_iterator::create(212, "cannot compare iterators of different containers", m_object)); + } + + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + return (m_it.object_iterator == other.m_it.object_iterator); + + case value_t::array: + return (m_it.array_iterator == other.m_it.array_iterator); + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + return (m_it.primitive_iterator == other.m_it.primitive_iterator); + } + } + + /*! + @brief comparison: not equal + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + template < typename IterImpl, detail::enable_if_t < (std::is_same<IterImpl, iter_impl>::value || std::is_same<IterImpl, other_iter_impl>::value), std::nullptr_t > = nullptr > + bool operator!=(const IterImpl& other) const + { + return !operator==(other); + } + + /*! + @brief comparison: smaller + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + bool operator<(const iter_impl& other) const + { + // if objects are not the same, the comparison is undefined + if (JSON_HEDLEY_UNLIKELY(m_object != other.m_object)) + { + JSON_THROW(invalid_iterator::create(212, "cannot compare iterators of different containers", m_object)); + } + + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + JSON_THROW(invalid_iterator::create(213, "cannot compare order of object iterators", m_object)); + + case value_t::array: + return (m_it.array_iterator < other.m_it.array_iterator); + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + return (m_it.primitive_iterator < other.m_it.primitive_iterator); + } + } + + /*! + @brief comparison: less than or equal + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + bool operator<=(const iter_impl& other) const + { + return !other.operator < (*this); + } + + /*! + @brief comparison: greater than + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + bool operator>(const iter_impl& other) const + { + return !operator<=(other); + } + + /*! + @brief comparison: greater than or equal + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + bool operator>=(const iter_impl& other) const + { + return !operator<(other); + } + + /*! + @brief add to iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. 
+ */ + iter_impl& operator+=(difference_type i) + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + JSON_THROW(invalid_iterator::create(209, "cannot use offsets with object iterators", m_object)); + + case value_t::array: + { + std::advance(m_it.array_iterator, i); + break; + } + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + m_it.primitive_iterator += i; + break; + } + } + + return *this; + } + + /*! + @brief subtract from iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + iter_impl& operator-=(difference_type i) + { + return operator+=(-i); + } + + /*! + @brief add to iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + iter_impl operator+(difference_type i) const + { + auto result = *this; + result += i; + return result; + } + + /*! + @brief addition of distance and iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + friend iter_impl operator+(difference_type i, const iter_impl& it) + { + auto result = it; + result += i; + return result; + } + + /*! + @brief subtract from iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + iter_impl operator-(difference_type i) const + { + auto result = *this; + result -= i; + return result; + } + + /*! + @brief return difference + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + difference_type operator-(const iter_impl& other) const + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + JSON_THROW(invalid_iterator::create(209, "cannot use offsets with object iterators", m_object)); + + case value_t::array: + return m_it.array_iterator - other.m_it.array_iterator; + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + return m_it.primitive_iterator - other.m_it.primitive_iterator; + } + } + + /*! + @brief access to successor + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + reference operator[](difference_type n) const + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + JSON_THROW(invalid_iterator::create(208, "cannot use operator[] for object iterators", m_object)); + + case value_t::array: + return *std::next(m_it.array_iterator, n); + + case value_t::null: + JSON_THROW(invalid_iterator::create(214, "cannot get value", m_object)); + + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + if (JSON_HEDLEY_LIKELY(m_it.primitive_iterator.get_value() == -n)) + { + return *m_object; + } + + JSON_THROW(invalid_iterator::create(214, "cannot get value", m_object)); + } + } + } + + /*! + @brief return the key of an object iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. 
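+
+    A hedged usage sketch (illustrative only; the usual object-iteration
+    pattern of the public API):
+
+    @code
+    nlohmann::json j = {{"name", "value"}};
+    for (auto it = j.begin(); it != j.end(); ++it)
+    {
+        std::cout << it.key() << " : " << it.value() << '\n';  // requires <iostream>
+    }
+    @endcode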
+ */ + const typename object_t::key_type& key() const + { + JSON_ASSERT(m_object != nullptr); + + if (JSON_HEDLEY_LIKELY(m_object->is_object())) + { + return m_it.object_iterator->first; + } + + JSON_THROW(invalid_iterator::create(207, "cannot use key() for non-object iterators", m_object)); + } + + /*! + @brief return the value of an iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + reference value() const + { + return operator*(); + } + + JSON_PRIVATE_UNLESS_TESTED: + /// associated JSON instance + pointer m_object = nullptr; + /// the actual iterator of the associated instance + internal_iterator<typename std::remove_const<BasicJsonType>::type> m_it {}; +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/iterators/iteration_proxy.hpp> + +// #include <nlohmann/detail/iterators/json_reverse_iterator.hpp> + + +#include <cstddef> // ptrdiff_t +#include <iterator> // reverse_iterator +#include <utility> // declval + +namespace nlohmann +{ +namespace detail +{ +////////////////////// +// reverse_iterator // +////////////////////// + +/*! +@brief a template for a reverse iterator class + +@tparam Base the base iterator type to reverse. Valid types are @ref +iterator (to create @ref reverse_iterator) and @ref const_iterator (to +create @ref const_reverse_iterator). + +@requirement The class satisfies the following concept requirements: +- +[BidirectionalIterator](https://en.cppreference.com/w/cpp/named_req/BidirectionalIterator): + The iterator that can be moved can be moved in both directions (i.e. + incremented and decremented). +- [OutputIterator](https://en.cppreference.com/w/cpp/named_req/OutputIterator): + It is possible to write to the pointed-to element (only if @a Base is + @ref iterator). 
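+
+A hedged usage sketch (illustrative only; the public API exposes this class
+as reverse_iterator / const_reverse_iterator via rbegin() and rend()):
+
+@code
+nlohmann::json j = {1, 2, 3};
+for (auto it = j.rbegin(); it != j.rend(); ++it)
+{
+    // visits 3, 2, 1
+}
+@endcode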
+ +@since version 1.0.0 +*/ +template<typename Base> +class json_reverse_iterator : public std::reverse_iterator<Base> +{ + public: + using difference_type = std::ptrdiff_t; + /// shortcut to the reverse iterator adapter + using base_iterator = std::reverse_iterator<Base>; + /// the reference type for the pointed-to element + using reference = typename Base::reference; + + /// create reverse iterator from iterator + explicit json_reverse_iterator(const typename base_iterator::iterator_type& it) noexcept + : base_iterator(it) {} + + /// create reverse iterator from base class + explicit json_reverse_iterator(const base_iterator& it) noexcept : base_iterator(it) {} + + /// post-increment (it++) + json_reverse_iterator operator++(int)& // NOLINT(cert-dcl21-cpp) + { + return static_cast<json_reverse_iterator>(base_iterator::operator++(1)); + } + + /// pre-increment (++it) + json_reverse_iterator& operator++() + { + return static_cast<json_reverse_iterator&>(base_iterator::operator++()); + } + + /// post-decrement (it--) + json_reverse_iterator operator--(int)& // NOLINT(cert-dcl21-cpp) + { + return static_cast<json_reverse_iterator>(base_iterator::operator--(1)); + } + + /// pre-decrement (--it) + json_reverse_iterator& operator--() + { + return static_cast<json_reverse_iterator&>(base_iterator::operator--()); + } + + /// add to iterator + json_reverse_iterator& operator+=(difference_type i) + { + return static_cast<json_reverse_iterator&>(base_iterator::operator+=(i)); + } + + /// add to iterator + json_reverse_iterator operator+(difference_type i) const + { + return static_cast<json_reverse_iterator>(base_iterator::operator+(i)); + } + + /// subtract from iterator + json_reverse_iterator operator-(difference_type i) const + { + return static_cast<json_reverse_iterator>(base_iterator::operator-(i)); + } + + /// return difference + difference_type operator-(const json_reverse_iterator& other) const + { + return base_iterator(*this) - base_iterator(other); + } + + /// access to successor + reference operator[](difference_type n) const + { + return *(this->operator+(n)); + } + + /// return the key of an object iterator + auto key() const -> decltype(std::declval<Base>().key()) + { + auto it = --this->base(); + return it.key(); + } + + /// return the value of an iterator + reference value() const + { + auto it = --this->base(); + return it.operator * (); + } +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/iterators/primitive_iterator.hpp> + +// #include <nlohmann/detail/json_pointer.hpp> + + +#include <algorithm> // all_of +#include <cctype> // isdigit +#include <cerrno> // errno, ERANGE +#include <cstdlib> // strtoull +#include <limits> // max +#include <numeric> // accumulate +#include <string> // string +#include <utility> // move +#include <vector> // vector + +// #include <nlohmann/detail/exceptions.hpp> + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/string_concat.hpp> + +// #include <nlohmann/detail/string_escape.hpp> + +// #include <nlohmann/detail/value_t.hpp> + + +namespace nlohmann +{ + +/// @brief JSON Pointer defines a string syntax for identifying a specific value within a JSON document +/// @sa https://json.nlohmann.me/api/json_pointer/ +template<typename RefStringType> +class json_pointer +{ + // allow basic_json to access private members + NLOHMANN_BASIC_JSON_TPL_DECLARATION + friend class basic_json; + + template<typename> + friend class json_pointer; + + template<typename T> + struct string_t_helper + { + 
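+        // primary template: a plain string type maps to itself; the
+        // basic_json specialization below maps a json type to its StringType
+        // (kept for backwards compatibility, see the string_t alias)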
using type = T; + }; + + NLOHMANN_BASIC_JSON_TPL_DECLARATION + struct string_t_helper<NLOHMANN_BASIC_JSON_TPL> + { + using type = StringType; + }; + + public: + // for backwards compatibility accept BasicJsonType + using string_t = typename string_t_helper<RefStringType>::type; + + /// @brief create JSON pointer + /// @sa https://json.nlohmann.me/api/json_pointer/json_pointer/ + explicit json_pointer(const string_t& s = "") + : reference_tokens(split(s)) + {} + + /// @brief return a string representation of the JSON pointer + /// @sa https://json.nlohmann.me/api/json_pointer/to_string/ + string_t to_string() const + { + return std::accumulate(reference_tokens.begin(), reference_tokens.end(), + string_t{}, + [](const string_t& a, const string_t& b) + { + return detail::concat(a, '/', detail::escape(b)); + }); + } + + /// @brief return a string representation of the JSON pointer + /// @sa https://json.nlohmann.me/api/json_pointer/operator_string/ + operator string_t() const + { + return to_string(); + } + + /// @brief append another JSON pointer at the end of this JSON pointer + /// @sa https://json.nlohmann.me/api/json_pointer/operator_slasheq/ + json_pointer& operator/=(const json_pointer& ptr) + { + reference_tokens.insert(reference_tokens.end(), + ptr.reference_tokens.begin(), + ptr.reference_tokens.end()); + return *this; + } + + /// @brief append an unescaped reference token at the end of this JSON pointer + /// @sa https://json.nlohmann.me/api/json_pointer/operator_slasheq/ + json_pointer& operator/=(string_t token) + { + push_back(std::move(token)); + return *this; + } + + /// @brief append an array index at the end of this JSON pointer + /// @sa https://json.nlohmann.me/api/json_pointer/operator_slasheq/ + json_pointer& operator/=(std::size_t array_idx) + { + return *this /= std::to_string(array_idx); + } + + /// @brief create a new JSON pointer by appending the right JSON pointer at the end of the left JSON pointer + /// @sa https://json.nlohmann.me/api/json_pointer/operator_slash/ + friend json_pointer operator/(const json_pointer& lhs, + const json_pointer& rhs) + { + return json_pointer(lhs) /= rhs; + } + + /// @brief create a new JSON pointer by appending the unescaped token at the end of the JSON pointer + /// @sa https://json.nlohmann.me/api/json_pointer/operator_slash/ + friend json_pointer operator/(const json_pointer& lhs, string_t token) // NOLINT(performance-unnecessary-value-param) + { + return json_pointer(lhs) /= std::move(token); + } + + /// @brief create a new JSON pointer by appending the array-index-token at the end of the JSON pointer + /// @sa https://json.nlohmann.me/api/json_pointer/operator_slash/ + friend json_pointer operator/(const json_pointer& lhs, std::size_t array_idx) + { + return json_pointer(lhs) /= array_idx; + } + + /// @brief returns the parent of this JSON pointer + /// @sa https://json.nlohmann.me/api/json_pointer/parent_pointer/ + json_pointer parent_pointer() const + { + if (empty()) + { + return *this; + } + + json_pointer res = *this; + res.pop_back(); + return res; + } + + /// @brief remove last reference token + /// @sa https://json.nlohmann.me/api/json_pointer/pop_back/ + void pop_back() + { + if (JSON_HEDLEY_UNLIKELY(empty())) + { + JSON_THROW(detail::out_of_range::create(405, "JSON pointer has no parent", nullptr)); + } + + reference_tokens.pop_back(); + } + + /// @brief return last reference token + /// @sa https://json.nlohmann.me/api/json_pointer/back/ + const string_t& back() const + { + if (JSON_HEDLEY_UNLIKELY(empty())) + { + 
JSON_THROW(detail::out_of_range::create(405, "JSON pointer has no parent", nullptr)); + } + + return reference_tokens.back(); + } + + /// @brief append an unescaped token at the end of the reference pointer + /// @sa https://json.nlohmann.me/api/json_pointer/push_back/ + void push_back(const string_t& token) + { + reference_tokens.push_back(token); + } + + /// @brief append an unescaped token at the end of the reference pointer + /// @sa https://json.nlohmann.me/api/json_pointer/push_back/ + void push_back(string_t&& token) + { + reference_tokens.push_back(std::move(token)); + } + + /// @brief return whether pointer points to the root document + /// @sa https://json.nlohmann.me/api/json_pointer/empty/ + bool empty() const noexcept + { + return reference_tokens.empty(); + } + + private: + /*! + @param[in] s reference token to be converted into an array index + + @return integer representation of @a s + + @throw parse_error.106 if an array index begins with '0' + @throw parse_error.109 if an array index begins not with a digit + @throw out_of_range.404 if string @a s could not be converted to an integer + @throw out_of_range.410 if an array index exceeds size_type + */ + template<typename BasicJsonType> + static typename BasicJsonType::size_type array_index(const string_t& s) + { + using size_type = typename BasicJsonType::size_type; + + // error condition (cf. RFC 6901, Sect. 4) + if (JSON_HEDLEY_UNLIKELY(s.size() > 1 && s[0] == '0')) + { + JSON_THROW(detail::parse_error::create(106, 0, detail::concat("array index '", s, "' must not begin with '0'"), nullptr)); + } + + // error condition (cf. RFC 6901, Sect. 4) + if (JSON_HEDLEY_UNLIKELY(s.size() > 1 && !(s[0] >= '1' && s[0] <= '9'))) + { + JSON_THROW(detail::parse_error::create(109, 0, detail::concat("array index '", s, "' is not a number"), nullptr)); + } + + const char* p = s.c_str(); + char* p_end = nullptr; + errno = 0; // strtoull doesn't reset errno + unsigned long long res = std::strtoull(p, &p_end, 10); // NOLINT(runtime/int) + if (p == p_end // invalid input or empty string + || errno == ERANGE // out of range + || JSON_HEDLEY_UNLIKELY(static_cast<std::size_t>(p_end - p) != s.size())) // incomplete read + { + JSON_THROW(detail::out_of_range::create(404, detail::concat("unresolved reference token '", s, "'"), nullptr)); + } + + // only triggered on special platforms (like 32bit), see also + // https://github.com/nlohmann/json/pull/2203 + if (res >= static_cast<unsigned long long>((std::numeric_limits<size_type>::max)())) // NOLINT(runtime/int) + { + JSON_THROW(detail::out_of_range::create(410, detail::concat("array index ", s, " exceeds size_type"), nullptr)); // LCOV_EXCL_LINE + } + + return static_cast<size_type>(res); + } + + JSON_PRIVATE_UNLESS_TESTED: + json_pointer top() const + { + if (JSON_HEDLEY_UNLIKELY(empty())) + { + JSON_THROW(detail::out_of_range::create(405, "JSON pointer has no parent", nullptr)); + } + + json_pointer result = *this; + result.reference_tokens = {reference_tokens[0]}; + return result; + } + + private: + /*! + @brief create and return a reference to the pointed to value + + @complexity Linear in the number of reference tokens. 
+ + @throw parse_error.109 if array index is not a number + @throw type_error.313 if value cannot be unflattened + */ + template<typename BasicJsonType> + BasicJsonType& get_and_create(BasicJsonType& j) const + { + auto* result = &j; + + // in case no reference tokens exist, return a reference to the JSON value + // j which will be overwritten by a primitive value + for (const auto& reference_token : reference_tokens) + { + switch (result->type()) + { + case detail::value_t::null: + { + if (reference_token == "0") + { + // start a new array if reference token is 0 + result = &result->operator[](0); + } + else + { + // start a new object otherwise + result = &result->operator[](reference_token); + } + break; + } + + case detail::value_t::object: + { + // create an entry in the object + result = &result->operator[](reference_token); + break; + } + + case detail::value_t::array: + { + // create an entry in the array + result = &result->operator[](array_index<BasicJsonType>(reference_token)); + break; + } + + /* + The following code is only reached if there exists a reference + token _and_ the current value is primitive. In this case, we have + an error situation, because primitive values may only occur as + single value; that is, with an empty list of reference tokens. + */ + case detail::value_t::string: + case detail::value_t::boolean: + case detail::value_t::number_integer: + case detail::value_t::number_unsigned: + case detail::value_t::number_float: + case detail::value_t::binary: + case detail::value_t::discarded: + default: + JSON_THROW(detail::type_error::create(313, "invalid value to unflatten", &j)); + } + } + + return *result; + } + + /*! + @brief return a reference to the pointed to value + + @note This version does not throw if a value is not present, but tries to + create nested values instead. For instance, calling this function + with pointer `"/this/that"` on a null value is equivalent to calling + `operator[]("this").operator[]("that")` on that value, effectively + changing the null value to an object. + + @param[in] ptr a JSON value + + @return reference to the JSON value pointed to by the JSON pointer + + @complexity Linear in the length of the JSON pointer. + + @throw parse_error.106 if an array index begins with '0' + @throw parse_error.109 if an array index was not a number + @throw out_of_range.404 if the JSON pointer can not be resolved + */ + template<typename BasicJsonType> + BasicJsonType& get_unchecked(BasicJsonType* ptr) const + { + for (const auto& reference_token : reference_tokens) + { + // convert null values to arrays or objects before continuing + if (ptr->is_null()) + { + // check if reference token is a number + const bool nums = + std::all_of(reference_token.begin(), reference_token.end(), + [](const unsigned char x) + { + return std::isdigit(x); + }); + + // change value to array for numbers or "-" or to object otherwise + *ptr = (nums || reference_token == "-") + ? 
detail::value_t::array + : detail::value_t::object; + } + + switch (ptr->type()) + { + case detail::value_t::object: + { + // use unchecked object access + ptr = &ptr->operator[](reference_token); + break; + } + + case detail::value_t::array: + { + if (reference_token == "-") + { + // explicitly treat "-" as index beyond the end + ptr = &ptr->operator[](ptr->m_value.array->size()); + } + else + { + // convert array index to number; unchecked access + ptr = &ptr->operator[](array_index<BasicJsonType>(reference_token)); + } + break; + } + + case detail::value_t::null: + case detail::value_t::string: + case detail::value_t::boolean: + case detail::value_t::number_integer: + case detail::value_t::number_unsigned: + case detail::value_t::number_float: + case detail::value_t::binary: + case detail::value_t::discarded: + default: + JSON_THROW(detail::out_of_range::create(404, detail::concat("unresolved reference token '", reference_token, "'"), ptr)); + } + } + + return *ptr; + } + + /*! + @throw parse_error.106 if an array index begins with '0' + @throw parse_error.109 if an array index was not a number + @throw out_of_range.402 if the array index '-' is used + @throw out_of_range.404 if the JSON pointer can not be resolved + */ + template<typename BasicJsonType> + BasicJsonType& get_checked(BasicJsonType* ptr) const + { + for (const auto& reference_token : reference_tokens) + { + switch (ptr->type()) + { + case detail::value_t::object: + { + // note: at performs range check + ptr = &ptr->at(reference_token); + break; + } + + case detail::value_t::array: + { + if (JSON_HEDLEY_UNLIKELY(reference_token == "-")) + { + // "-" always fails the range check + JSON_THROW(detail::out_of_range::create(402, detail::concat( + "array index '-' (", std::to_string(ptr->m_value.array->size()), + ") is out of range"), ptr)); + } + + // note: at performs range check + ptr = &ptr->at(array_index<BasicJsonType>(reference_token)); + break; + } + + case detail::value_t::null: + case detail::value_t::string: + case detail::value_t::boolean: + case detail::value_t::number_integer: + case detail::value_t::number_unsigned: + case detail::value_t::number_float: + case detail::value_t::binary: + case detail::value_t::discarded: + default: + JSON_THROW(detail::out_of_range::create(404, detail::concat("unresolved reference token '", reference_token, "'"), ptr)); + } + } + + return *ptr; + } + + /*! 
+ @brief return a const reference to the pointed to value + + @param[in] ptr a JSON value + + @return const reference to the JSON value pointed to by the JSON + pointer + + @throw parse_error.106 if an array index begins with '0' + @throw parse_error.109 if an array index was not a number + @throw out_of_range.402 if the array index '-' is used + @throw out_of_range.404 if the JSON pointer can not be resolved + */ + template<typename BasicJsonType> + const BasicJsonType& get_unchecked(const BasicJsonType* ptr) const + { + for (const auto& reference_token : reference_tokens) + { + switch (ptr->type()) + { + case detail::value_t::object: + { + // use unchecked object access + ptr = &ptr->operator[](reference_token); + break; + } + + case detail::value_t::array: + { + if (JSON_HEDLEY_UNLIKELY(reference_token == "-")) + { + // "-" cannot be used for const access + JSON_THROW(detail::out_of_range::create(402, detail::concat("array index '-' (", std::to_string(ptr->m_value.array->size()), ") is out of range"), ptr)); + } + + // use unchecked array access + ptr = &ptr->operator[](array_index<BasicJsonType>(reference_token)); + break; + } + + case detail::value_t::null: + case detail::value_t::string: + case detail::value_t::boolean: + case detail::value_t::number_integer: + case detail::value_t::number_unsigned: + case detail::value_t::number_float: + case detail::value_t::binary: + case detail::value_t::discarded: + default: + JSON_THROW(detail::out_of_range::create(404, detail::concat("unresolved reference token '", reference_token, "'"), ptr)); + } + } + + return *ptr; + } + + /*! + @throw parse_error.106 if an array index begins with '0' + @throw parse_error.109 if an array index was not a number + @throw out_of_range.402 if the array index '-' is used + @throw out_of_range.404 if the JSON pointer can not be resolved + */ + template<typename BasicJsonType> + const BasicJsonType& get_checked(const BasicJsonType* ptr) const + { + for (const auto& reference_token : reference_tokens) + { + switch (ptr->type()) + { + case detail::value_t::object: + { + // note: at performs range check + ptr = &ptr->at(reference_token); + break; + } + + case detail::value_t::array: + { + if (JSON_HEDLEY_UNLIKELY(reference_token == "-")) + { + // "-" always fails the range check + JSON_THROW(detail::out_of_range::create(402, detail::concat( + "array index '-' (", std::to_string(ptr->m_value.array->size()), + ") is out of range"), ptr)); + } + + // note: at performs range check + ptr = &ptr->at(array_index<BasicJsonType>(reference_token)); + break; + } + + case detail::value_t::null: + case detail::value_t::string: + case detail::value_t::boolean: + case detail::value_t::number_integer: + case detail::value_t::number_unsigned: + case detail::value_t::number_float: + case detail::value_t::binary: + case detail::value_t::discarded: + default: + JSON_THROW(detail::out_of_range::create(404, detail::concat("unresolved reference token '", reference_token, "'"), ptr)); + } + } + + return *ptr; + } + + /*! 
+ @throw parse_error.106 if an array index begins with '0' + @throw parse_error.109 if an array index was not a number + */ + template<typename BasicJsonType> + bool contains(const BasicJsonType* ptr) const + { + for (const auto& reference_token : reference_tokens) + { + switch (ptr->type()) + { + case detail::value_t::object: + { + if (!ptr->contains(reference_token)) + { + // we did not find the key in the object + return false; + } + + ptr = &ptr->operator[](reference_token); + break; + } + + case detail::value_t::array: + { + if (JSON_HEDLEY_UNLIKELY(reference_token == "-")) + { + // "-" always fails the range check + return false; + } + if (JSON_HEDLEY_UNLIKELY(reference_token.size() == 1 && !("0" <= reference_token && reference_token <= "9"))) + { + // invalid char + return false; + } + if (JSON_HEDLEY_UNLIKELY(reference_token.size() > 1)) + { + if (JSON_HEDLEY_UNLIKELY(!('1' <= reference_token[0] && reference_token[0] <= '9'))) + { + // first char should be between '1' and '9' + return false; + } + for (std::size_t i = 1; i < reference_token.size(); i++) + { + if (JSON_HEDLEY_UNLIKELY(!('0' <= reference_token[i] && reference_token[i] <= '9'))) + { + // other char should be between '0' and '9' + return false; + } + } + } + + const auto idx = array_index<BasicJsonType>(reference_token); + if (idx >= ptr->size()) + { + // index out of range + return false; + } + + ptr = &ptr->operator[](idx); + break; + } + + case detail::value_t::null: + case detail::value_t::string: + case detail::value_t::boolean: + case detail::value_t::number_integer: + case detail::value_t::number_unsigned: + case detail::value_t::number_float: + case detail::value_t::binary: + case detail::value_t::discarded: + default: + { + // we do not expect primitive values if there is still a + // reference token to process + return false; + } + } + } + + // no reference token left means we found a primitive value + return true; + } + + /*! + @brief split the string input to reference tokens + + @note This function is only called by the json_pointer constructor. + All exceptions below are documented there. + + @throw parse_error.107 if the pointer is not empty or begins with '/' + @throw parse_error.108 if character '~' is not followed by '0' or '1' + */ + static std::vector<string_t> split(const string_t& reference_string) + { + std::vector<string_t> result; + + // special case: empty reference string -> no reference tokens + if (reference_string.empty()) + { + return result; + } + + // check if nonempty reference string begins with slash + if (JSON_HEDLEY_UNLIKELY(reference_string[0] != '/')) + { + JSON_THROW(detail::parse_error::create(107, 1, detail::concat("JSON pointer must be empty or begin with '/' - was: '", reference_string, "'"), nullptr)); + } + + // extract the reference tokens: + // - slash: position of the last read slash (or end of string) + // - start: position after the previous slash + for ( + // search for the first slash after the first character + std::size_t slash = reference_string.find_first_of('/', 1), + // set the beginning of the first reference token + start = 1; + // we can stop if start == 0 (if slash == string_t::npos) + start != 0; + // set the beginning of the next reference token + // (will eventually be 0 if slash == string_t::npos) + start = (slash == string_t::npos) ? 0 : slash + 1, + // find next slash + slash = reference_string.find_first_of('/', start)) + { + // use the text between the beginning of the reference token + // (start) and the last slash (slash). 
+ auto reference_token = reference_string.substr(start, slash - start); + + // check reference tokens are properly escaped + for (std::size_t pos = reference_token.find_first_of('~'); + pos != string_t::npos; + pos = reference_token.find_first_of('~', pos + 1)) + { + JSON_ASSERT(reference_token[pos] == '~'); + + // ~ must be followed by 0 or 1 + if (JSON_HEDLEY_UNLIKELY(pos == reference_token.size() - 1 || + (reference_token[pos + 1] != '0' && + reference_token[pos + 1] != '1'))) + { + JSON_THROW(detail::parse_error::create(108, 0, "escape character '~' must be followed with '0' or '1'", nullptr)); + } + } + + // finally, store the reference token + detail::unescape(reference_token); + result.push_back(reference_token); + } + + return result; + } + + private: + /*! + @param[in] reference_string the reference string to the current value + @param[in] value the value to consider + @param[in,out] result the result object to insert values to + + @note Empty objects or arrays are flattened to `null`. + */ + template<typename BasicJsonType> + static void flatten(const string_t& reference_string, + const BasicJsonType& value, + BasicJsonType& result) + { + switch (value.type()) + { + case detail::value_t::array: + { + if (value.m_value.array->empty()) + { + // flatten empty array as null + result[reference_string] = nullptr; + } + else + { + // iterate array and use index as reference string + for (std::size_t i = 0; i < value.m_value.array->size(); ++i) + { + flatten(detail::concat(reference_string, '/', std::to_string(i)), + value.m_value.array->operator[](i), result); + } + } + break; + } + + case detail::value_t::object: + { + if (value.m_value.object->empty()) + { + // flatten empty object as null + result[reference_string] = nullptr; + } + else + { + // iterate object and use keys as reference string + for (const auto& element : *value.m_value.object) + { + flatten(detail::concat(reference_string, '/', detail::escape(element.first)), element.second, result); + } + } + break; + } + + case detail::value_t::null: + case detail::value_t::string: + case detail::value_t::boolean: + case detail::value_t::number_integer: + case detail::value_t::number_unsigned: + case detail::value_t::number_float: + case detail::value_t::binary: + case detail::value_t::discarded: + default: + { + // add primitive value with its reference string + result[reference_string] = value; + break; + } + } + } + + /*! + @param[in] value flattened JSON + + @return unflattened JSON + + @throw parse_error.109 if array index is not a number + @throw type_error.314 if value is not an object + @throw type_error.315 if object values are not primitive + @throw type_error.313 if value cannot be unflattened + */ + template<typename BasicJsonType> + static BasicJsonType + unflatten(const BasicJsonType& value) + { + if (JSON_HEDLEY_UNLIKELY(!value.is_object())) + { + JSON_THROW(detail::type_error::create(314, "only objects can be unflattened", &value)); + } + + BasicJsonType result; + + // iterate the JSON object values + for (const auto& element : *value.m_value.object) + { + if (JSON_HEDLEY_UNLIKELY(!element.second.is_primitive())) + { + JSON_THROW(detail::type_error::create(315, "values in object must be primitive", &element.second)); + } + + // assign value to reference pointed to by JSON pointer; Note that if + // the JSON pointer is "" (i.e., points to the whole value), function + // get_and_create returns a reference to result itself. An assignment + // will then create a primitive value. 
+ json_pointer(element.first).get_and_create(result) = element.second; + } + + return result; + } + + // can't use conversion operator because of ambiguity + json_pointer<string_t> convert() const& + { + json_pointer<string_t> result; + result.reference_tokens = reference_tokens; + return result; + } + + json_pointer<string_t> convert()&& + { + json_pointer<string_t> result; + result.reference_tokens = std::move(reference_tokens); + return result; + } + + /*! + @brief compares two JSON pointers for equality + + @param[in] lhs JSON pointer to compare + @param[in] rhs JSON pointer to compare + @return whether @a lhs is equal to @a rhs + + @complexity Linear in the length of the JSON pointer + + @exceptionsafety No-throw guarantee: this function never throws exceptions. + */ + template<typename RefStringTypeLhs, typename RefStringTypeRhs> + // NOLINTNEXTLINE(readability-redundant-declaration) + friend bool operator==(json_pointer<RefStringTypeLhs> const& lhs, + json_pointer<RefStringTypeRhs> const& rhs) noexcept; + + /*! + @brief compares two JSON pointers for inequality + + @param[in] lhs JSON pointer to compare + @param[in] rhs JSON pointer to compare + @return whether @a lhs is not equal @a rhs + + @complexity Linear in the length of the JSON pointer + + @exceptionsafety No-throw guarantee: this function never throws exceptions. + */ + template<typename RefStringTypeLhs, typename RefStringTypeRhs> + // NOLINTNEXTLINE(readability-redundant-declaration) + friend bool operator!=(json_pointer<RefStringTypeLhs> const& lhs, + json_pointer<RefStringTypeRhs> const& rhs) noexcept; + + /// the reference tokens + std::vector<string_t> reference_tokens; +}; + +// functions cannot be defined inside class due to ODR violations +template<typename RefStringTypeLhs, typename RefStringTypeRhs> +inline bool operator==(json_pointer<RefStringTypeLhs> const& lhs, + json_pointer<RefStringTypeRhs> const& rhs) noexcept +{ + return lhs.reference_tokens == rhs.reference_tokens; +} + +template<typename RefStringTypeLhs, typename RefStringTypeRhs> +inline bool operator!=(json_pointer<RefStringTypeLhs> const& lhs, + json_pointer<RefStringTypeRhs> const& rhs) noexcept +{ + return !(lhs == rhs); +} +} // namespace nlohmann + +// #include <nlohmann/detail/json_ref.hpp> + + +#include <initializer_list> +#include <utility> + +// #include <nlohmann/detail/meta/type_traits.hpp> + + +namespace nlohmann +{ +namespace detail +{ +template<typename BasicJsonType> +class json_ref +{ + public: + using value_type = BasicJsonType; + + json_ref(value_type&& value) + : owned_value(std::move(value)) + {} + + json_ref(const value_type& value) + : value_ref(&value) + {} + + json_ref(std::initializer_list<json_ref> init) + : owned_value(init) + {} + + template < + class... Args, + enable_if_t<std::is_constructible<value_type, Args...>::value, int> = 0 > + json_ref(Args && ... args) + : owned_value(std::forward<Args>(args)...) + {} + + // class should be movable only + json_ref(json_ref&&) noexcept = default; + json_ref(const json_ref&) = delete; + json_ref& operator=(const json_ref&) = delete; + json_ref& operator=(json_ref&&) = delete; + ~json_ref() = default; + + value_type moved_or_copied() const + { + if (value_ref == nullptr) + { + return std::move(owned_value); + } + return *value_ref; + } + + value_type const& operator*() const + { + return value_ref ? 
*value_ref : owned_value; + } + + value_type const* operator->() const + { + return &** this; + } + + private: + mutable value_type owned_value = nullptr; + value_type const* value_ref = nullptr; +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/string_concat.hpp> + +// #include <nlohmann/detail/string_escape.hpp> + +// #include <nlohmann/detail/meta/cpp_future.hpp> + +// #include <nlohmann/detail/meta/type_traits.hpp> + +// #include <nlohmann/detail/output/binary_writer.hpp> + + +#include <algorithm> // reverse +#include <array> // array +#include <map> // map +#include <cmath> // isnan, isinf +#include <cstdint> // uint8_t, uint16_t, uint32_t, uint64_t +#include <cstring> // memcpy +#include <limits> // numeric_limits +#include <string> // string +#include <utility> // move +#include <vector> // vector + +// #include <nlohmann/detail/input/binary_reader.hpp> + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/output/output_adapters.hpp> + + +#include <algorithm> // copy +#include <cstddef> // size_t +#include <iterator> // back_inserter +#include <memory> // shared_ptr, make_shared +#include <string> // basic_string +#include <vector> // vector + +#ifndef JSON_NO_IO + #include <ios> // streamsize + #include <ostream> // basic_ostream +#endif // JSON_NO_IO + +// #include <nlohmann/detail/macro_scope.hpp> + + +namespace nlohmann +{ +namespace detail +{ +/// abstract output adapter interface +template<typename CharType> struct output_adapter_protocol +{ + virtual void write_character(CharType c) = 0; + virtual void write_characters(const CharType* s, std::size_t length) = 0; + virtual ~output_adapter_protocol() = default; + + output_adapter_protocol() = default; + output_adapter_protocol(const output_adapter_protocol&) = default; + output_adapter_protocol(output_adapter_protocol&&) noexcept = default; + output_adapter_protocol& operator=(const output_adapter_protocol&) = default; + output_adapter_protocol& operator=(output_adapter_protocol&&) noexcept = default; +}; + +/// a type to simplify interfaces +template<typename CharType> +using output_adapter_t = std::shared_ptr<output_adapter_protocol<CharType>>; + +/// output adapter for byte vectors +template<typename CharType, typename AllocatorType = std::allocator<CharType>> +class output_vector_adapter : public output_adapter_protocol<CharType> +{ + public: + explicit output_vector_adapter(std::vector<CharType, AllocatorType>& vec) noexcept + : v(vec) + {} + + void write_character(CharType c) override + { + v.push_back(c); + } + + JSON_HEDLEY_NON_NULL(2) + void write_characters(const CharType* s, std::size_t length) override + { + std::copy(s, s + length, std::back_inserter(v)); + } + + private: + std::vector<CharType, AllocatorType>& v; +}; + +#ifndef JSON_NO_IO +/// output adapter for output streams +template<typename CharType> +class output_stream_adapter : public output_adapter_protocol<CharType> +{ + public: + explicit output_stream_adapter(std::basic_ostream<CharType>& s) noexcept + : stream(s) + {} + + void write_character(CharType c) override + { + stream.put(c); + } + + JSON_HEDLEY_NON_NULL(2) + void write_characters(const CharType* s, std::size_t length) override + { + stream.write(s, static_cast<std::streamsize>(length)); + } + + private: + std::basic_ostream<CharType>& stream; +}; +#endif // JSON_NO_IO + +/// output adapter for basic_string +template<typename CharType, typename StringType = std::basic_string<CharType>> 
+class output_string_adapter : public output_adapter_protocol<CharType> +{ + public: + explicit output_string_adapter(StringType& s) noexcept + : str(s) + {} + + void write_character(CharType c) override + { + str.push_back(c); + } + + JSON_HEDLEY_NON_NULL(2) + void write_characters(const CharType* s, std::size_t length) override + { + str.append(s, length); + } + + private: + StringType& str; +}; + +template<typename CharType, typename StringType = std::basic_string<CharType>> +class output_adapter +{ + public: + template<typename AllocatorType = std::allocator<CharType>> + output_adapter(std::vector<CharType, AllocatorType>& vec) + : oa(std::make_shared<output_vector_adapter<CharType, AllocatorType>>(vec)) {} + +#ifndef JSON_NO_IO + output_adapter(std::basic_ostream<CharType>& s) + : oa(std::make_shared<output_stream_adapter<CharType>>(s)) {} +#endif // JSON_NO_IO + + output_adapter(StringType& s) + : oa(std::make_shared<output_string_adapter<CharType, StringType>>(s)) {} + + operator output_adapter_t<CharType>() + { + return oa; + } + + private: + output_adapter_t<CharType> oa = nullptr; +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/string_concat.hpp> + + +namespace nlohmann +{ +namespace detail +{ +/////////////////// +// binary writer // +/////////////////// + +/*! +@brief serialization to CBOR and MessagePack values +*/ +template<typename BasicJsonType, typename CharType> +class binary_writer +{ + using string_t = typename BasicJsonType::string_t; + using binary_t = typename BasicJsonType::binary_t; + using number_float_t = typename BasicJsonType::number_float_t; + + public: + /*! + @brief create a binary writer + + @param[in] adapter output adapter to write to + */ + explicit binary_writer(output_adapter_t<CharType> adapter) : oa(std::move(adapter)) + { + JSON_ASSERT(oa); + } + + /*! + @param[in] j JSON value to serialize + @pre j.type() == value_t::object + */ + void write_bson(const BasicJsonType& j) + { + switch (j.type()) + { + case value_t::object: + { + write_bson_object(*j.m_value.object); + break; + } + + case value_t::null: + case value_t::array: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + JSON_THROW(type_error::create(317, concat("to serialize to BSON, top-level type must be object, but is ", j.type_name()), &j)); + } + } + } + + /*! + @param[in] j JSON value to serialize + */ + void write_cbor(const BasicJsonType& j) + { + switch (j.type()) + { + case value_t::null: + { + oa->write_character(to_char_type(0xF6)); + break; + } + + case value_t::boolean: + { + oa->write_character(j.m_value.boolean + ? to_char_type(0xF5) + : to_char_type(0xF4)); + break; + } + + case value_t::number_integer: + { + if (j.m_value.number_integer >= 0) + { + // CBOR does not differentiate between positive signed + // integers and unsigned integers. Therefore, we used the + // code from the value_t::number_unsigned case here. 
+ if (j.m_value.number_integer <= 0x17) + { + write_number(static_cast<std::uint8_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_integer <= (std::numeric_limits<std::uint8_t>::max)()) + { + oa->write_character(to_char_type(0x18)); + write_number(static_cast<std::uint8_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_integer <= (std::numeric_limits<std::uint16_t>::max)()) + { + oa->write_character(to_char_type(0x19)); + write_number(static_cast<std::uint16_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_integer <= (std::numeric_limits<std::uint32_t>::max)()) + { + oa->write_character(to_char_type(0x1A)); + write_number(static_cast<std::uint32_t>(j.m_value.number_integer)); + } + else + { + oa->write_character(to_char_type(0x1B)); + write_number(static_cast<std::uint64_t>(j.m_value.number_integer)); + } + } + else + { + // The conversions below encode the sign in the first + // byte, and the value is converted to a positive number. + const auto positive_number = -1 - j.m_value.number_integer; + if (j.m_value.number_integer >= -24) + { + write_number(static_cast<std::uint8_t>(0x20 + positive_number)); + } + else if (positive_number <= (std::numeric_limits<std::uint8_t>::max)()) + { + oa->write_character(to_char_type(0x38)); + write_number(static_cast<std::uint8_t>(positive_number)); + } + else if (positive_number <= (std::numeric_limits<std::uint16_t>::max)()) + { + oa->write_character(to_char_type(0x39)); + write_number(static_cast<std::uint16_t>(positive_number)); + } + else if (positive_number <= (std::numeric_limits<std::uint32_t>::max)()) + { + oa->write_character(to_char_type(0x3A)); + write_number(static_cast<std::uint32_t>(positive_number)); + } + else + { + oa->write_character(to_char_type(0x3B)); + write_number(static_cast<std::uint64_t>(positive_number)); + } + } + break; + } + + case value_t::number_unsigned: + { + if (j.m_value.number_unsigned <= 0x17) + { + write_number(static_cast<std::uint8_t>(j.m_value.number_unsigned)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits<std::uint8_t>::max)()) + { + oa->write_character(to_char_type(0x18)); + write_number(static_cast<std::uint8_t>(j.m_value.number_unsigned)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits<std::uint16_t>::max)()) + { + oa->write_character(to_char_type(0x19)); + write_number(static_cast<std::uint16_t>(j.m_value.number_unsigned)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits<std::uint32_t>::max)()) + { + oa->write_character(to_char_type(0x1A)); + write_number(static_cast<std::uint32_t>(j.m_value.number_unsigned)); + } + else + { + oa->write_character(to_char_type(0x1B)); + write_number(static_cast<std::uint64_t>(j.m_value.number_unsigned)); + } + break; + } + + case value_t::number_float: + { + if (std::isnan(j.m_value.number_float)) + { + // NaN is 0xf97e00 in CBOR + oa->write_character(to_char_type(0xF9)); + oa->write_character(to_char_type(0x7E)); + oa->write_character(to_char_type(0x00)); + } + else if (std::isinf(j.m_value.number_float)) + { + // Infinity is 0xf97c00, -Infinity is 0xf9fc00 + oa->write_character(to_char_type(0xf9)); + oa->write_character(j.m_value.number_float > 0 ? 
to_char_type(0x7C) : to_char_type(0xFC)); + oa->write_character(to_char_type(0x00)); + } + else + { + write_compact_float(j.m_value.number_float, detail::input_format_t::cbor); + } + break; + } + + case value_t::string: + { + // step 1: write control byte and the string length + const auto N = j.m_value.string->size(); + if (N <= 0x17) + { + write_number(static_cast<std::uint8_t>(0x60 + N)); + } + else if (N <= (std::numeric_limits<std::uint8_t>::max)()) + { + oa->write_character(to_char_type(0x78)); + write_number(static_cast<std::uint8_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint16_t>::max)()) + { + oa->write_character(to_char_type(0x79)); + write_number(static_cast<std::uint16_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint32_t>::max)()) + { + oa->write_character(to_char_type(0x7A)); + write_number(static_cast<std::uint32_t>(N)); + } + // LCOV_EXCL_START + else if (N <= (std::numeric_limits<std::uint64_t>::max)()) + { + oa->write_character(to_char_type(0x7B)); + write_number(static_cast<std::uint64_t>(N)); + } + // LCOV_EXCL_STOP + + // step 2: write the string + oa->write_characters( + reinterpret_cast<const CharType*>(j.m_value.string->c_str()), + j.m_value.string->size()); + break; + } + + case value_t::array: + { + // step 1: write control byte and the array size + const auto N = j.m_value.array->size(); + if (N <= 0x17) + { + write_number(static_cast<std::uint8_t>(0x80 + N)); + } + else if (N <= (std::numeric_limits<std::uint8_t>::max)()) + { + oa->write_character(to_char_type(0x98)); + write_number(static_cast<std::uint8_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint16_t>::max)()) + { + oa->write_character(to_char_type(0x99)); + write_number(static_cast<std::uint16_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint32_t>::max)()) + { + oa->write_character(to_char_type(0x9A)); + write_number(static_cast<std::uint32_t>(N)); + } + // LCOV_EXCL_START + else if (N <= (std::numeric_limits<std::uint64_t>::max)()) + { + oa->write_character(to_char_type(0x9B)); + write_number(static_cast<std::uint64_t>(N)); + } + // LCOV_EXCL_STOP + + // step 2: write each element + for (const auto& el : *j.m_value.array) + { + write_cbor(el); + } + break; + } + + case value_t::binary: + { + if (j.m_value.binary->has_subtype()) + { + if (j.m_value.binary->subtype() <= (std::numeric_limits<std::uint8_t>::max)()) + { + write_number(static_cast<std::uint8_t>(0xd8)); + write_number(static_cast<std::uint8_t>(j.m_value.binary->subtype())); + } + else if (j.m_value.binary->subtype() <= (std::numeric_limits<std::uint16_t>::max)()) + { + write_number(static_cast<std::uint8_t>(0xd9)); + write_number(static_cast<std::uint16_t>(j.m_value.binary->subtype())); + } + else if (j.m_value.binary->subtype() <= (std::numeric_limits<std::uint32_t>::max)()) + { + write_number(static_cast<std::uint8_t>(0xda)); + write_number(static_cast<std::uint32_t>(j.m_value.binary->subtype())); + } + else if (j.m_value.binary->subtype() <= (std::numeric_limits<std::uint64_t>::max)()) + { + write_number(static_cast<std::uint8_t>(0xdb)); + write_number(static_cast<std::uint64_t>(j.m_value.binary->subtype())); + } + } + + // step 1: write control byte and the binary array size + const auto N = j.m_value.binary->size(); + if (N <= 0x17) + { + write_number(static_cast<std::uint8_t>(0x40 + N)); + } + else if (N <= (std::numeric_limits<std::uint8_t>::max)()) + { + oa->write_character(to_char_type(0x58)); + write_number(static_cast<std::uint8_t>(N)); + } + else if (N <= 
(std::numeric_limits<std::uint16_t>::max)()) + { + oa->write_character(to_char_type(0x59)); + write_number(static_cast<std::uint16_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint32_t>::max)()) + { + oa->write_character(to_char_type(0x5A)); + write_number(static_cast<std::uint32_t>(N)); + } + // LCOV_EXCL_START + else if (N <= (std::numeric_limits<std::uint64_t>::max)()) + { + oa->write_character(to_char_type(0x5B)); + write_number(static_cast<std::uint64_t>(N)); + } + // LCOV_EXCL_STOP + + // step 2: write each element + oa->write_characters( + reinterpret_cast<const CharType*>(j.m_value.binary->data()), + N); + + break; + } + + case value_t::object: + { + // step 1: write control byte and the object size + const auto N = j.m_value.object->size(); + if (N <= 0x17) + { + write_number(static_cast<std::uint8_t>(0xA0 + N)); + } + else if (N <= (std::numeric_limits<std::uint8_t>::max)()) + { + oa->write_character(to_char_type(0xB8)); + write_number(static_cast<std::uint8_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint16_t>::max)()) + { + oa->write_character(to_char_type(0xB9)); + write_number(static_cast<std::uint16_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint32_t>::max)()) + { + oa->write_character(to_char_type(0xBA)); + write_number(static_cast<std::uint32_t>(N)); + } + // LCOV_EXCL_START + else if (N <= (std::numeric_limits<std::uint64_t>::max)()) + { + oa->write_character(to_char_type(0xBB)); + write_number(static_cast<std::uint64_t>(N)); + } + // LCOV_EXCL_STOP + + // step 2: write each element + for (const auto& el : *j.m_value.object) + { + write_cbor(el.first); + write_cbor(el.second); + } + break; + } + + case value_t::discarded: + default: + break; + } + } + + /*! + @param[in] j JSON value to serialize + */ + void write_msgpack(const BasicJsonType& j) + { + switch (j.type()) + { + case value_t::null: // nil + { + oa->write_character(to_char_type(0xC0)); + break; + } + + case value_t::boolean: // true and false + { + oa->write_character(j.m_value.boolean + ? to_char_type(0xC3) + : to_char_type(0xC2)); + break; + } + + case value_t::number_integer: + { + if (j.m_value.number_integer >= 0) + { + // MessagePack does not differentiate between positive + // signed integers and unsigned integers. Therefore, we used + // the code from the value_t::number_unsigned case here. 
+ if (j.m_value.number_unsigned < 128) + { + // positive fixnum + write_number(static_cast<std::uint8_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits<std::uint8_t>::max)()) + { + // uint 8 + oa->write_character(to_char_type(0xCC)); + write_number(static_cast<std::uint8_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits<std::uint16_t>::max)()) + { + // uint 16 + oa->write_character(to_char_type(0xCD)); + write_number(static_cast<std::uint16_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits<std::uint32_t>::max)()) + { + // uint 32 + oa->write_character(to_char_type(0xCE)); + write_number(static_cast<std::uint32_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits<std::uint64_t>::max)()) + { + // uint 64 + oa->write_character(to_char_type(0xCF)); + write_number(static_cast<std::uint64_t>(j.m_value.number_integer)); + } + } + else + { + if (j.m_value.number_integer >= -32) + { + // negative fixnum + write_number(static_cast<std::int8_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_integer >= (std::numeric_limits<std::int8_t>::min)() && + j.m_value.number_integer <= (std::numeric_limits<std::int8_t>::max)()) + { + // int 8 + oa->write_character(to_char_type(0xD0)); + write_number(static_cast<std::int8_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_integer >= (std::numeric_limits<std::int16_t>::min)() && + j.m_value.number_integer <= (std::numeric_limits<std::int16_t>::max)()) + { + // int 16 + oa->write_character(to_char_type(0xD1)); + write_number(static_cast<std::int16_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_integer >= (std::numeric_limits<std::int32_t>::min)() && + j.m_value.number_integer <= (std::numeric_limits<std::int32_t>::max)()) + { + // int 32 + oa->write_character(to_char_type(0xD2)); + write_number(static_cast<std::int32_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_integer >= (std::numeric_limits<std::int64_t>::min)() && + j.m_value.number_integer <= (std::numeric_limits<std::int64_t>::max)()) + { + // int 64 + oa->write_character(to_char_type(0xD3)); + write_number(static_cast<std::int64_t>(j.m_value.number_integer)); + } + } + break; + } + + case value_t::number_unsigned: + { + if (j.m_value.number_unsigned < 128) + { + // positive fixnum + write_number(static_cast<std::uint8_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits<std::uint8_t>::max)()) + { + // uint 8 + oa->write_character(to_char_type(0xCC)); + write_number(static_cast<std::uint8_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits<std::uint16_t>::max)()) + { + // uint 16 + oa->write_character(to_char_type(0xCD)); + write_number(static_cast<std::uint16_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits<std::uint32_t>::max)()) + { + // uint 32 + oa->write_character(to_char_type(0xCE)); + write_number(static_cast<std::uint32_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits<std::uint64_t>::max)()) + { + // uint 64 + oa->write_character(to_char_type(0xCF)); + write_number(static_cast<std::uint64_t>(j.m_value.number_integer)); + } + break; + } + + case value_t::number_float: + { + write_compact_float(j.m_value.number_float, detail::input_format_t::msgpack); + break; + } + + case value_t::string: 
+ { + // step 1: write control byte and the string length + const auto N = j.m_value.string->size(); + if (N <= 31) + { + // fixstr + write_number(static_cast<std::uint8_t>(0xA0 | N)); + } + else if (N <= (std::numeric_limits<std::uint8_t>::max)()) + { + // str 8 + oa->write_character(to_char_type(0xD9)); + write_number(static_cast<std::uint8_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint16_t>::max)()) + { + // str 16 + oa->write_character(to_char_type(0xDA)); + write_number(static_cast<std::uint16_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint32_t>::max)()) + { + // str 32 + oa->write_character(to_char_type(0xDB)); + write_number(static_cast<std::uint32_t>(N)); + } + + // step 2: write the string + oa->write_characters( + reinterpret_cast<const CharType*>(j.m_value.string->c_str()), + j.m_value.string->size()); + break; + } + + case value_t::array: + { + // step 1: write control byte and the array size + const auto N = j.m_value.array->size(); + if (N <= 15) + { + // fixarray + write_number(static_cast<std::uint8_t>(0x90 | N)); + } + else if (N <= (std::numeric_limits<std::uint16_t>::max)()) + { + // array 16 + oa->write_character(to_char_type(0xDC)); + write_number(static_cast<std::uint16_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint32_t>::max)()) + { + // array 32 + oa->write_character(to_char_type(0xDD)); + write_number(static_cast<std::uint32_t>(N)); + } + + // step 2: write each element + for (const auto& el : *j.m_value.array) + { + write_msgpack(el); + } + break; + } + + case value_t::binary: + { + // step 0: determine if the binary type has a set subtype to + // determine whether or not to use the ext or fixext types + const bool use_ext = j.m_value.binary->has_subtype(); + + // step 1: write control byte and the byte string length + const auto N = j.m_value.binary->size(); + if (N <= (std::numeric_limits<std::uint8_t>::max)()) + { + std::uint8_t output_type{}; + bool fixed = true; + if (use_ext) + { + switch (N) + { + case 1: + output_type = 0xD4; // fixext 1 + break; + case 2: + output_type = 0xD5; // fixext 2 + break; + case 4: + output_type = 0xD6; // fixext 4 + break; + case 8: + output_type = 0xD7; // fixext 8 + break; + case 16: + output_type = 0xD8; // fixext 16 + break; + default: + output_type = 0xC7; // ext 8 + fixed = false; + break; + } + + } + else + { + output_type = 0xC4; // bin 8 + fixed = false; + } + + oa->write_character(to_char_type(output_type)); + if (!fixed) + { + write_number(static_cast<std::uint8_t>(N)); + } + } + else if (N <= (std::numeric_limits<std::uint16_t>::max)()) + { + std::uint8_t output_type = use_ext + ? 0xC8 // ext 16 + : 0xC5; // bin 16 + + oa->write_character(to_char_type(output_type)); + write_number(static_cast<std::uint16_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint32_t>::max)()) + { + std::uint8_t output_type = use_ext + ? 
0xC9 // ext 32 + : 0xC6; // bin 32 + + oa->write_character(to_char_type(output_type)); + write_number(static_cast<std::uint32_t>(N)); + } + + // step 1.5: if this is an ext type, write the subtype + if (use_ext) + { + write_number(static_cast<std::int8_t>(j.m_value.binary->subtype())); + } + + // step 2: write the byte string + oa->write_characters( + reinterpret_cast<const CharType*>(j.m_value.binary->data()), + N); + + break; + } + + case value_t::object: + { + // step 1: write control byte and the object size + const auto N = j.m_value.object->size(); + if (N <= 15) + { + // fixmap + write_number(static_cast<std::uint8_t>(0x80 | (N & 0xF))); + } + else if (N <= (std::numeric_limits<std::uint16_t>::max)()) + { + // map 16 + oa->write_character(to_char_type(0xDE)); + write_number(static_cast<std::uint16_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint32_t>::max)()) + { + // map 32 + oa->write_character(to_char_type(0xDF)); + write_number(static_cast<std::uint32_t>(N)); + } + + // step 2: write each element + for (const auto& el : *j.m_value.object) + { + write_msgpack(el.first); + write_msgpack(el.second); + } + break; + } + + case value_t::discarded: + default: + break; + } + } + + /*! + @param[in] j JSON value to serialize + @param[in] use_count whether to use '#' prefixes (optimized format) + @param[in] use_type whether to use '$' prefixes (optimized format) + @param[in] add_prefix whether prefixes need to be used for this value + @param[in] use_bjdata whether write in BJData format, default is false + */ + void write_ubjson(const BasicJsonType& j, const bool use_count, + const bool use_type, const bool add_prefix = true, + const bool use_bjdata = false) + { + switch (j.type()) + { + case value_t::null: + { + if (add_prefix) + { + oa->write_character(to_char_type('Z')); + } + break; + } + + case value_t::boolean: + { + if (add_prefix) + { + oa->write_character(j.m_value.boolean + ? 
to_char_type('T') + : to_char_type('F')); + } + break; + } + + case value_t::number_integer: + { + write_number_with_ubjson_prefix(j.m_value.number_integer, add_prefix, use_bjdata); + break; + } + + case value_t::number_unsigned: + { + write_number_with_ubjson_prefix(j.m_value.number_unsigned, add_prefix, use_bjdata); + break; + } + + case value_t::number_float: + { + write_number_with_ubjson_prefix(j.m_value.number_float, add_prefix, use_bjdata); + break; + } + + case value_t::string: + { + if (add_prefix) + { + oa->write_character(to_char_type('S')); + } + write_number_with_ubjson_prefix(j.m_value.string->size(), true, use_bjdata); + oa->write_characters( + reinterpret_cast<const CharType*>(j.m_value.string->c_str()), + j.m_value.string->size()); + break; + } + + case value_t::array: + { + if (add_prefix) + { + oa->write_character(to_char_type('[')); + } + + bool prefix_required = true; + if (use_type && !j.m_value.array->empty()) + { + JSON_ASSERT(use_count); + const CharType first_prefix = ubjson_prefix(j.front(), use_bjdata); + const bool same_prefix = std::all_of(j.begin() + 1, j.end(), + [this, first_prefix, use_bjdata](const BasicJsonType & v) + { + return ubjson_prefix(v, use_bjdata) == first_prefix; + }); + + std::vector<CharType> bjdx = {'[', '{', 'S', 'H', 'T', 'F', 'N', 'Z'}; // excluded markers in bjdata optimized type + + if (same_prefix && !(use_bjdata && std::find(bjdx.begin(), bjdx.end(), first_prefix) != bjdx.end())) + { + prefix_required = false; + oa->write_character(to_char_type('$')); + oa->write_character(first_prefix); + } + } + + if (use_count) + { + oa->write_character(to_char_type('#')); + write_number_with_ubjson_prefix(j.m_value.array->size(), true, use_bjdata); + } + + for (const auto& el : *j.m_value.array) + { + write_ubjson(el, use_count, use_type, prefix_required, use_bjdata); + } + + if (!use_count) + { + oa->write_character(to_char_type(']')); + } + + break; + } + + case value_t::binary: + { + if (add_prefix) + { + oa->write_character(to_char_type('[')); + } + + if (use_type && !j.m_value.binary->empty()) + { + JSON_ASSERT(use_count); + oa->write_character(to_char_type('$')); + oa->write_character('U'); + } + + if (use_count) + { + oa->write_character(to_char_type('#')); + write_number_with_ubjson_prefix(j.m_value.binary->size(), true, use_bjdata); + } + + if (use_type) + { + oa->write_characters( + reinterpret_cast<const CharType*>(j.m_value.binary->data()), + j.m_value.binary->size()); + } + else + { + for (size_t i = 0; i < j.m_value.binary->size(); ++i) + { + oa->write_character(to_char_type('U')); + oa->write_character(j.m_value.binary->data()[i]); + } + } + + if (!use_count) + { + oa->write_character(to_char_type(']')); + } + + break; + } + + case value_t::object: + { + if (use_bjdata && j.m_value.object->size() == 3 && j.m_value.object->find("_ArrayType_") != j.m_value.object->end() && j.m_value.object->find("_ArraySize_") != j.m_value.object->end() && j.m_value.object->find("_ArrayData_") != j.m_value.object->end()) + { + if (!write_bjdata_ndarray(*j.m_value.object, use_count, use_type)) // decode bjdata ndarray in the JData format (https://github.com/NeuroJSON/jdata) + { + break; + } + } + + if (add_prefix) + { + oa->write_character(to_char_type('{')); + } + + bool prefix_required = true; + if (use_type && !j.m_value.object->empty()) + { + JSON_ASSERT(use_count); + const CharType first_prefix = ubjson_prefix(j.front(), use_bjdata); + const bool same_prefix = std::all_of(j.begin(), j.end(), + [this, first_prefix, use_bjdata](const BasicJsonType 
& v) + { + return ubjson_prefix(v, use_bjdata) == first_prefix; + }); + + std::vector<CharType> bjdx = {'[', '{', 'S', 'H', 'T', 'F', 'N', 'Z'}; // excluded markers in bjdata optimized type + + if (same_prefix && !(use_bjdata && std::find(bjdx.begin(), bjdx.end(), first_prefix) != bjdx.end())) + { + prefix_required = false; + oa->write_character(to_char_type('$')); + oa->write_character(first_prefix); + } + } + + if (use_count) + { + oa->write_character(to_char_type('#')); + write_number_with_ubjson_prefix(j.m_value.object->size(), true, use_bjdata); + } + + for (const auto& el : *j.m_value.object) + { + write_number_with_ubjson_prefix(el.first.size(), true, use_bjdata); + oa->write_characters( + reinterpret_cast<const CharType*>(el.first.c_str()), + el.first.size()); + write_ubjson(el.second, use_count, use_type, prefix_required, use_bjdata); + } + + if (!use_count) + { + oa->write_character(to_char_type('}')); + } + + break; + } + + case value_t::discarded: + default: + break; + } + } + + private: + ////////// + // BSON // + ////////// + + /*! + @return The size of a BSON document entry header, including the id marker + and the entry name size (and its null-terminator). + */ + static std::size_t calc_bson_entry_header_size(const string_t& name, const BasicJsonType& j) + { + const auto it = name.find(static_cast<typename string_t::value_type>(0)); + if (JSON_HEDLEY_UNLIKELY(it != BasicJsonType::string_t::npos)) + { + JSON_THROW(out_of_range::create(409, concat("BSON key cannot contain code point U+0000 (at byte ", std::to_string(it), ")"), &j)); + static_cast<void>(j); + } + + return /*id*/ 1ul + name.size() + /*zero-terminator*/1u; + } + + /*! + @brief Writes the given @a element_type and @a name to the output adapter + */ + void write_bson_entry_header(const string_t& name, + const std::uint8_t element_type) + { + oa->write_character(to_char_type(element_type)); // boolean + oa->write_characters( + reinterpret_cast<const CharType*>(name.c_str()), + name.size() + 1u); + } + + /*! + @brief Writes a BSON element with key @a name and boolean value @a value + */ + void write_bson_boolean(const string_t& name, + const bool value) + { + write_bson_entry_header(name, 0x08); + oa->write_character(value ? to_char_type(0x01) : to_char_type(0x00)); + } + + /*! + @brief Writes a BSON element with key @a name and double value @a value + */ + void write_bson_double(const string_t& name, + const double value) + { + write_bson_entry_header(name, 0x01); + write_number<double>(value, true); + } + + /*! + @return The size of the BSON-encoded string in @a value + */ + static std::size_t calc_bson_string_size(const string_t& value) + { + return sizeof(std::int32_t) + value.size() + 1ul; + } + + /*! + @brief Writes a BSON element with key @a name and string value @a value + */ + void write_bson_string(const string_t& name, + const string_t& value) + { + write_bson_entry_header(name, 0x02); + + write_number<std::int32_t>(static_cast<std::int32_t>(value.size() + 1ul), true); + oa->write_characters( + reinterpret_cast<const CharType*>(value.c_str()), + value.size() + 1); + } + + /*! + @brief Writes a BSON element with key @a name and null value + */ + void write_bson_null(const string_t& name) + { + write_bson_entry_header(name, 0x0A); + } + + /*! + @return The size of the BSON-encoded integer @a value + */ + static std::size_t calc_bson_integer_size(const std::int64_t value) + { + return (std::numeric_limits<std::int32_t>::min)() <= value && value <= (std::numeric_limits<std::int32_t>::max)() + ? 
sizeof(std::int32_t) + : sizeof(std::int64_t); + } + + /*! + @brief Writes a BSON element with key @a name and integer @a value + */ + void write_bson_integer(const string_t& name, + const std::int64_t value) + { + if ((std::numeric_limits<std::int32_t>::min)() <= value && value <= (std::numeric_limits<std::int32_t>::max)()) + { + write_bson_entry_header(name, 0x10); // int32 + write_number<std::int32_t>(static_cast<std::int32_t>(value), true); + } + else + { + write_bson_entry_header(name, 0x12); // int64 + write_number<std::int64_t>(static_cast<std::int64_t>(value), true); + } + } + + /*! + @return The size of the BSON-encoded unsigned integer in @a j + */ + static constexpr std::size_t calc_bson_unsigned_size(const std::uint64_t value) noexcept + { + return (value <= static_cast<std::uint64_t>((std::numeric_limits<std::int32_t>::max)())) + ? sizeof(std::int32_t) + : sizeof(std::int64_t); + } + + /*! + @brief Writes a BSON element with key @a name and unsigned @a value + */ + void write_bson_unsigned(const string_t& name, + const BasicJsonType& j) + { + if (j.m_value.number_unsigned <= static_cast<std::uint64_t>((std::numeric_limits<std::int32_t>::max)())) + { + write_bson_entry_header(name, 0x10 /* int32 */); + write_number<std::int32_t>(static_cast<std::int32_t>(j.m_value.number_unsigned), true); + } + else if (j.m_value.number_unsigned <= static_cast<std::uint64_t>((std::numeric_limits<std::int64_t>::max)())) + { + write_bson_entry_header(name, 0x12 /* int64 */); + write_number<std::int64_t>(static_cast<std::int64_t>(j.m_value.number_unsigned), true); + } + else + { + JSON_THROW(out_of_range::create(407, concat("integer number ", std::to_string(j.m_value.number_unsigned), " cannot be represented by BSON as it does not fit int64"), &j)); + } + } + + /*! + @brief Writes a BSON element with key @a name and object @a value + */ + void write_bson_object_entry(const string_t& name, + const typename BasicJsonType::object_t& value) + { + write_bson_entry_header(name, 0x03); // object + write_bson_object(value); + } + + /*! + @return The size of the BSON-encoded array @a value + */ + static std::size_t calc_bson_array_size(const typename BasicJsonType::array_t& value) + { + std::size_t array_index = 0ul; + + const std::size_t embedded_document_size = std::accumulate(std::begin(value), std::end(value), static_cast<std::size_t>(0), [&array_index](std::size_t result, const typename BasicJsonType::array_t::value_type & el) + { + return result + calc_bson_element_size(std::to_string(array_index++), el); + }); + + return sizeof(std::int32_t) + embedded_document_size + 1ul; + } + + /*! + @return The size of the BSON-encoded binary array @a value + */ + static std::size_t calc_bson_binary_size(const typename BasicJsonType::binary_t& value) + { + return sizeof(std::int32_t) + value.size() + 1ul; + } + + /*! + @brief Writes a BSON element with key @a name and array @a value + */ + void write_bson_array(const string_t& name, + const typename BasicJsonType::array_t& value) + { + write_bson_entry_header(name, 0x04); // array + write_number<std::int32_t>(static_cast<std::int32_t>(calc_bson_array_size(value)), true); + + std::size_t array_index = 0ul; + + for (const auto& el : value) + { + write_bson_element(std::to_string(array_index++), el); + } + + oa->write_character(to_char_type(0x00)); + } + + /*! 
+ @brief Writes a BSON element with key @a name and binary value @a value + */ + void write_bson_binary(const string_t& name, + const binary_t& value) + { + write_bson_entry_header(name, 0x05); + + write_number<std::int32_t>(static_cast<std::int32_t>(value.size()), true); + write_number(value.has_subtype() ? static_cast<std::uint8_t>(value.subtype()) : static_cast<std::uint8_t>(0x00)); + + oa->write_characters(reinterpret_cast<const CharType*>(value.data()), value.size()); + } + + /*! + @brief Calculates the size necessary to serialize the JSON value @a j with its @a name + @return The calculated size for the BSON document entry for @a j with the given @a name. + */ + static std::size_t calc_bson_element_size(const string_t& name, + const BasicJsonType& j) + { + const auto header_size = calc_bson_entry_header_size(name, j); + switch (j.type()) + { + case value_t::object: + return header_size + calc_bson_object_size(*j.m_value.object); + + case value_t::array: + return header_size + calc_bson_array_size(*j.m_value.array); + + case value_t::binary: + return header_size + calc_bson_binary_size(*j.m_value.binary); + + case value_t::boolean: + return header_size + 1ul; + + case value_t::number_float: + return header_size + 8ul; + + case value_t::number_integer: + return header_size + calc_bson_integer_size(j.m_value.number_integer); + + case value_t::number_unsigned: + return header_size + calc_bson_unsigned_size(j.m_value.number_unsigned); + + case value_t::string: + return header_size + calc_bson_string_size(*j.m_value.string); + + case value_t::null: + return header_size + 0ul; + + // LCOV_EXCL_START + case value_t::discarded: + default: + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) + return 0ul; + // LCOV_EXCL_STOP + } + } + + /*! + @brief Serializes the JSON value @a j to BSON and associates it with the + key @a name. + @param name The name to associate with the JSON entity @a j within the + current BSON document + */ + void write_bson_element(const string_t& name, + const BasicJsonType& j) + { + switch (j.type()) + { + case value_t::object: + return write_bson_object_entry(name, *j.m_value.object); + + case value_t::array: + return write_bson_array(name, *j.m_value.array); + + case value_t::binary: + return write_bson_binary(name, *j.m_value.binary); + + case value_t::boolean: + return write_bson_boolean(name, j.m_value.boolean); + + case value_t::number_float: + return write_bson_double(name, j.m_value.number_float); + + case value_t::number_integer: + return write_bson_integer(name, j.m_value.number_integer); + + case value_t::number_unsigned: + return write_bson_unsigned(name, j); + + case value_t::string: + return write_bson_string(name, *j.m_value.string); + + case value_t::null: + return write_bson_null(name); + + // LCOV_EXCL_START + case value_t::discarded: + default: + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) + return; + // LCOV_EXCL_STOP + } + } + + /*! + @brief Calculates the size of the BSON serialization of the given + JSON-object @a j. 
+ @param[in] value JSON value to serialize + @pre value.type() == value_t::object + */ + static std::size_t calc_bson_object_size(const typename BasicJsonType::object_t& value) + { + std::size_t document_size = std::accumulate(value.begin(), value.end(), static_cast<std::size_t>(0), + [](size_t result, const typename BasicJsonType::object_t::value_type & el) + { + return result += calc_bson_element_size(el.first, el.second); + }); + + return sizeof(std::int32_t) + document_size + 1ul; + } + + /*! + @param[in] value JSON value to serialize + @pre value.type() == value_t::object + */ + void write_bson_object(const typename BasicJsonType::object_t& value) + { + write_number<std::int32_t>(static_cast<std::int32_t>(calc_bson_object_size(value)), true); + + for (const auto& el : value) + { + write_bson_element(el.first, el.second); + } + + oa->write_character(to_char_type(0x00)); + } + + ////////// + // CBOR // + ////////// + + static constexpr CharType get_cbor_float_prefix(float /*unused*/) + { + return to_char_type(0xFA); // Single-Precision Float + } + + static constexpr CharType get_cbor_float_prefix(double /*unused*/) + { + return to_char_type(0xFB); // Double-Precision Float + } + + ///////////// + // MsgPack // + ///////////// + + static constexpr CharType get_msgpack_float_prefix(float /*unused*/) + { + return to_char_type(0xCA); // float 32 + } + + static constexpr CharType get_msgpack_float_prefix(double /*unused*/) + { + return to_char_type(0xCB); // float 64 + } + + //////////// + // UBJSON // + //////////// + + // UBJSON: write number (floating point) + template<typename NumberType, typename std::enable_if< + std::is_floating_point<NumberType>::value, int>::type = 0> + void write_number_with_ubjson_prefix(const NumberType n, + const bool add_prefix, + const bool use_bjdata) + { + if (add_prefix) + { + oa->write_character(get_ubjson_float_prefix(n)); + } + write_number(n, use_bjdata); + } + + // UBJSON: write number (unsigned integer) + template<typename NumberType, typename std::enable_if< + std::is_unsigned<NumberType>::value, int>::type = 0> + void write_number_with_ubjson_prefix(const NumberType n, + const bool add_prefix, + const bool use_bjdata) + { + if (n <= static_cast<std::uint64_t>((std::numeric_limits<std::int8_t>::max)())) + { + if (add_prefix) + { + oa->write_character(to_char_type('i')); // int8 + } + write_number(static_cast<std::uint8_t>(n), use_bjdata); + } + else if (n <= (std::numeric_limits<std::uint8_t>::max)()) + { + if (add_prefix) + { + oa->write_character(to_char_type('U')); // uint8 + } + write_number(static_cast<std::uint8_t>(n), use_bjdata); + } + else if (n <= static_cast<std::uint64_t>((std::numeric_limits<std::int16_t>::max)())) + { + if (add_prefix) + { + oa->write_character(to_char_type('I')); // int16 + } + write_number(static_cast<std::int16_t>(n), use_bjdata); + } + else if (use_bjdata && n <= static_cast<uint64_t>((std::numeric_limits<uint16_t>::max)())) + { + if (add_prefix) + { + oa->write_character(to_char_type('u')); // uint16 - bjdata only + } + write_number(static_cast<std::uint16_t>(n), use_bjdata); + } + else if (n <= static_cast<std::uint64_t>((std::numeric_limits<std::int32_t>::max)())) + { + if (add_prefix) + { + oa->write_character(to_char_type('l')); // int32 + } + write_number(static_cast<std::int32_t>(n), use_bjdata); + } + else if (use_bjdata && n <= static_cast<uint64_t>((std::numeric_limits<uint32_t>::max)())) + { + if (add_prefix) + { + oa->write_character(to_char_type('m')); // uint32 - bjdata only + } + 
write_number(static_cast<std::uint32_t>(n), use_bjdata); + } + else if (n <= static_cast<std::uint64_t>((std::numeric_limits<std::int64_t>::max)())) + { + if (add_prefix) + { + oa->write_character(to_char_type('L')); // int64 + } + write_number(static_cast<std::int64_t>(n), use_bjdata); + } + else if (use_bjdata && n <= (std::numeric_limits<uint64_t>::max)()) + { + if (add_prefix) + { + oa->write_character(to_char_type('M')); // uint64 - bjdata only + } + write_number(static_cast<std::uint64_t>(n), use_bjdata); + } + else + { + if (add_prefix) + { + oa->write_character(to_char_type('H')); // high-precision number + } + + const auto number = BasicJsonType(n).dump(); + write_number_with_ubjson_prefix(number.size(), true, use_bjdata); + for (std::size_t i = 0; i < number.size(); ++i) + { + oa->write_character(to_char_type(static_cast<std::uint8_t>(number[i]))); + } + } + } + + // UBJSON: write number (signed integer) + template < typename NumberType, typename std::enable_if < + std::is_signed<NumberType>::value&& + !std::is_floating_point<NumberType>::value, int >::type = 0 > + void write_number_with_ubjson_prefix(const NumberType n, + const bool add_prefix, + const bool use_bjdata) + { + if ((std::numeric_limits<std::int8_t>::min)() <= n && n <= (std::numeric_limits<std::int8_t>::max)()) + { + if (add_prefix) + { + oa->write_character(to_char_type('i')); // int8 + } + write_number(static_cast<std::int8_t>(n), use_bjdata); + } + else if (static_cast<std::int64_t>((std::numeric_limits<std::uint8_t>::min)()) <= n && n <= static_cast<std::int64_t>((std::numeric_limits<std::uint8_t>::max)())) + { + if (add_prefix) + { + oa->write_character(to_char_type('U')); // uint8 + } + write_number(static_cast<std::uint8_t>(n), use_bjdata); + } + else if ((std::numeric_limits<std::int16_t>::min)() <= n && n <= (std::numeric_limits<std::int16_t>::max)()) + { + if (add_prefix) + { + oa->write_character(to_char_type('I')); // int16 + } + write_number(static_cast<std::int16_t>(n), use_bjdata); + } + else if (use_bjdata && (static_cast<std::int64_t>((std::numeric_limits<std::uint16_t>::min)()) <= n && n <= static_cast<std::int64_t>((std::numeric_limits<std::uint16_t>::max)()))) + { + if (add_prefix) + { + oa->write_character(to_char_type('u')); // uint16 - bjdata only + } + write_number(static_cast<uint16_t>(n), use_bjdata); + } + else if ((std::numeric_limits<std::int32_t>::min)() <= n && n <= (std::numeric_limits<std::int32_t>::max)()) + { + if (add_prefix) + { + oa->write_character(to_char_type('l')); // int32 + } + write_number(static_cast<std::int32_t>(n), use_bjdata); + } + else if (use_bjdata && (static_cast<std::int64_t>((std::numeric_limits<std::uint32_t>::min)()) <= n && n <= static_cast<std::int64_t>((std::numeric_limits<std::uint32_t>::max)()))) + { + if (add_prefix) + { + oa->write_character(to_char_type('m')); // uint32 - bjdata only + } + write_number(static_cast<uint32_t>(n), use_bjdata); + } + else if ((std::numeric_limits<std::int64_t>::min)() <= n && n <= (std::numeric_limits<std::int64_t>::max)()) + { + if (add_prefix) + { + oa->write_character(to_char_type('L')); // int64 + } + write_number(static_cast<std::int64_t>(n), use_bjdata); + } + // LCOV_EXCL_START + else + { + if (add_prefix) + { + oa->write_character(to_char_type('H')); // high-precision number + } + + const auto number = BasicJsonType(n).dump(); + write_number_with_ubjson_prefix(number.size(), true, use_bjdata); + for (std::size_t i = 0; i < number.size(); ++i) + { + 
oa->write_character(to_char_type(static_cast<std::uint8_t>(number[i]))); + } + } + // LCOV_EXCL_STOP + } + + /*! + @brief determine the type prefix of container values + */ + CharType ubjson_prefix(const BasicJsonType& j, const bool use_bjdata) const noexcept + { + switch (j.type()) + { + case value_t::null: + return 'Z'; + + case value_t::boolean: + return j.m_value.boolean ? 'T' : 'F'; + + case value_t::number_integer: + { + if ((std::numeric_limits<std::int8_t>::min)() <= j.m_value.number_integer && j.m_value.number_integer <= (std::numeric_limits<std::int8_t>::max)()) + { + return 'i'; + } + if ((std::numeric_limits<std::uint8_t>::min)() <= j.m_value.number_integer && j.m_value.number_integer <= (std::numeric_limits<std::uint8_t>::max)()) + { + return 'U'; + } + if ((std::numeric_limits<std::int16_t>::min)() <= j.m_value.number_integer && j.m_value.number_integer <= (std::numeric_limits<std::int16_t>::max)()) + { + return 'I'; + } + if (use_bjdata && ((std::numeric_limits<std::uint16_t>::min)() <= j.m_value.number_integer && j.m_value.number_integer <= (std::numeric_limits<std::uint16_t>::max)())) + { + return 'u'; + } + if ((std::numeric_limits<std::int32_t>::min)() <= j.m_value.number_integer && j.m_value.number_integer <= (std::numeric_limits<std::int32_t>::max)()) + { + return 'l'; + } + if (use_bjdata && ((std::numeric_limits<std::uint32_t>::min)() <= j.m_value.number_integer && j.m_value.number_integer <= (std::numeric_limits<std::uint32_t>::max)())) + { + return 'm'; + } + if ((std::numeric_limits<std::int64_t>::min)() <= j.m_value.number_integer && j.m_value.number_integer <= (std::numeric_limits<std::int64_t>::max)()) + { + return 'L'; + } + // anything else is treated as high-precision number + return 'H'; // LCOV_EXCL_LINE + } + + case value_t::number_unsigned: + { + if (j.m_value.number_unsigned <= static_cast<std::uint64_t>((std::numeric_limits<std::int8_t>::max)())) + { + return 'i'; + } + if (j.m_value.number_unsigned <= static_cast<std::uint64_t>((std::numeric_limits<std::uint8_t>::max)())) + { + return 'U'; + } + if (j.m_value.number_unsigned <= static_cast<std::uint64_t>((std::numeric_limits<std::int16_t>::max)())) + { + return 'I'; + } + if (use_bjdata && j.m_value.number_unsigned <= static_cast<std::uint64_t>((std::numeric_limits<std::uint16_t>::max)())) + { + return 'u'; + } + if (j.m_value.number_unsigned <= static_cast<std::uint64_t>((std::numeric_limits<std::int32_t>::max)())) + { + return 'l'; + } + if (use_bjdata && j.m_value.number_unsigned <= static_cast<std::uint64_t>((std::numeric_limits<std::uint32_t>::max)())) + { + return 'm'; + } + if (j.m_value.number_unsigned <= static_cast<std::uint64_t>((std::numeric_limits<std::int64_t>::max)())) + { + return 'L'; + } + if (use_bjdata && j.m_value.number_unsigned <= (std::numeric_limits<std::uint64_t>::max)()) + { + return 'M'; + } + // anything else is treated as high-precision number + return 'H'; // LCOV_EXCL_LINE + } + + case value_t::number_float: + return get_ubjson_float_prefix(j.m_value.number_float); + + case value_t::string: + return 'S'; + + case value_t::array: // fallthrough + case value_t::binary: + return '['; + + case value_t::object: + return '{'; + + case value_t::discarded: + default: // discarded values + return 'N'; + } + } + + static constexpr CharType get_ubjson_float_prefix(float /*unused*/) + { + return 'd'; // float 32 + } + + static constexpr CharType get_ubjson_float_prefix(double /*unused*/) + { + return 'D'; // float 64 + } + + /*! 
+ @return false if the object is successfully converted to a bjdata ndarray, true if the type or size is invalid + */ + bool write_bjdata_ndarray(const typename BasicJsonType::object_t& value, const bool use_count, const bool use_type) + { + std::map<string_t, CharType> bjdtype = {{"uint8", 'U'}, {"int8", 'i'}, {"uint16", 'u'}, {"int16", 'I'}, + {"uint32", 'm'}, {"int32", 'l'}, {"uint64", 'M'}, {"int64", 'L'}, {"single", 'd'}, {"double", 'D'}, {"char", 'C'} + }; + + string_t key = "_ArrayType_"; + auto it = bjdtype.find(static_cast<string_t>(value.at(key))); + if (it == bjdtype.end()) + { + return true; + } + CharType dtype = it->second; + + key = "_ArraySize_"; + std::size_t len = (value.at(key).empty() ? 0 : 1); + for (const auto& el : value.at(key)) + { + len *= static_cast<std::size_t>(el.m_value.number_unsigned); + } + + key = "_ArrayData_"; + if (value.at(key).size() != len) + { + return true; + } + + oa->write_character('['); + oa->write_character('$'); + oa->write_character(dtype); + oa->write_character('#'); + + key = "_ArraySize_"; + write_ubjson(value.at(key), use_count, use_type, true, true); + + key = "_ArrayData_"; + if (dtype == 'U' || dtype == 'C') + { + for (const auto& el : value.at(key)) + { + write_number(static_cast<std::uint8_t>(el.m_value.number_unsigned), true); + } + } + else if (dtype == 'i') + { + for (const auto& el : value.at(key)) + { + write_number(static_cast<std::int8_t>(el.m_value.number_integer), true); + } + } + else if (dtype == 'u') + { + for (const auto& el : value.at(key)) + { + write_number(static_cast<std::uint16_t>(el.m_value.number_unsigned), true); + } + } + else if (dtype == 'I') + { + for (const auto& el : value.at(key)) + { + write_number(static_cast<std::int16_t>(el.m_value.number_integer), true); + } + } + else if (dtype == 'm') + { + for (const auto& el : value.at(key)) + { + write_number(static_cast<std::uint32_t>(el.m_value.number_unsigned), true); + } + } + else if (dtype == 'l') + { + for (const auto& el : value.at(key)) + { + write_number(static_cast<std::int32_t>(el.m_value.number_integer), true); + } + } + else if (dtype == 'M') + { + for (const auto& el : value.at(key)) + { + write_number(static_cast<std::uint64_t>(el.m_value.number_unsigned), true); + } + } + else if (dtype == 'L') + { + for (const auto& el : value.at(key)) + { + write_number(static_cast<std::int64_t>(el.m_value.number_integer), true); + } + } + else if (dtype == 'd') + { + for (const auto& el : value.at(key)) + { + write_number(static_cast<float>(el.m_value.number_float), true); + } + } + else if (dtype == 'D') + { + for (const auto& el : value.at(key)) + { + write_number(static_cast<double>(el.m_value.number_float), true); + } + } + return false; + } + + /////////////////////// + // Utility functions // + /////////////////////// + + /* + @brief write a number to output input + @param[in] n number of type @a NumberType + @param[in] OutputIsLittleEndian Set to true if output data is + required to be little endian + @tparam NumberType the type of the number + + @note This function needs to respect the system's endianness, because bytes + in CBOR, MessagePack, and UBJSON are stored in network order (big + endian) and therefore need reordering on little endian systems. + On the other hand, BSON and BJData use little endian and should reorder + on big endian systems. 
+ */ + template<typename NumberType> + void write_number(const NumberType n, const bool OutputIsLittleEndian = false) + { + // step 1: write number to array of length NumberType + std::array<CharType, sizeof(NumberType)> vec{}; + std::memcpy(vec.data(), &n, sizeof(NumberType)); + + // step 2: write array to output (with possible reordering) + if (is_little_endian != OutputIsLittleEndian) + { + // reverse byte order prior to conversion if necessary + std::reverse(vec.begin(), vec.end()); + } + + oa->write_characters(vec.data(), sizeof(NumberType)); + } + + void write_compact_float(const number_float_t n, detail::input_format_t format) + { +#ifdef __GNUC__ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wfloat-equal" +#endif + if (static_cast<double>(n) >= static_cast<double>(std::numeric_limits<float>::lowest()) && + static_cast<double>(n) <= static_cast<double>((std::numeric_limits<float>::max)()) && + static_cast<double>(static_cast<float>(n)) == static_cast<double>(n)) + { + oa->write_character(format == detail::input_format_t::cbor + ? get_cbor_float_prefix(static_cast<float>(n)) + : get_msgpack_float_prefix(static_cast<float>(n))); + write_number(static_cast<float>(n)); + } + else + { + oa->write_character(format == detail::input_format_t::cbor + ? get_cbor_float_prefix(n) + : get_msgpack_float_prefix(n)); + write_number(n); + } +#ifdef __GNUC__ +#pragma GCC diagnostic pop +#endif + } + + public: + // The following to_char_type functions are implement the conversion + // between uint8_t and CharType. In case CharType is not unsigned, + // such a conversion is required to allow values greater than 128. + // See <https://github.com/nlohmann/json/issues/1286> for a discussion. + template < typename C = CharType, + enable_if_t < std::is_signed<C>::value && std::is_signed<char>::value > * = nullptr > + static constexpr CharType to_char_type(std::uint8_t x) noexcept + { + return *reinterpret_cast<char*>(&x); + } + + template < typename C = CharType, + enable_if_t < std::is_signed<C>::value && std::is_unsigned<char>::value > * = nullptr > + static CharType to_char_type(std::uint8_t x) noexcept + { + static_assert(sizeof(std::uint8_t) == sizeof(CharType), "size of CharType must be equal to std::uint8_t"); + static_assert(std::is_trivial<CharType>::value, "CharType must be trivial"); + CharType result; + std::memcpy(&result, &x, sizeof(x)); + return result; + } + + template<typename C = CharType, + enable_if_t<std::is_unsigned<C>::value>* = nullptr> + static constexpr CharType to_char_type(std::uint8_t x) noexcept + { + return x; + } + + template < typename InputCharType, typename C = CharType, + enable_if_t < + std::is_signed<C>::value && + std::is_signed<char>::value && + std::is_same<char, typename std::remove_cv<InputCharType>::type>::value + > * = nullptr > + static constexpr CharType to_char_type(InputCharType x) noexcept + { + return x; + } + + private: + /// whether we can assume little endianness + const bool is_little_endian = little_endianness(); + + /// the output + output_adapter_t<CharType> oa = nullptr; +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/output/output_adapters.hpp> + +// #include <nlohmann/detail/output/serializer.hpp> + + +#include <algorithm> // reverse, remove, fill, find, none_of +#include <array> // array +#include <clocale> // localeconv, lconv +#include <cmath> // labs, isfinite, isnan, signbit +#include <cstddef> // size_t, ptrdiff_t +#include <cstdint> // uint8_t +#include <cstdio> // snprintf +#include 
<limits> // numeric_limits +#include <string> // string, char_traits +#include <iomanip> // setfill, setw +#include <type_traits> // is_same +#include <utility> // move + +// #include <nlohmann/detail/conversions/to_chars.hpp> + + +#include <array> // array +#include <cmath> // signbit, isfinite +#include <cstdint> // intN_t, uintN_t +#include <cstring> // memcpy, memmove +#include <limits> // numeric_limits +#include <type_traits> // conditional + +// #include <nlohmann/detail/macro_scope.hpp> + + +namespace nlohmann +{ +namespace detail +{ + +/*! +@brief implements the Grisu2 algorithm for binary to decimal floating-point +conversion. + +This implementation is a slightly modified version of the reference +implementation which may be obtained from +http://florian.loitsch.com/publications (bench.tar.gz). + +The code is distributed under the MIT license, Copyright (c) 2009 Florian Loitsch. + +For a detailed description of the algorithm see: + +[1] Loitsch, "Printing Floating-Point Numbers Quickly and Accurately with + Integers", Proceedings of the ACM SIGPLAN 2010 Conference on Programming + Language Design and Implementation, PLDI 2010 +[2] Burger, Dybvig, "Printing Floating-Point Numbers Quickly and Accurately", + Proceedings of the ACM SIGPLAN 1996 Conference on Programming Language + Design and Implementation, PLDI 1996 +*/ +namespace dtoa_impl +{ + +template<typename Target, typename Source> +Target reinterpret_bits(const Source source) +{ + static_assert(sizeof(Target) == sizeof(Source), "size mismatch"); + + Target target; + std::memcpy(&target, &source, sizeof(Source)); + return target; +} + +struct diyfp // f * 2^e +{ + static constexpr int kPrecision = 64; // = q + + std::uint64_t f = 0; + int e = 0; + + constexpr diyfp(std::uint64_t f_, int e_) noexcept : f(f_), e(e_) {} + + /*! + @brief returns x - y + @pre x.e == y.e and x.f >= y.f + */ + static diyfp sub(const diyfp& x, const diyfp& y) noexcept + { + JSON_ASSERT(x.e == y.e); + JSON_ASSERT(x.f >= y.f); + + return {x.f - y.f, x.e}; + } + + /*! + @brief returns x * y + @note The result is rounded. (Only the upper q bits are returned.) 
+ */ + static diyfp mul(const diyfp& x, const diyfp& y) noexcept + { + static_assert(kPrecision == 64, "internal error"); + + // Computes: + // f = round((x.f * y.f) / 2^q) + // e = x.e + y.e + q + + // Emulate the 64-bit * 64-bit multiplication: + // + // p = u * v + // = (u_lo + 2^32 u_hi) (v_lo + 2^32 v_hi) + // = (u_lo v_lo ) + 2^32 ((u_lo v_hi ) + (u_hi v_lo )) + 2^64 (u_hi v_hi ) + // = (p0 ) + 2^32 ((p1 ) + (p2 )) + 2^64 (p3 ) + // = (p0_lo + 2^32 p0_hi) + 2^32 ((p1_lo + 2^32 p1_hi) + (p2_lo + 2^32 p2_hi)) + 2^64 (p3 ) + // = (p0_lo ) + 2^32 (p0_hi + p1_lo + p2_lo ) + 2^64 (p1_hi + p2_hi + p3) + // = (p0_lo ) + 2^32 (Q ) + 2^64 (H ) + // = (p0_lo ) + 2^32 (Q_lo + 2^32 Q_hi ) + 2^64 (H ) + // + // (Since Q might be larger than 2^32 - 1) + // + // = (p0_lo + 2^32 Q_lo) + 2^64 (Q_hi + H) + // + // (Q_hi + H does not overflow a 64-bit int) + // + // = p_lo + 2^64 p_hi + + const std::uint64_t u_lo = x.f & 0xFFFFFFFFu; + const std::uint64_t u_hi = x.f >> 32u; + const std::uint64_t v_lo = y.f & 0xFFFFFFFFu; + const std::uint64_t v_hi = y.f >> 32u; + + const std::uint64_t p0 = u_lo * v_lo; + const std::uint64_t p1 = u_lo * v_hi; + const std::uint64_t p2 = u_hi * v_lo; + const std::uint64_t p3 = u_hi * v_hi; + + const std::uint64_t p0_hi = p0 >> 32u; + const std::uint64_t p1_lo = p1 & 0xFFFFFFFFu; + const std::uint64_t p1_hi = p1 >> 32u; + const std::uint64_t p2_lo = p2 & 0xFFFFFFFFu; + const std::uint64_t p2_hi = p2 >> 32u; + + std::uint64_t Q = p0_hi + p1_lo + p2_lo; + + // The full product might now be computed as + // + // p_hi = p3 + p2_hi + p1_hi + (Q >> 32) + // p_lo = p0_lo + (Q << 32) + // + // But in this particular case here, the full p_lo is not required. + // Effectively we only need to add the highest bit in p_lo to p_hi (and + // Q_hi + 1 does not overflow). + + Q += std::uint64_t{1} << (64u - 32u - 1u); // round, ties up + + const std::uint64_t h = p3 + p2_hi + p1_hi + (Q >> 32u); + + return {h, x.e + y.e + 64}; + } + + /*! + @brief normalize x such that the significand is >= 2^(q-1) + @pre x.f != 0 + */ + static diyfp normalize(diyfp x) noexcept + { + JSON_ASSERT(x.f != 0); + + while ((x.f >> 63u) == 0) + { + x.f <<= 1u; + x.e--; + } + + return x; + } + + /*! + @brief normalize x such that the result has the exponent E + @pre e >= x.e and the upper e - x.e bits of x.f must be zero. + */ + static diyfp normalize_to(const diyfp& x, const int target_exponent) noexcept + { + const int delta = x.e - target_exponent; + + JSON_ASSERT(delta >= 0); + JSON_ASSERT(((x.f << delta) >> delta) == x.f); + + return {x.f << delta, target_exponent}; + } +}; + +struct boundaries +{ + diyfp w; + diyfp minus; + diyfp plus; +}; + +/*! +Compute the (normalized) diyfp representing the input number 'value' and its +boundaries. + +@pre value must be finite and positive +*/ +template<typename FloatType> +boundaries compute_boundaries(FloatType value) +{ + JSON_ASSERT(std::isfinite(value)); + JSON_ASSERT(value > 0); + + // Convert the IEEE representation into a diyfp. 
+ // + // If v is denormal: + // value = 0.F * 2^(1 - bias) = ( F) * 2^(1 - bias - (p-1)) + // If v is normalized: + // value = 1.F * 2^(E - bias) = (2^(p-1) + F) * 2^(E - bias - (p-1)) + + static_assert(std::numeric_limits<FloatType>::is_iec559, + "internal error: dtoa_short requires an IEEE-754 floating-point implementation"); + + constexpr int kPrecision = std::numeric_limits<FloatType>::digits; // = p (includes the hidden bit) + constexpr int kBias = std::numeric_limits<FloatType>::max_exponent - 1 + (kPrecision - 1); + constexpr int kMinExp = 1 - kBias; + constexpr std::uint64_t kHiddenBit = std::uint64_t{1} << (kPrecision - 1); // = 2^(p-1) + + using bits_type = typename std::conditional<kPrecision == 24, std::uint32_t, std::uint64_t >::type; + + const auto bits = static_cast<std::uint64_t>(reinterpret_bits<bits_type>(value)); + const std::uint64_t E = bits >> (kPrecision - 1); + const std::uint64_t F = bits & (kHiddenBit - 1); + + const bool is_denormal = E == 0; + const diyfp v = is_denormal + ? diyfp(F, kMinExp) + : diyfp(F + kHiddenBit, static_cast<int>(E) - kBias); + + // Compute the boundaries m- and m+ of the floating-point value + // v = f * 2^e. + // + // Determine v- and v+, the floating-point predecessor and successor if v, + // respectively. + // + // v- = v - 2^e if f != 2^(p-1) or e == e_min (A) + // = v - 2^(e-1) if f == 2^(p-1) and e > e_min (B) + // + // v+ = v + 2^e + // + // Let m- = (v- + v) / 2 and m+ = (v + v+) / 2. All real numbers _strictly_ + // between m- and m+ round to v, regardless of how the input rounding + // algorithm breaks ties. + // + // ---+-------------+-------------+-------------+-------------+--- (A) + // v- m- v m+ v+ + // + // -----------------+------+------+-------------+-------------+--- (B) + // v- m- v m+ v+ + + const bool lower_boundary_is_closer = F == 0 && E > 1; + const diyfp m_plus = diyfp(2 * v.f + 1, v.e - 1); + const diyfp m_minus = lower_boundary_is_closer + ? diyfp(4 * v.f - 1, v.e - 2) // (B) + : diyfp(2 * v.f - 1, v.e - 1); // (A) + + // Determine the normalized w+ = m+. + const diyfp w_plus = diyfp::normalize(m_plus); + + // Determine w- = m- such that e_(w-) = e_(w+). + const diyfp w_minus = diyfp::normalize_to(m_minus, w_plus.e); + + return {diyfp::normalize(v), w_minus, w_plus}; +} + +// Given normalized diyfp w, Grisu needs to find a (normalized) cached +// power-of-ten c, such that the exponent of the product c * w = f * 2^e lies +// within a certain range [alpha, gamma] (Definition 3.2 from [1]) +// +// alpha <= e = e_c + e_w + q <= gamma +// +// or +// +// f_c * f_w * 2^alpha <= f_c 2^(e_c) * f_w 2^(e_w) * 2^q +// <= f_c * f_w * 2^gamma +// +// Since c and w are normalized, i.e. 2^(q-1) <= f < 2^q, this implies +// +// 2^(q-1) * 2^(q-1) * 2^alpha <= c * w * 2^q < 2^q * 2^q * 2^gamma +// +// or +// +// 2^(q - 2 + alpha) <= c * w < 2^(q + gamma) +// +// The choice of (alpha,gamma) determines the size of the table and the form of +// the digit generation procedure. Using (alpha,gamma)=(-60,-32) works out well +// in practice: +// +// The idea is to cut the number c * w = f * 2^e into two parts, which can be +// processed independently: An integral part p1, and a fractional part p2: +// +// f * 2^e = ( (f div 2^-e) * 2^-e + (f mod 2^-e) ) * 2^e +// = (f div 2^-e) + (f mod 2^-e) * 2^e +// = p1 + p2 * 2^e +// +// The conversion of p1 into decimal form requires a series of divisions and +// modulos by (a power of) 10. 
These operations are faster for 32-bit than for +// 64-bit integers, so p1 should ideally fit into a 32-bit integer. This can be +// achieved by choosing +// +// -e >= 32 or e <= -32 := gamma +// +// In order to convert the fractional part +// +// p2 * 2^e = p2 / 2^-e = d[-1] / 10^1 + d[-2] / 10^2 + ... +// +// into decimal form, the fraction is repeatedly multiplied by 10 and the digits +// d[-i] are extracted in order: +// +// (10 * p2) div 2^-e = d[-1] +// (10 * p2) mod 2^-e = d[-2] / 10^1 + ... +// +// The multiplication by 10 must not overflow. It is sufficient to choose +// +// 10 * p2 < 16 * p2 = 2^4 * p2 <= 2^64. +// +// Since p2 = f mod 2^-e < 2^-e, +// +// -e <= 60 or e >= -60 := alpha + +constexpr int kAlpha = -60; +constexpr int kGamma = -32; + +struct cached_power // c = f * 2^e ~= 10^k +{ + std::uint64_t f; + int e; + int k; +}; + +/*! +For a normalized diyfp w = f * 2^e, this function returns a (normalized) cached +power-of-ten c = f_c * 2^e_c, such that the exponent of the product w * c +satisfies (Definition 3.2 from [1]) + + alpha <= e_c + e + q <= gamma. +*/ +inline cached_power get_cached_power_for_binary_exponent(int e) +{ + // Now + // + // alpha <= e_c + e + q <= gamma (1) + // ==> f_c * 2^alpha <= c * 2^e * 2^q + // + // and since the c's are normalized, 2^(q-1) <= f_c, + // + // ==> 2^(q - 1 + alpha) <= c * 2^(e + q) + // ==> 2^(alpha - e - 1) <= c + // + // If c were an exact power of ten, i.e. c = 10^k, one may determine k as + // + // k = ceil( log_10( 2^(alpha - e - 1) ) ) + // = ceil( (alpha - e - 1) * log_10(2) ) + // + // From the paper: + // "In theory the result of the procedure could be wrong since c is rounded, + // and the computation itself is approximated [...]. In practice, however, + // this simple function is sufficient." + // + // For IEEE double precision floating-point numbers converted into + // normalized diyfp's w = f * 2^e, with q = 64, + // + // e >= -1022 (min IEEE exponent) + // -52 (p - 1) + // -52 (p - 1, possibly normalize denormal IEEE numbers) + // -11 (normalize the diyfp) + // = -1137 + // + // and + // + // e <= +1023 (max IEEE exponent) + // -52 (p - 1) + // -11 (normalize the diyfp) + // = 960 + // + // This binary exponent range [-1137,960] results in a decimal exponent + // range [-307,324]. One does not need to store a cached power for each + // k in this range. For each such k it suffices to find a cached power + // such that the exponent of the product lies in [alpha,gamma]. + // This implies that the difference of the decimal exponents of adjacent + // table entries must be less than or equal to + // + // floor( (gamma - alpha) * log_10(2) ) = 8. + // + // (A smaller distance gamma-alpha would require a larger table.) + + // NB: + // Actually this function returns c, such that -60 <= e_c + e + 64 <= -34. 
+ + constexpr int kCachedPowersMinDecExp = -300; + constexpr int kCachedPowersDecStep = 8; + + static constexpr std::array<cached_power, 79> kCachedPowers = + { + { + { 0xAB70FE17C79AC6CA, -1060, -300 }, + { 0xFF77B1FCBEBCDC4F, -1034, -292 }, + { 0xBE5691EF416BD60C, -1007, -284 }, + { 0x8DD01FAD907FFC3C, -980, -276 }, + { 0xD3515C2831559A83, -954, -268 }, + { 0x9D71AC8FADA6C9B5, -927, -260 }, + { 0xEA9C227723EE8BCB, -901, -252 }, + { 0xAECC49914078536D, -874, -244 }, + { 0x823C12795DB6CE57, -847, -236 }, + { 0xC21094364DFB5637, -821, -228 }, + { 0x9096EA6F3848984F, -794, -220 }, + { 0xD77485CB25823AC7, -768, -212 }, + { 0xA086CFCD97BF97F4, -741, -204 }, + { 0xEF340A98172AACE5, -715, -196 }, + { 0xB23867FB2A35B28E, -688, -188 }, + { 0x84C8D4DFD2C63F3B, -661, -180 }, + { 0xC5DD44271AD3CDBA, -635, -172 }, + { 0x936B9FCEBB25C996, -608, -164 }, + { 0xDBAC6C247D62A584, -582, -156 }, + { 0xA3AB66580D5FDAF6, -555, -148 }, + { 0xF3E2F893DEC3F126, -529, -140 }, + { 0xB5B5ADA8AAFF80B8, -502, -132 }, + { 0x87625F056C7C4A8B, -475, -124 }, + { 0xC9BCFF6034C13053, -449, -116 }, + { 0x964E858C91BA2655, -422, -108 }, + { 0xDFF9772470297EBD, -396, -100 }, + { 0xA6DFBD9FB8E5B88F, -369, -92 }, + { 0xF8A95FCF88747D94, -343, -84 }, + { 0xB94470938FA89BCF, -316, -76 }, + { 0x8A08F0F8BF0F156B, -289, -68 }, + { 0xCDB02555653131B6, -263, -60 }, + { 0x993FE2C6D07B7FAC, -236, -52 }, + { 0xE45C10C42A2B3B06, -210, -44 }, + { 0xAA242499697392D3, -183, -36 }, + { 0xFD87B5F28300CA0E, -157, -28 }, + { 0xBCE5086492111AEB, -130, -20 }, + { 0x8CBCCC096F5088CC, -103, -12 }, + { 0xD1B71758E219652C, -77, -4 }, + { 0x9C40000000000000, -50, 4 }, + { 0xE8D4A51000000000, -24, 12 }, + { 0xAD78EBC5AC620000, 3, 20 }, + { 0x813F3978F8940984, 30, 28 }, + { 0xC097CE7BC90715B3, 56, 36 }, + { 0x8F7E32CE7BEA5C70, 83, 44 }, + { 0xD5D238A4ABE98068, 109, 52 }, + { 0x9F4F2726179A2245, 136, 60 }, + { 0xED63A231D4C4FB27, 162, 68 }, + { 0xB0DE65388CC8ADA8, 189, 76 }, + { 0x83C7088E1AAB65DB, 216, 84 }, + { 0xC45D1DF942711D9A, 242, 92 }, + { 0x924D692CA61BE758, 269, 100 }, + { 0xDA01EE641A708DEA, 295, 108 }, + { 0xA26DA3999AEF774A, 322, 116 }, + { 0xF209787BB47D6B85, 348, 124 }, + { 0xB454E4A179DD1877, 375, 132 }, + { 0x865B86925B9BC5C2, 402, 140 }, + { 0xC83553C5C8965D3D, 428, 148 }, + { 0x952AB45CFA97A0B3, 455, 156 }, + { 0xDE469FBD99A05FE3, 481, 164 }, + { 0xA59BC234DB398C25, 508, 172 }, + { 0xF6C69A72A3989F5C, 534, 180 }, + { 0xB7DCBF5354E9BECE, 561, 188 }, + { 0x88FCF317F22241E2, 588, 196 }, + { 0xCC20CE9BD35C78A5, 614, 204 }, + { 0x98165AF37B2153DF, 641, 212 }, + { 0xE2A0B5DC971F303A, 667, 220 }, + { 0xA8D9D1535CE3B396, 694, 228 }, + { 0xFB9B7CD9A4A7443C, 720, 236 }, + { 0xBB764C4CA7A44410, 747, 244 }, + { 0x8BAB8EEFB6409C1A, 774, 252 }, + { 0xD01FEF10A657842C, 800, 260 }, + { 0x9B10A4E5E9913129, 827, 268 }, + { 0xE7109BFBA19C0C9D, 853, 276 }, + { 0xAC2820D9623BF429, 880, 284 }, + { 0x80444B5E7AA7CF85, 907, 292 }, + { 0xBF21E44003ACDD2D, 933, 300 }, + { 0x8E679C2F5E44FF8F, 960, 308 }, + { 0xD433179D9C8CB841, 986, 316 }, + { 0x9E19DB92B4E31BA9, 1013, 324 }, + } + }; + + // This computation gives exactly the same results for k as + // k = ceil((kAlpha - e - 1) * 0.30102999566398114) + // for |e| <= 1500, but doesn't require floating-point operations. 
+ // NB: log_10(2) ~= 78913 / 2^18 + JSON_ASSERT(e >= -1500); + JSON_ASSERT(e <= 1500); + const int f = kAlpha - e - 1; + const int k = (f * 78913) / (1 << 18) + static_cast<int>(f > 0); + + const int index = (-kCachedPowersMinDecExp + k + (kCachedPowersDecStep - 1)) / kCachedPowersDecStep; + JSON_ASSERT(index >= 0); + JSON_ASSERT(static_cast<std::size_t>(index) < kCachedPowers.size()); + + const cached_power cached = kCachedPowers[static_cast<std::size_t>(index)]; + JSON_ASSERT(kAlpha <= cached.e + e + 64); + JSON_ASSERT(kGamma >= cached.e + e + 64); + + return cached; +} + +/*! +For n != 0, returns k, such that pow10 := 10^(k-1) <= n < 10^k. +For n == 0, returns 1 and sets pow10 := 1. +*/ +inline int find_largest_pow10(const std::uint32_t n, std::uint32_t& pow10) +{ + // LCOV_EXCL_START + if (n >= 1000000000) + { + pow10 = 1000000000; + return 10; + } + // LCOV_EXCL_STOP + if (n >= 100000000) + { + pow10 = 100000000; + return 9; + } + if (n >= 10000000) + { + pow10 = 10000000; + return 8; + } + if (n >= 1000000) + { + pow10 = 1000000; + return 7; + } + if (n >= 100000) + { + pow10 = 100000; + return 6; + } + if (n >= 10000) + { + pow10 = 10000; + return 5; + } + if (n >= 1000) + { + pow10 = 1000; + return 4; + } + if (n >= 100) + { + pow10 = 100; + return 3; + } + if (n >= 10) + { + pow10 = 10; + return 2; + } + + pow10 = 1; + return 1; +} + +inline void grisu2_round(char* buf, int len, std::uint64_t dist, std::uint64_t delta, + std::uint64_t rest, std::uint64_t ten_k) +{ + JSON_ASSERT(len >= 1); + JSON_ASSERT(dist <= delta); + JSON_ASSERT(rest <= delta); + JSON_ASSERT(ten_k > 0); + + // <--------------------------- delta ----> + // <---- dist ---------> + // --------------[------------------+-------------------]-------------- + // M- w M+ + // + // ten_k + // <------> + // <---- rest ----> + // --------------[------------------+----+--------------]-------------- + // w V + // = buf * 10^k + // + // ten_k represents a unit-in-the-last-place in the decimal representation + // stored in buf. + // Decrement buf by ten_k while this takes buf closer to w. + + // The tests are written in this order to avoid overflow in unsigned + // integer arithmetic. + + while (rest < dist + && delta - rest >= ten_k + && (rest + ten_k < dist || dist - rest > rest + ten_k - dist)) + { + JSON_ASSERT(buf[len - 1] != '0'); + buf[len - 1]--; + rest += ten_k; + } +} + +/*! +Generates V = buffer * 10^decimal_exponent, such that M- <= V <= M+. +M- and M+ must be normalized and share the same exponent -60 <= e <= -32. +*/ +inline void grisu2_digit_gen(char* buffer, int& length, int& decimal_exponent, + diyfp M_minus, diyfp w, diyfp M_plus) +{ + static_assert(kAlpha >= -60, "internal error"); + static_assert(kGamma <= -32, "internal error"); + + // Generates the digits (and the exponent) of a decimal floating-point + // number V = buffer * 10^decimal_exponent in the range [M-, M+]. The diyfp's + // w, M- and M+ share the same exponent e, which satisfies alpha <= e <= gamma. + // + // <--------------------------- delta ----> + // <---- dist ---------> + // --------------[------------------+-------------------]-------------- + // M- w M+ + // + // Grisu2 generates the digits of M+ from left to right and stops as soon as + // V is in [M-,M+]. 
+ + JSON_ASSERT(M_plus.e >= kAlpha); + JSON_ASSERT(M_plus.e <= kGamma); + + std::uint64_t delta = diyfp::sub(M_plus, M_minus).f; // (significand of (M+ - M-), implicit exponent is e) + std::uint64_t dist = diyfp::sub(M_plus, w ).f; // (significand of (M+ - w ), implicit exponent is e) + + // Split M+ = f * 2^e into two parts p1 and p2 (note: e < 0): + // + // M+ = f * 2^e + // = ((f div 2^-e) * 2^-e + (f mod 2^-e)) * 2^e + // = ((p1 ) * 2^-e + (p2 )) * 2^e + // = p1 + p2 * 2^e + + const diyfp one(std::uint64_t{1} << -M_plus.e, M_plus.e); + + auto p1 = static_cast<std::uint32_t>(M_plus.f >> -one.e); // p1 = f div 2^-e (Since -e >= 32, p1 fits into a 32-bit int.) + std::uint64_t p2 = M_plus.f & (one.f - 1); // p2 = f mod 2^-e + + // 1) + // + // Generate the digits of the integral part p1 = d[n-1]...d[1]d[0] + + JSON_ASSERT(p1 > 0); + + std::uint32_t pow10{}; + const int k = find_largest_pow10(p1, pow10); + + // 10^(k-1) <= p1 < 10^k, pow10 = 10^(k-1) + // + // p1 = (p1 div 10^(k-1)) * 10^(k-1) + (p1 mod 10^(k-1)) + // = (d[k-1] ) * 10^(k-1) + (p1 mod 10^(k-1)) + // + // M+ = p1 + p2 * 2^e + // = d[k-1] * 10^(k-1) + (p1 mod 10^(k-1)) + p2 * 2^e + // = d[k-1] * 10^(k-1) + ((p1 mod 10^(k-1)) * 2^-e + p2) * 2^e + // = d[k-1] * 10^(k-1) + ( rest) * 2^e + // + // Now generate the digits d[n] of p1 from left to right (n = k-1,...,0) + // + // p1 = d[k-1]...d[n] * 10^n + d[n-1]...d[0] + // + // but stop as soon as + // + // rest * 2^e = (d[n-1]...d[0] * 2^-e + p2) * 2^e <= delta * 2^e + + int n = k; + while (n > 0) + { + // Invariants: + // M+ = buffer * 10^n + (p1 + p2 * 2^e) (buffer = 0 for n = k) + // pow10 = 10^(n-1) <= p1 < 10^n + // + const std::uint32_t d = p1 / pow10; // d = p1 div 10^(n-1) + const std::uint32_t r = p1 % pow10; // r = p1 mod 10^(n-1) + // + // M+ = buffer * 10^n + (d * 10^(n-1) + r) + p2 * 2^e + // = (buffer * 10 + d) * 10^(n-1) + (r + p2 * 2^e) + // + JSON_ASSERT(d <= 9); + buffer[length++] = static_cast<char>('0' + d); // buffer := buffer * 10 + d + // + // M+ = buffer * 10^(n-1) + (r + p2 * 2^e) + // + p1 = r; + n--; + // + // M+ = buffer * 10^n + (p1 + p2 * 2^e) + // pow10 = 10^n + // + + // Now check if enough digits have been generated. + // Compute + // + // p1 + p2 * 2^e = (p1 * 2^-e + p2) * 2^e = rest * 2^e + // + // Note: + // Since rest and delta share the same exponent e, it suffices to + // compare the significands. + const std::uint64_t rest = (std::uint64_t{p1} << -one.e) + p2; + if (rest <= delta) + { + // V = buffer * 10^n, with M- <= V <= M+. + + decimal_exponent += n; + + // We may now just stop. But instead look if the buffer could be + // decremented to bring V closer to w. + // + // pow10 = 10^n is now 1 ulp in the decimal representation V. + // The rounding procedure works with diyfp's with an implicit + // exponent of e. + // + // 10^n = (10^n * 2^-e) * 2^e = ulp * 2^e + // + const std::uint64_t ten_n = std::uint64_t{pow10} << -one.e; + grisu2_round(buffer, length, dist, delta, rest, ten_n); + + return; + } + + pow10 /= 10; + // + // pow10 = 10^(n-1) <= p1 < 10^n + // Invariants restored. + } + + // 2) + // + // The digits of the integral part have been generated: + // + // M+ = d[k-1]...d[1]d[0] + p2 * 2^e + // = buffer + p2 * 2^e + // + // Now generate the digits of the fractional part p2 * 2^e. + // + // Note: + // No decimal point is generated: the exponent is adjusted instead. + // + // p2 actually represents the fraction + // + // p2 * 2^e + // = p2 / 2^-e + // = d[-1] / 10^1 + d[-2] / 10^2 + ... 
+ // + // Now generate the digits d[-m] of p1 from left to right (m = 1,2,...) + // + // p2 * 2^e = d[-1]d[-2]...d[-m] * 10^-m + // + 10^-m * (d[-m-1] / 10^1 + d[-m-2] / 10^2 + ...) + // + // using + // + // 10^m * p2 = ((10^m * p2) div 2^-e) * 2^-e + ((10^m * p2) mod 2^-e) + // = ( d) * 2^-e + ( r) + // + // or + // 10^m * p2 * 2^e = d + r * 2^e + // + // i.e. + // + // M+ = buffer + p2 * 2^e + // = buffer + 10^-m * (d + r * 2^e) + // = (buffer * 10^m + d) * 10^-m + 10^-m * r * 2^e + // + // and stop as soon as 10^-m * r * 2^e <= delta * 2^e + + JSON_ASSERT(p2 > delta); + + int m = 0; + for (;;) + { + // Invariant: + // M+ = buffer * 10^-m + 10^-m * (d[-m-1] / 10 + d[-m-2] / 10^2 + ...) * 2^e + // = buffer * 10^-m + 10^-m * (p2 ) * 2^e + // = buffer * 10^-m + 10^-m * (1/10 * (10 * p2) ) * 2^e + // = buffer * 10^-m + 10^-m * (1/10 * ((10*p2 div 2^-e) * 2^-e + (10*p2 mod 2^-e)) * 2^e + // + JSON_ASSERT(p2 <= (std::numeric_limits<std::uint64_t>::max)() / 10); + p2 *= 10; + const std::uint64_t d = p2 >> -one.e; // d = (10 * p2) div 2^-e + const std::uint64_t r = p2 & (one.f - 1); // r = (10 * p2) mod 2^-e + // + // M+ = buffer * 10^-m + 10^-m * (1/10 * (d * 2^-e + r) * 2^e + // = buffer * 10^-m + 10^-m * (1/10 * (d + r * 2^e)) + // = (buffer * 10 + d) * 10^(-m-1) + 10^(-m-1) * r * 2^e + // + JSON_ASSERT(d <= 9); + buffer[length++] = static_cast<char>('0' + d); // buffer := buffer * 10 + d + // + // M+ = buffer * 10^(-m-1) + 10^(-m-1) * r * 2^e + // + p2 = r; + m++; + // + // M+ = buffer * 10^-m + 10^-m * p2 * 2^e + // Invariant restored. + + // Check if enough digits have been generated. + // + // 10^-m * p2 * 2^e <= delta * 2^e + // p2 * 2^e <= 10^m * delta * 2^e + // p2 <= 10^m * delta + delta *= 10; + dist *= 10; + if (p2 <= delta) + { + break; + } + } + + // V = buffer * 10^-m, with M- <= V <= M+. + + decimal_exponent -= m; + + // 1 ulp in the decimal representation is now 10^-m. + // Since delta and dist are now scaled by 10^m, we need to do the + // same with ulp in order to keep the units in sync. + // + // 10^m * 10^-m = 1 = 2^-e * 2^e = ten_m * 2^e + // + const std::uint64_t ten_m = one.f; + grisu2_round(buffer, length, dist, delta, p2, ten_m); + + // By construction this algorithm generates the shortest possible decimal + // number (Loitsch, Theorem 6.2) which rounds back to w. + // For an input number of precision p, at least + // + // N = 1 + ceil(p * log_10(2)) + // + // decimal digits are sufficient to identify all binary floating-point + // numbers (Matula, "In-and-Out conversions"). + // This implies that the algorithm does not produce more than N decimal + // digits. + // + // N = 17 for p = 53 (IEEE double precision) + // N = 9 for p = 24 (IEEE single precision) +} + +/*! +v = buf * 10^decimal_exponent +len is the length of the buffer (number of decimal digits) +The buffer must be large enough, i.e. >= max_digits10. +*/ +JSON_HEDLEY_NON_NULL(1) +inline void grisu2(char* buf, int& len, int& decimal_exponent, + diyfp m_minus, diyfp v, diyfp m_plus) +{ + JSON_ASSERT(m_plus.e == m_minus.e); + JSON_ASSERT(m_plus.e == v.e); + + // --------(-----------------------+-----------------------)-------- (A) + // m- v m+ + // + // --------------------(-----------+-----------------------)-------- (B) + // m- v m+ + // + // First scale v (and m- and m+) such that the exponent is in the range + // [alpha, gamma]. 
+ + const cached_power cached = get_cached_power_for_binary_exponent(m_plus.e); + + const diyfp c_minus_k(cached.f, cached.e); // = c ~= 10^-k + + // The exponent of the products is = v.e + c_minus_k.e + q and is in the range [alpha,gamma] + const diyfp w = diyfp::mul(v, c_minus_k); + const diyfp w_minus = diyfp::mul(m_minus, c_minus_k); + const diyfp w_plus = diyfp::mul(m_plus, c_minus_k); + + // ----(---+---)---------------(---+---)---------------(---+---)---- + // w- w w+ + // = c*m- = c*v = c*m+ + // + // diyfp::mul rounds its result and c_minus_k is approximated too. w, w- and + // w+ are now off by a small amount. + // In fact: + // + // w - v * 10^k < 1 ulp + // + // To account for this inaccuracy, add resp. subtract 1 ulp. + // + // --------+---[---------------(---+---)---------------]---+-------- + // w- M- w M+ w+ + // + // Now any number in [M-, M+] (bounds included) will round to w when input, + // regardless of how the input rounding algorithm breaks ties. + // + // And digit_gen generates the shortest possible such number in [M-, M+]. + // Note that this does not mean that Grisu2 always generates the shortest + // possible number in the interval (m-, m+). + const diyfp M_minus(w_minus.f + 1, w_minus.e); + const diyfp M_plus (w_plus.f - 1, w_plus.e ); + + decimal_exponent = -cached.k; // = -(-k) = k + + grisu2_digit_gen(buf, len, decimal_exponent, M_minus, w, M_plus); +} + +/*! +v = buf * 10^decimal_exponent +len is the length of the buffer (number of decimal digits) +The buffer must be large enough, i.e. >= max_digits10. +*/ +template<typename FloatType> +JSON_HEDLEY_NON_NULL(1) +void grisu2(char* buf, int& len, int& decimal_exponent, FloatType value) +{ + static_assert(diyfp::kPrecision >= std::numeric_limits<FloatType>::digits + 3, + "internal error: not enough precision"); + + JSON_ASSERT(std::isfinite(value)); + JSON_ASSERT(value > 0); + + // If the neighbors (and boundaries) of 'value' are always computed for double-precision + // numbers, all float's can be recovered using strtod (and strtof). However, the resulting + // decimal representations are not exactly "short". + // + // The documentation for 'std::to_chars' (https://en.cppreference.com/w/cpp/utility/to_chars) + // says "value is converted to a string as if by std::sprintf in the default ("C") locale" + // and since sprintf promotes floats to doubles, I think this is exactly what 'std::to_chars' + // does. + // On the other hand, the documentation for 'std::to_chars' requires that "parsing the + // representation using the corresponding std::from_chars function recovers value exactly". That + // indicates that single precision floating-point numbers should be recovered using + // 'std::strtof'. + // + // NB: If the neighbors are computed for single-precision numbers, there is a single float + // (7.0385307e-26f) which can't be recovered using strtod. The resulting double precision + // value is off by 1 ulp. +#if 0 + const boundaries w = compute_boundaries(static_cast<double>(value)); +#else + const boundaries w = compute_boundaries(value); +#endif + + grisu2(buf, len, decimal_exponent, w.minus, w.w, w.plus); +} + +/*! +@brief appends a decimal representation of e to buf +@return a pointer to the element following the exponent. 
+@pre -1000 < e < 1000 +*/ +JSON_HEDLEY_NON_NULL(1) +JSON_HEDLEY_RETURNS_NON_NULL +inline char* append_exponent(char* buf, int e) +{ + JSON_ASSERT(e > -1000); + JSON_ASSERT(e < 1000); + + if (e < 0) + { + e = -e; + *buf++ = '-'; + } + else + { + *buf++ = '+'; + } + + auto k = static_cast<std::uint32_t>(e); + if (k < 10) + { + // Always print at least two digits in the exponent. + // This is for compatibility with printf("%g"). + *buf++ = '0'; + *buf++ = static_cast<char>('0' + k); + } + else if (k < 100) + { + *buf++ = static_cast<char>('0' + k / 10); + k %= 10; + *buf++ = static_cast<char>('0' + k); + } + else + { + *buf++ = static_cast<char>('0' + k / 100); + k %= 100; + *buf++ = static_cast<char>('0' + k / 10); + k %= 10; + *buf++ = static_cast<char>('0' + k); + } + + return buf; +} + +/*! +@brief prettify v = buf * 10^decimal_exponent + +If v is in the range [10^min_exp, 10^max_exp) it will be printed in fixed-point +notation. Otherwise it will be printed in exponential notation. + +@pre min_exp < 0 +@pre max_exp > 0 +*/ +JSON_HEDLEY_NON_NULL(1) +JSON_HEDLEY_RETURNS_NON_NULL +inline char* format_buffer(char* buf, int len, int decimal_exponent, + int min_exp, int max_exp) +{ + JSON_ASSERT(min_exp < 0); + JSON_ASSERT(max_exp > 0); + + const int k = len; + const int n = len + decimal_exponent; + + // v = buf * 10^(n-k) + // k is the length of the buffer (number of decimal digits) + // n is the position of the decimal point relative to the start of the buffer. + + if (k <= n && n <= max_exp) + { + // digits[000] + // len <= max_exp + 2 + + std::memset(buf + k, '0', static_cast<size_t>(n) - static_cast<size_t>(k)); + // Make it look like a floating-point number (#362, #378) + buf[n + 0] = '.'; + buf[n + 1] = '0'; + return buf + (static_cast<size_t>(n) + 2); + } + + if (0 < n && n <= max_exp) + { + // dig.its + // len <= max_digits10 + 1 + + JSON_ASSERT(k > n); + + std::memmove(buf + (static_cast<size_t>(n) + 1), buf + n, static_cast<size_t>(k) - static_cast<size_t>(n)); + buf[n] = '.'; + return buf + (static_cast<size_t>(k) + 1U); + } + + if (min_exp < n && n <= 0) + { + // 0.[000]digits + // len <= 2 + (-min_exp - 1) + max_digits10 + + std::memmove(buf + (2 + static_cast<size_t>(-n)), buf, static_cast<size_t>(k)); + buf[0] = '0'; + buf[1] = '.'; + std::memset(buf + 2, '0', static_cast<size_t>(-n)); + return buf + (2U + static_cast<size_t>(-n) + static_cast<size_t>(k)); + } + + if (k == 1) + { + // dE+123 + // len <= 1 + 5 + + buf += 1; + } + else + { + // d.igitsE+123 + // len <= max_digits10 + 1 + 5 + + std::memmove(buf + 2, buf + 1, static_cast<size_t>(k) - 1); + buf[1] = '.'; + buf += 1 + static_cast<size_t>(k); + } + + *buf++ = 'e'; + return append_exponent(buf, n - 1); +} + +} // namespace dtoa_impl + +/*! +@brief generates a decimal representation of the floating-point number value in [first, last). + +The format of the resulting decimal representation is similar to printf's %g +format. Returns an iterator pointing past-the-end of the decimal representation. + +@note The input number must be finite, i.e. NaN's and Inf's are not supported. +@note The buffer must be large enough. +@note The result is NOT null-terminated. +*/ +template<typename FloatType> +JSON_HEDLEY_NON_NULL(1, 2) +JSON_HEDLEY_RETURNS_NON_NULL +char* to_chars(char* first, const char* last, FloatType value) +{ + static_cast<void>(last); // maybe unused - fix warning + JSON_ASSERT(std::isfinite(value)); + + // Use signbit(value) instead of (value < 0) since signbit works for -0. 
+ if (std::signbit(value)) + { + value = -value; + *first++ = '-'; + } + +#ifdef __GNUC__ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wfloat-equal" +#endif + if (value == 0) // +-0 + { + *first++ = '0'; + // Make it look like a floating-point number (#362, #378) + *first++ = '.'; + *first++ = '0'; + return first; + } +#ifdef __GNUC__ +#pragma GCC diagnostic pop +#endif + + JSON_ASSERT(last - first >= std::numeric_limits<FloatType>::max_digits10); + + // Compute v = buffer * 10^decimal_exponent. + // The decimal digits are stored in the buffer, which needs to be interpreted + // as an unsigned decimal integer. + // len is the length of the buffer, i.e. the number of decimal digits. + int len = 0; + int decimal_exponent = 0; + dtoa_impl::grisu2(first, len, decimal_exponent, value); + + JSON_ASSERT(len <= std::numeric_limits<FloatType>::max_digits10); + + // Format the buffer like printf("%.*g", prec, value) + constexpr int kMinExp = -4; + // Use digits10 here to increase compatibility with version 2. + constexpr int kMaxExp = std::numeric_limits<FloatType>::digits10; + + JSON_ASSERT(last - first >= kMaxExp + 2); + JSON_ASSERT(last - first >= 2 + (-kMinExp - 1) + std::numeric_limits<FloatType>::max_digits10); + JSON_ASSERT(last - first >= std::numeric_limits<FloatType>::max_digits10 + 6); + + return dtoa_impl::format_buffer(first, len, decimal_exponent, kMinExp, kMaxExp); +} + +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/exceptions.hpp> + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/meta/cpp_future.hpp> + +// #include <nlohmann/detail/output/binary_writer.hpp> + +// #include <nlohmann/detail/output/output_adapters.hpp> + +// #include <nlohmann/detail/string_concat.hpp> + +// #include <nlohmann/detail/value_t.hpp> + + +namespace nlohmann +{ +namespace detail +{ +/////////////////// +// serialization // +/////////////////// + +/// how to treat decoding errors +enum class error_handler_t +{ + strict, ///< throw a type_error exception in case of invalid UTF-8 + replace, ///< replace invalid UTF-8 sequences with U+FFFD + ignore ///< ignore invalid UTF-8 sequences +}; + +template<typename BasicJsonType> +class serializer +{ + using string_t = typename BasicJsonType::string_t; + using number_float_t = typename BasicJsonType::number_float_t; + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using binary_char_t = typename BasicJsonType::binary_t::value_type; + static constexpr std::uint8_t UTF8_ACCEPT = 0; + static constexpr std::uint8_t UTF8_REJECT = 1; + + public: + /*! + @param[in] s output stream to serialize to + @param[in] ichar indentation character to use + @param[in] error_handler_ how to react on decoding errors + */ + serializer(output_adapter_t<char> s, const char ichar, + error_handler_t error_handler_ = error_handler_t::strict) + : o(std::move(s)) + , loc(std::localeconv()) + , thousands_sep(loc->thousands_sep == nullptr ? '\0' : std::char_traits<char>::to_char_type(* (loc->thousands_sep))) + , decimal_point(loc->decimal_point == nullptr ? 
'\0' : std::char_traits<char>::to_char_type(* (loc->decimal_point))) + , indent_char(ichar) + , indent_string(512, indent_char) + , error_handler(error_handler_) + {} + + // delete because of pointer members + serializer(const serializer&) = delete; + serializer& operator=(const serializer&) = delete; + serializer(serializer&&) = delete; + serializer& operator=(serializer&&) = delete; + ~serializer() = default; + + /*! + @brief internal implementation of the serialization function + + This function is called by the public member function dump and organizes + the serialization internally. The indentation level is propagated as + additional parameter. In case of arrays and objects, the function is + called recursively. + + - strings and object keys are escaped using `escape_string()` + - integer numbers are converted implicitly via `operator<<` + - floating-point numbers are converted to a string using `"%g"` format + - binary values are serialized as objects containing the subtype and the + byte array + + @param[in] val value to serialize + @param[in] pretty_print whether the output shall be pretty-printed + @param[in] ensure_ascii If @a ensure_ascii is true, all non-ASCII characters + in the output are escaped with `\uXXXX` sequences, and the result consists + of ASCII characters only. + @param[in] indent_step the indent level + @param[in] current_indent the current indent level (only used internally) + */ + void dump(const BasicJsonType& val, + const bool pretty_print, + const bool ensure_ascii, + const unsigned int indent_step, + const unsigned int current_indent = 0) + { + switch (val.m_type) + { + case value_t::object: + { + if (val.m_value.object->empty()) + { + o->write_characters("{}", 2); + return; + } + + if (pretty_print) + { + o->write_characters("{\n", 2); + + // variable to hold indentation for recursive calls + const auto new_indent = current_indent + indent_step; + if (JSON_HEDLEY_UNLIKELY(indent_string.size() < new_indent)) + { + indent_string.resize(indent_string.size() * 2, ' '); + } + + // first n-1 elements + auto i = val.m_value.object->cbegin(); + for (std::size_t cnt = 0; cnt < val.m_value.object->size() - 1; ++cnt, ++i) + { + o->write_characters(indent_string.c_str(), new_indent); + o->write_character('\"'); + dump_escaped(i->first, ensure_ascii); + o->write_characters("\": ", 3); + dump(i->second, true, ensure_ascii, indent_step, new_indent); + o->write_characters(",\n", 2); + } + + // last element + JSON_ASSERT(i != val.m_value.object->cend()); + JSON_ASSERT(std::next(i) == val.m_value.object->cend()); + o->write_characters(indent_string.c_str(), new_indent); + o->write_character('\"'); + dump_escaped(i->first, ensure_ascii); + o->write_characters("\": ", 3); + dump(i->second, true, ensure_ascii, indent_step, new_indent); + + o->write_character('\n'); + o->write_characters(indent_string.c_str(), current_indent); + o->write_character('}'); + } + else + { + o->write_character('{'); + + // first n-1 elements + auto i = val.m_value.object->cbegin(); + for (std::size_t cnt = 0; cnt < val.m_value.object->size() - 1; ++cnt, ++i) + { + o->write_character('\"'); + dump_escaped(i->first, ensure_ascii); + o->write_characters("\":", 2); + dump(i->second, false, ensure_ascii, indent_step, current_indent); + o->write_character(','); + } + + // last element + JSON_ASSERT(i != val.m_value.object->cend()); + JSON_ASSERT(std::next(i) == val.m_value.object->cend()); + o->write_character('\"'); + dump_escaped(i->first, ensure_ascii); + o->write_characters("\":", 2); + dump(i->second, 
false, ensure_ascii, indent_step, current_indent); + + o->write_character('}'); + } + + return; + } + + case value_t::array: + { + if (val.m_value.array->empty()) + { + o->write_characters("[]", 2); + return; + } + + if (pretty_print) + { + o->write_characters("[\n", 2); + + // variable to hold indentation for recursive calls + const auto new_indent = current_indent + indent_step; + if (JSON_HEDLEY_UNLIKELY(indent_string.size() < new_indent)) + { + indent_string.resize(indent_string.size() * 2, ' '); + } + + // first n-1 elements + for (auto i = val.m_value.array->cbegin(); + i != val.m_value.array->cend() - 1; ++i) + { + o->write_characters(indent_string.c_str(), new_indent); + dump(*i, true, ensure_ascii, indent_step, new_indent); + o->write_characters(",\n", 2); + } + + // last element + JSON_ASSERT(!val.m_value.array->empty()); + o->write_characters(indent_string.c_str(), new_indent); + dump(val.m_value.array->back(), true, ensure_ascii, indent_step, new_indent); + + o->write_character('\n'); + o->write_characters(indent_string.c_str(), current_indent); + o->write_character(']'); + } + else + { + o->write_character('['); + + // first n-1 elements + for (auto i = val.m_value.array->cbegin(); + i != val.m_value.array->cend() - 1; ++i) + { + dump(*i, false, ensure_ascii, indent_step, current_indent); + o->write_character(','); + } + + // last element + JSON_ASSERT(!val.m_value.array->empty()); + dump(val.m_value.array->back(), false, ensure_ascii, indent_step, current_indent); + + o->write_character(']'); + } + + return; + } + + case value_t::string: + { + o->write_character('\"'); + dump_escaped(*val.m_value.string, ensure_ascii); + o->write_character('\"'); + return; + } + + case value_t::binary: + { + if (pretty_print) + { + o->write_characters("{\n", 2); + + // variable to hold indentation for recursive calls + const auto new_indent = current_indent + indent_step; + if (JSON_HEDLEY_UNLIKELY(indent_string.size() < new_indent)) + { + indent_string.resize(indent_string.size() * 2, ' '); + } + + o->write_characters(indent_string.c_str(), new_indent); + + o->write_characters("\"bytes\": [", 10); + + if (!val.m_value.binary->empty()) + { + for (auto i = val.m_value.binary->cbegin(); + i != val.m_value.binary->cend() - 1; ++i) + { + dump_integer(*i); + o->write_characters(", ", 2); + } + dump_integer(val.m_value.binary->back()); + } + + o->write_characters("],\n", 3); + o->write_characters(indent_string.c_str(), new_indent); + + o->write_characters("\"subtype\": ", 11); + if (val.m_value.binary->has_subtype()) + { + dump_integer(val.m_value.binary->subtype()); + } + else + { + o->write_characters("null", 4); + } + o->write_character('\n'); + o->write_characters(indent_string.c_str(), current_indent); + o->write_character('}'); + } + else + { + o->write_characters("{\"bytes\":[", 10); + + if (!val.m_value.binary->empty()) + { + for (auto i = val.m_value.binary->cbegin(); + i != val.m_value.binary->cend() - 1; ++i) + { + dump_integer(*i); + o->write_character(','); + } + dump_integer(val.m_value.binary->back()); + } + + o->write_characters("],\"subtype\":", 12); + if (val.m_value.binary->has_subtype()) + { + dump_integer(val.m_value.binary->subtype()); + o->write_character('}'); + } + else + { + o->write_characters("null}", 5); + } + } + return; + } + + case value_t::boolean: + { + if (val.m_value.boolean) + { + o->write_characters("true", 4); + } + else + { + o->write_characters("false", 5); + } + return; + } + + case value_t::number_integer: + { + dump_integer(val.m_value.number_integer); + 
return; + } + + case value_t::number_unsigned: + { + dump_integer(val.m_value.number_unsigned); + return; + } + + case value_t::number_float: + { + dump_float(val.m_value.number_float); + return; + } + + case value_t::discarded: + { + o->write_characters("<discarded>", 11); + return; + } + + case value_t::null: + { + o->write_characters("null", 4); + return; + } + + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) LCOV_EXCL_LINE + } + } + + JSON_PRIVATE_UNLESS_TESTED: + /*! + @brief dump escaped string + + Escape a string by replacing certain special characters by a sequence of an + escape character (backslash) and another character and other control + characters by a sequence of "\u" followed by a four-digit hex + representation. The escaped string is written to output stream @a o. + + @param[in] s the string to escape + @param[in] ensure_ascii whether to escape non-ASCII characters with + \uXXXX sequences + + @complexity Linear in the length of string @a s. + */ + void dump_escaped(const string_t& s, const bool ensure_ascii) + { + std::uint32_t codepoint{}; + std::uint8_t state = UTF8_ACCEPT; + std::size_t bytes = 0; // number of bytes written to string_buffer + + // number of bytes written at the point of the last valid byte + std::size_t bytes_after_last_accept = 0; + std::size_t undumped_chars = 0; + + for (std::size_t i = 0; i < s.size(); ++i) + { + const auto byte = static_cast<std::uint8_t>(s[i]); + + switch (decode(state, codepoint, byte)) + { + case UTF8_ACCEPT: // decode found a new code point + { + switch (codepoint) + { + case 0x08: // backspace + { + string_buffer[bytes++] = '\\'; + string_buffer[bytes++] = 'b'; + break; + } + + case 0x09: // horizontal tab + { + string_buffer[bytes++] = '\\'; + string_buffer[bytes++] = 't'; + break; + } + + case 0x0A: // newline + { + string_buffer[bytes++] = '\\'; + string_buffer[bytes++] = 'n'; + break; + } + + case 0x0C: // formfeed + { + string_buffer[bytes++] = '\\'; + string_buffer[bytes++] = 'f'; + break; + } + + case 0x0D: // carriage return + { + string_buffer[bytes++] = '\\'; + string_buffer[bytes++] = 'r'; + break; + } + + case 0x22: // quotation mark + { + string_buffer[bytes++] = '\\'; + string_buffer[bytes++] = '\"'; + break; + } + + case 0x5C: // reverse solidus + { + string_buffer[bytes++] = '\\'; + string_buffer[bytes++] = '\\'; + break; + } + + default: + { + // escape control characters (0x00..0x1F) or, if + // ensure_ascii parameter is used, non-ASCII characters + if ((codepoint <= 0x1F) || (ensure_ascii && (codepoint >= 0x7F))) + { + if (codepoint <= 0xFFFF) + { + // NOLINTNEXTLINE(cppcoreguidelines-pro-type-vararg,hicpp-vararg) + static_cast<void>((std::snprintf)(string_buffer.data() + bytes, 7, "\\u%04x", + static_cast<std::uint16_t>(codepoint))); + bytes += 6; + } + else + { + // NOLINTNEXTLINE(cppcoreguidelines-pro-type-vararg,hicpp-vararg) + static_cast<void>((std::snprintf)(string_buffer.data() + bytes, 13, "\\u%04x\\u%04x", + static_cast<std::uint16_t>(0xD7C0u + (codepoint >> 10u)), + static_cast<std::uint16_t>(0xDC00u + (codepoint & 0x3FFu)))); + bytes += 12; + } + } + else + { + // copy byte to buffer (all previous bytes + // been copied have in default case above) + string_buffer[bytes++] = s[i]; + } + break; + } + } + + // write buffer and reset index; there must be 13 bytes + // left, as this is the maximal number of bytes to be + // written ("\uxxxx\uxxxx\0") for one code point + if (string_buffer.size() - bytes < 13) + { + 
o->write_characters(string_buffer.data(), bytes); + bytes = 0; + } + + // remember the byte position of this accept + bytes_after_last_accept = bytes; + undumped_chars = 0; + break; + } + + case UTF8_REJECT: // decode found invalid UTF-8 byte + { + switch (error_handler) + { + case error_handler_t::strict: + { + JSON_THROW(type_error::create(316, concat("invalid UTF-8 byte at index ", std::to_string(i), ": 0x", hex_bytes(byte | 0)), nullptr)); + } + + case error_handler_t::ignore: + case error_handler_t::replace: + { + // in case we saw this character the first time, we + // would like to read it again, because the byte + // may be OK for itself, but just not OK for the + // previous sequence + if (undumped_chars > 0) + { + --i; + } + + // reset length buffer to the last accepted index; + // thus removing/ignoring the invalid characters + bytes = bytes_after_last_accept; + + if (error_handler == error_handler_t::replace) + { + // add a replacement character + if (ensure_ascii) + { + string_buffer[bytes++] = '\\'; + string_buffer[bytes++] = 'u'; + string_buffer[bytes++] = 'f'; + string_buffer[bytes++] = 'f'; + string_buffer[bytes++] = 'f'; + string_buffer[bytes++] = 'd'; + } + else + { + string_buffer[bytes++] = detail::binary_writer<BasicJsonType, char>::to_char_type('\xEF'); + string_buffer[bytes++] = detail::binary_writer<BasicJsonType, char>::to_char_type('\xBF'); + string_buffer[bytes++] = detail::binary_writer<BasicJsonType, char>::to_char_type('\xBD'); + } + + // write buffer and reset index; there must be 13 bytes + // left, as this is the maximal number of bytes to be + // written ("\uxxxx\uxxxx\0") for one code point + if (string_buffer.size() - bytes < 13) + { + o->write_characters(string_buffer.data(), bytes); + bytes = 0; + } + + bytes_after_last_accept = bytes; + } + + undumped_chars = 0; + + // continue processing the string + state = UTF8_ACCEPT; + break; + } + + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) LCOV_EXCL_LINE + } + break; + } + + default: // decode found yet incomplete multi-byte code point + { + if (!ensure_ascii) + { + // code point will not be escaped - copy byte to buffer + string_buffer[bytes++] = s[i]; + } + ++undumped_chars; + break; + } + } + } + + // we finished processing the string + if (JSON_HEDLEY_LIKELY(state == UTF8_ACCEPT)) + { + // write buffer + if (bytes > 0) + { + o->write_characters(string_buffer.data(), bytes); + } + } + else + { + // we finish reading, but do not accept: string was incomplete + switch (error_handler) + { + case error_handler_t::strict: + { + JSON_THROW(type_error::create(316, concat("incomplete UTF-8 string; last byte: 0x", hex_bytes(static_cast<std::uint8_t>(s.back() | 0))), nullptr)); + } + + case error_handler_t::ignore: + { + // write all accepted bytes + o->write_characters(string_buffer.data(), bytes_after_last_accept); + break; + } + + case error_handler_t::replace: + { + // write all accepted bytes + o->write_characters(string_buffer.data(), bytes_after_last_accept); + // add a replacement character + if (ensure_ascii) + { + o->write_characters("\\ufffd", 6); + } + else + { + o->write_characters("\xEF\xBF\xBD", 3); + } + break; + } + + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) LCOV_EXCL_LINE + } + } + } + + private: + /*! + @brief count digits + + Count the number of decimal (base 10) digits for an input unsigned integer. 
+ + @param[in] x unsigned integer number to count its digits + @return number of decimal digits + */ + inline unsigned int count_digits(number_unsigned_t x) noexcept + { + unsigned int n_digits = 1; + for (;;) + { + if (x < 10) + { + return n_digits; + } + if (x < 100) + { + return n_digits + 1; + } + if (x < 1000) + { + return n_digits + 2; + } + if (x < 10000) + { + return n_digits + 3; + } + x = x / 10000u; + n_digits += 4; + } + } + + /*! + * @brief convert a byte to a uppercase hex representation + * @param[in] byte byte to represent + * @return representation ("00".."FF") + */ + static std::string hex_bytes(std::uint8_t byte) + { + std::string result = "FF"; + constexpr const char* nibble_to_hex = "0123456789ABCDEF"; + result[0] = nibble_to_hex[byte / 16]; + result[1] = nibble_to_hex[byte % 16]; + return result; + } + + // templates to avoid warnings about useless casts + template <typename NumberType, enable_if_t<std::is_signed<NumberType>::value, int> = 0> + bool is_negative_number(NumberType x) + { + return x < 0; + } + + template < typename NumberType, enable_if_t <std::is_unsigned<NumberType>::value, int > = 0 > + bool is_negative_number(NumberType /*unused*/) + { + return false; + } + + /*! + @brief dump an integer + + Dump a given integer to output stream @a o. Works internally with + @a number_buffer. + + @param[in] x integer number (signed or unsigned) to dump + @tparam NumberType either @a number_integer_t or @a number_unsigned_t + */ + template < typename NumberType, detail::enable_if_t < + std::is_integral<NumberType>::value || + std::is_same<NumberType, number_unsigned_t>::value || + std::is_same<NumberType, number_integer_t>::value || + std::is_same<NumberType, binary_char_t>::value, + int > = 0 > + void dump_integer(NumberType x) + { + static constexpr std::array<std::array<char, 2>, 100> digits_to_99 + { + { + {{'0', '0'}}, {{'0', '1'}}, {{'0', '2'}}, {{'0', '3'}}, {{'0', '4'}}, {{'0', '5'}}, {{'0', '6'}}, {{'0', '7'}}, {{'0', '8'}}, {{'0', '9'}}, + {{'1', '0'}}, {{'1', '1'}}, {{'1', '2'}}, {{'1', '3'}}, {{'1', '4'}}, {{'1', '5'}}, {{'1', '6'}}, {{'1', '7'}}, {{'1', '8'}}, {{'1', '9'}}, + {{'2', '0'}}, {{'2', '1'}}, {{'2', '2'}}, {{'2', '3'}}, {{'2', '4'}}, {{'2', '5'}}, {{'2', '6'}}, {{'2', '7'}}, {{'2', '8'}}, {{'2', '9'}}, + {{'3', '0'}}, {{'3', '1'}}, {{'3', '2'}}, {{'3', '3'}}, {{'3', '4'}}, {{'3', '5'}}, {{'3', '6'}}, {{'3', '7'}}, {{'3', '8'}}, {{'3', '9'}}, + {{'4', '0'}}, {{'4', '1'}}, {{'4', '2'}}, {{'4', '3'}}, {{'4', '4'}}, {{'4', '5'}}, {{'4', '6'}}, {{'4', '7'}}, {{'4', '8'}}, {{'4', '9'}}, + {{'5', '0'}}, {{'5', '1'}}, {{'5', '2'}}, {{'5', '3'}}, {{'5', '4'}}, {{'5', '5'}}, {{'5', '6'}}, {{'5', '7'}}, {{'5', '8'}}, {{'5', '9'}}, + {{'6', '0'}}, {{'6', '1'}}, {{'6', '2'}}, {{'6', '3'}}, {{'6', '4'}}, {{'6', '5'}}, {{'6', '6'}}, {{'6', '7'}}, {{'6', '8'}}, {{'6', '9'}}, + {{'7', '0'}}, {{'7', '1'}}, {{'7', '2'}}, {{'7', '3'}}, {{'7', '4'}}, {{'7', '5'}}, {{'7', '6'}}, {{'7', '7'}}, {{'7', '8'}}, {{'7', '9'}}, + {{'8', '0'}}, {{'8', '1'}}, {{'8', '2'}}, {{'8', '3'}}, {{'8', '4'}}, {{'8', '5'}}, {{'8', '6'}}, {{'8', '7'}}, {{'8', '8'}}, {{'8', '9'}}, + {{'9', '0'}}, {{'9', '1'}}, {{'9', '2'}}, {{'9', '3'}}, {{'9', '4'}}, {{'9', '5'}}, {{'9', '6'}}, {{'9', '7'}}, {{'9', '8'}}, {{'9', '9'}}, + } + }; + + // special case for "0" + if (x == 0) + { + o->write_character('0'); + return; + } + + // use a pointer to fill the buffer + auto buffer_ptr = number_buffer.begin(); // 
NOLINT(llvm-qualified-auto,readability-qualified-auto,cppcoreguidelines-pro-type-vararg,hicpp-vararg) + + number_unsigned_t abs_value; + + unsigned int n_chars{}; + + if (is_negative_number(x)) + { + *buffer_ptr = '-'; + abs_value = remove_sign(static_cast<number_integer_t>(x)); + + // account one more byte for the minus sign + n_chars = 1 + count_digits(abs_value); + } + else + { + abs_value = static_cast<number_unsigned_t>(x); + n_chars = count_digits(abs_value); + } + + // spare 1 byte for '\0' + JSON_ASSERT(n_chars < number_buffer.size() - 1); + + // jump to the end to generate the string from backward, + // so we later avoid reversing the result + buffer_ptr += n_chars; + + // Fast int2ascii implementation inspired by "Fastware" talk by Andrei Alexandrescu + // See: https://www.youtube.com/watch?v=o4-CwDo2zpg + while (abs_value >= 100) + { + const auto digits_index = static_cast<unsigned>((abs_value % 100)); + abs_value /= 100; + *(--buffer_ptr) = digits_to_99[digits_index][1]; + *(--buffer_ptr) = digits_to_99[digits_index][0]; + } + + if (abs_value >= 10) + { + const auto digits_index = static_cast<unsigned>(abs_value); + *(--buffer_ptr) = digits_to_99[digits_index][1]; + *(--buffer_ptr) = digits_to_99[digits_index][0]; + } + else + { + *(--buffer_ptr) = static_cast<char>('0' + abs_value); + } + + o->write_characters(number_buffer.data(), n_chars); + } + + /*! + @brief dump a floating-point number + + Dump a given floating-point number to output stream @a o. Works internally + with @a number_buffer. + + @param[in] x floating-point number to dump + */ + void dump_float(number_float_t x) + { + // NaN / inf + if (!std::isfinite(x)) + { + o->write_characters("null", 4); + return; + } + + // If number_float_t is an IEEE-754 single or double precision number, + // use the Grisu2 algorithm to produce short numbers which are + // guaranteed to round-trip, using strtof and strtod, resp. + // + // NB: The test below works if <long double> == <double>. 
+ static constexpr bool is_ieee_single_or_double + = (std::numeric_limits<number_float_t>::is_iec559 && std::numeric_limits<number_float_t>::digits == 24 && std::numeric_limits<number_float_t>::max_exponent == 128) || + (std::numeric_limits<number_float_t>::is_iec559 && std::numeric_limits<number_float_t>::digits == 53 && std::numeric_limits<number_float_t>::max_exponent == 1024); + + dump_float(x, std::integral_constant<bool, is_ieee_single_or_double>()); + } + + void dump_float(number_float_t x, std::true_type /*is_ieee_single_or_double*/) + { + auto* begin = number_buffer.data(); + auto* end = ::nlohmann::detail::to_chars(begin, begin + number_buffer.size(), x); + + o->write_characters(begin, static_cast<size_t>(end - begin)); + } + + void dump_float(number_float_t x, std::false_type /*is_ieee_single_or_double*/) + { + // get number of digits for a float -> text -> float round-trip + static constexpr auto d = std::numeric_limits<number_float_t>::max_digits10; + + // the actual conversion + // NOLINTNEXTLINE(cppcoreguidelines-pro-type-vararg,hicpp-vararg) + std::ptrdiff_t len = (std::snprintf)(number_buffer.data(), number_buffer.size(), "%.*g", d, x); + + // negative value indicates an error + JSON_ASSERT(len > 0); + // check if buffer was large enough + JSON_ASSERT(static_cast<std::size_t>(len) < number_buffer.size()); + + // erase thousands separator + if (thousands_sep != '\0') + { + // NOLINTNEXTLINE(readability-qualified-auto,llvm-qualified-auto): std::remove returns an iterator, see https://github.com/nlohmann/json/issues/3081 + const auto end = std::remove(number_buffer.begin(), number_buffer.begin() + len, thousands_sep); + std::fill(end, number_buffer.end(), '\0'); + JSON_ASSERT((end - number_buffer.begin()) <= len); + len = (end - number_buffer.begin()); + } + + // convert decimal point to '.' + if (decimal_point != '\0' && decimal_point != '.') + { + // NOLINTNEXTLINE(readability-qualified-auto,llvm-qualified-auto): std::find returns an iterator, see https://github.com/nlohmann/json/issues/3081 + const auto dec_pos = std::find(number_buffer.begin(), number_buffer.end(), decimal_point); + if (dec_pos != number_buffer.end()) + { + *dec_pos = '.'; + } + } + + o->write_characters(number_buffer.data(), static_cast<std::size_t>(len)); + + // determine if we need to append ".0" + const bool value_is_int_like = + std::none_of(number_buffer.begin(), number_buffer.begin() + len + 1, + [](char c) + { + return c == '.' || c == 'e'; + }); + + if (value_is_int_like) + { + o->write_characters(".0", 2); + } + } + + /*! + @brief check whether a string is UTF-8 encoded + + The function checks each byte of a string whether it is UTF-8 encoded. The + result of the check is stored in the @a state parameter. The function must + be called initially with state 0 (accept). State 1 means the string must + be rejected, because the current byte is not allowed. If the string is + completely processed, but the state is non-zero, the string ended + prematurely; that is, the last byte indicated more bytes should have + followed. + + @param[in,out] state the state of the decoding + @param[in,out] codep codepoint (valid only if resulting state is UTF8_ACCEPT) + @param[in] byte next byte to decode + @return new state + + @note The function has been edited: a std::array is used. 
+ + @copyright Copyright (c) 2008-2009 Bjoern Hoehrmann <bjoern@hoehrmann.de> + @sa http://bjoern.hoehrmann.de/utf-8/decoder/dfa/ + */ + static std::uint8_t decode(std::uint8_t& state, std::uint32_t& codep, const std::uint8_t byte) noexcept + { + static const std::array<std::uint8_t, 400> utf8d = + { + { + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 00..1F + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 20..3F + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 40..5F + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 60..7F + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, // 80..9F + 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, // A0..BF + 8, 8, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, // C0..DF + 0xA, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x4, 0x3, 0x3, // E0..EF + 0xB, 0x6, 0x6, 0x6, 0x5, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, // F0..FF + 0x0, 0x1, 0x2, 0x3, 0x5, 0x8, 0x7, 0x1, 0x1, 0x1, 0x4, 0x6, 0x1, 0x1, 0x1, 0x1, // s0..s0 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, // s1..s2 + 1, 2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, // s3..s4 + 1, 2, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, // s5..s6 + 1, 3, 1, 1, 1, 1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 // s7..s8 + } + }; + + JSON_ASSERT(byte < utf8d.size()); + const std::uint8_t type = utf8d[byte]; + + codep = (state != UTF8_ACCEPT) + ? (byte & 0x3fu) | (codep << 6u) + : (0xFFu >> type) & (byte); + + std::size_t index = 256u + static_cast<size_t>(state) * 16u + static_cast<size_t>(type); + JSON_ASSERT(index < 400); + state = utf8d[index]; + return state; + } + + /* + * Overload to make the compiler happy while it is instantiating + * dump_integer for number_unsigned_t. + * Must never be called. + */ + number_unsigned_t remove_sign(number_unsigned_t x) + { + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) LCOV_EXCL_LINE + return x; // LCOV_EXCL_LINE + } + + /* + * Helper function for dump_integer + * + * This function takes a negative signed integer and returns its absolute + * value as unsigned integer. The plus/minus shuffling is necessary as we can + * not directly remove the sign of an arbitrary signed integer as the + * absolute values of INT_MIN and INT_MAX are usually not the same. See + * #1708 for details. 
+ */ + inline number_unsigned_t remove_sign(number_integer_t x) noexcept + { + JSON_ASSERT(x < 0 && x < (std::numeric_limits<number_integer_t>::max)()); // NOLINT(misc-redundant-expression) + return static_cast<number_unsigned_t>(-(x + 1)) + 1; + } + + private: + /// the output of the serializer + output_adapter_t<char> o = nullptr; + + /// a (hopefully) large enough character buffer + std::array<char, 64> number_buffer{{}}; + + /// the locale + const std::lconv* loc = nullptr; + /// the locale's thousand separator character + const char thousands_sep = '\0'; + /// the locale's decimal point character + const char decimal_point = '\0'; + + /// string buffer + std::array<char, 512> string_buffer{{}}; + + /// the indentation character + const char indent_char; + /// the indentation string + string_t indent_string; + + /// error_handler how to react on decoding errors + const error_handler_t error_handler; +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/value_t.hpp> + +// #include <nlohmann/json_fwd.hpp> + +// #include <nlohmann/ordered_map.hpp> + + +#include <functional> // equal_to, less +#include <initializer_list> // initializer_list +#include <iterator> // input_iterator_tag, iterator_traits +#include <memory> // allocator +#include <stdexcept> // for out_of_range +#include <type_traits> // enable_if, is_convertible +#include <utility> // pair +#include <vector> // vector + +// #include <nlohmann/detail/macro_scope.hpp> + + +namespace nlohmann +{ + +/// ordered_map: a minimal map-like container that preserves insertion order +/// for use within nlohmann::basic_json<ordered_map> +template <class Key, class T, class IgnoredLess = std::less<Key>, + class Allocator = std::allocator<std::pair<const Key, T>>> + struct ordered_map : std::vector<std::pair<const Key, T>, Allocator> +{ + using key_type = Key; + using mapped_type = T; + using Container = std::vector<std::pair<const Key, T>, Allocator>; + using iterator = typename Container::iterator; + using const_iterator = typename Container::const_iterator; + using size_type = typename Container::size_type; + using value_type = typename Container::value_type; +#ifdef JSON_HAS_CPP_14 + using key_compare = std::equal_to<>; +#else + using key_compare = std::equal_to<Key>; +#endif + + // Explicit constructors instead of `using Container::Container` + // otherwise older compilers choke on it (GCC <= 5.5, xcode <= 9.4) + ordered_map() noexcept(noexcept(Container())) : Container{} {} + explicit ordered_map(const Allocator& alloc) noexcept(noexcept(Container(alloc))) : Container{alloc} {} + template <class It> + ordered_map(It first, It last, const Allocator& alloc = Allocator()) + : Container{first, last, alloc} {} + ordered_map(std::initializer_list<value_type> init, const Allocator& alloc = Allocator() ) + : Container{init, alloc} {} + + std::pair<iterator, bool> emplace(const key_type& key, T&& t) + { + for (auto it = this->begin(); it != this->end(); ++it) + { + if (m_compare(it->first, key)) + { + return {it, false}; + } + } + Container::emplace_back(key, t); + return {--this->end(), true}; + } + + T& operator[](const Key& key) + { + return emplace(key, T{}).first->second; + } + + const T& operator[](const Key& key) const + { + return at(key); + } + + T& at(const Key& key) + { + for (auto it = this->begin(); it != this->end(); ++it) + { + if (m_compare(it->first, key)) + { + return it->second; + } + } + + JSON_THROW(std::out_of_range("key not found")); + } + + const T& at(const Key& key) const + { + for (auto it 
= this->begin(); it != this->end(); ++it) + { + if (m_compare(it->first, key)) + { + return it->second; + } + } + + JSON_THROW(std::out_of_range("key not found")); + } + + size_type erase(const Key& key) + { + for (auto it = this->begin(); it != this->end(); ++it) + { + if (m_compare(it->first, key)) + { + // Since we cannot move const Keys, re-construct them in place + for (auto next = it; ++next != this->end(); ++it) + { + it->~value_type(); // Destroy but keep allocation + new (&*it) value_type{std::move(*next)}; + } + Container::pop_back(); + return 1; + } + } + return 0; + } + + iterator erase(iterator pos) + { + return erase(pos, std::next(pos)); + } + + iterator erase(iterator first, iterator last) + { + const auto elements_affected = std::distance(first, last); + const auto offset = std::distance(Container::begin(), first); + + // This is the start situation. We need to delete elements_affected + // elements (3 in this example: e, f, g), and need to return an + // iterator past the last deleted element (h in this example). + // Note that offset is the distance from the start of the vector + // to first. We will need this later. + + // [ a, b, c, d, e, f, g, h, i, j ] + // ^ ^ + // first last + + // Since we cannot move const Keys, we re-construct them in place. + // We start at first and re-construct (viz. copy) the elements from + // the back of the vector. Example for first iteration: + + // ,--------. + // v | destroy e and re-construct with h + // [ a, b, c, d, e, f, g, h, i, j ] + // ^ ^ + // it it + elements_affected + + for (auto it = first; std::next(it, elements_affected) != Container::end(); ++it) + { + it->~value_type(); // destroy but keep allocation + new (&*it) value_type{std::move(*std::next(it, elements_affected))}; // "move" next element to it + } + + // [ a, b, c, d, h, i, j, h, i, j ] + // ^ ^ + // first last + + // remove the unneeded elements at the end of the vector + Container::resize(this->size() - static_cast<size_type>(elements_affected)); + + // [ a, b, c, d, h, i, j ] + // ^ ^ + // first last + + // first is now pointing past the last deleted element, but we cannot + // use this iterator, because it may have been invalidated by the + // resize call. Instead, we can return begin() + offset. 
+ return Container::begin() + offset; + } + + size_type count(const Key& key) const + { + for (auto it = this->begin(); it != this->end(); ++it) + { + if (m_compare(it->first, key)) + { + return 1; + } + } + return 0; + } + + iterator find(const Key& key) + { + for (auto it = this->begin(); it != this->end(); ++it) + { + if (m_compare(it->first, key)) + { + return it; + } + } + return Container::end(); + } + + const_iterator find(const Key& key) const + { + for (auto it = this->begin(); it != this->end(); ++it) + { + if (m_compare(it->first, key)) + { + return it; + } + } + return Container::end(); + } + + std::pair<iterator, bool> insert( value_type&& value ) + { + return emplace(value.first, std::move(value.second)); + } + + std::pair<iterator, bool> insert( const value_type& value ) + { + for (auto it = this->begin(); it != this->end(); ++it) + { + if (m_compare(it->first, value.first)) + { + return {it, false}; + } + } + Container::push_back(value); + return {--this->end(), true}; + } + + template<typename InputIt> + using require_input_iter = typename std::enable_if<std::is_convertible<typename std::iterator_traits<InputIt>::iterator_category, + std::input_iterator_tag>::value>::type; + + template<typename InputIt, typename = require_input_iter<InputIt>> + void insert(InputIt first, InputIt last) + { + for (auto it = first; it != last; ++it) + { + insert(*it); + } + } + +private: + JSON_NO_UNIQUE_ADDRESS key_compare m_compare = key_compare(); +}; + +} // namespace nlohmann + + +#if defined(JSON_HAS_CPP_17) + #include <any> + #include <string_view> +#endif + +/*! +@brief namespace for Niels Lohmann +@see https://github.com/nlohmann +@since version 1.0.0 +*/ +namespace nlohmann +{ + +/*! +@brief a class to store JSON values + +@internal +@invariant The member variables @a m_value and @a m_type have the following +relationship: +- If `m_type == value_t::object`, then `m_value.object != nullptr`. +- If `m_type == value_t::array`, then `m_value.array != nullptr`. +- If `m_type == value_t::string`, then `m_value.string != nullptr`. +The invariants are checked by member function assert_invariant(). 
+ +@note ObjectType trick from https://stackoverflow.com/a/9860911 +@endinternal + +@since version 1.0.0 + +@nosubgrouping +*/ +NLOHMANN_BASIC_JSON_TPL_DECLARATION +class basic_json // NOLINT(cppcoreguidelines-special-member-functions,hicpp-special-member-functions) +{ + private: + template<detail::value_t> friend struct detail::external_constructor; + + template<typename> + friend class ::nlohmann::json_pointer; + // can be restored when json_pointer backwards compatibility is removed + // friend ::nlohmann::json_pointer<StringType>; + + template<typename BasicJsonType, typename InputType> + friend class ::nlohmann::detail::parser; + friend ::nlohmann::detail::serializer<basic_json>; + template<typename BasicJsonType> + friend class ::nlohmann::detail::iter_impl; + template<typename BasicJsonType, typename CharType> + friend class ::nlohmann::detail::binary_writer; + template<typename BasicJsonType, typename InputType, typename SAX> + friend class ::nlohmann::detail::binary_reader; + template<typename BasicJsonType> + friend class ::nlohmann::detail::json_sax_dom_parser; + template<typename BasicJsonType> + friend class ::nlohmann::detail::json_sax_dom_callback_parser; + friend class ::nlohmann::detail::exception; + + /// workaround type for MSVC + using basic_json_t = NLOHMANN_BASIC_JSON_TPL; + + JSON_PRIVATE_UNLESS_TESTED: + // convenience aliases for types residing in namespace detail; + using lexer = ::nlohmann::detail::lexer_base<basic_json>; + + template<typename InputAdapterType> + static ::nlohmann::detail::parser<basic_json, InputAdapterType> parser( + InputAdapterType adapter, + detail::parser_callback_t<basic_json>cb = nullptr, + const bool allow_exceptions = true, + const bool ignore_comments = false + ) + { + return ::nlohmann::detail::parser<basic_json, InputAdapterType>(std::move(adapter), + std::move(cb), allow_exceptions, ignore_comments); + } + + private: + using primitive_iterator_t = ::nlohmann::detail::primitive_iterator_t; + template<typename BasicJsonType> + using internal_iterator = ::nlohmann::detail::internal_iterator<BasicJsonType>; + template<typename BasicJsonType> + using iter_impl = ::nlohmann::detail::iter_impl<BasicJsonType>; + template<typename Iterator> + using iteration_proxy = ::nlohmann::detail::iteration_proxy<Iterator>; + template<typename Base> using json_reverse_iterator = ::nlohmann::detail::json_reverse_iterator<Base>; + + template<typename CharType> + using output_adapter_t = ::nlohmann::detail::output_adapter_t<CharType>; + + template<typename InputType> + using binary_reader = ::nlohmann::detail::binary_reader<basic_json, InputType>; + template<typename CharType> using binary_writer = ::nlohmann::detail::binary_writer<basic_json, CharType>; + + JSON_PRIVATE_UNLESS_TESTED: + using serializer = ::nlohmann::detail::serializer<basic_json>; + + public: + using value_t = detail::value_t; + /// JSON Pointer, see @ref nlohmann::json_pointer + using json_pointer = ::nlohmann::json_pointer<StringType>; + template<typename T, typename SFINAE> + using json_serializer = JSONSerializer<T, SFINAE>; + /// how to treat decoding errors + using error_handler_t = detail::error_handler_t; + /// how to treat CBOR tags + using cbor_tag_handler_t = detail::cbor_tag_handler_t; + /// helper type for initializer lists of basic_json values + using initializer_list_t = std::initializer_list<detail::json_ref<basic_json>>; + + using input_format_t = detail::input_format_t; + /// SAX interface type, see @ref nlohmann::json_sax + using json_sax_t = json_sax<basic_json>; + + 
//////////////// + // exceptions // + //////////////// + + /// @name exceptions + /// Classes to implement user-defined exceptions. + /// @{ + + using exception = detail::exception; + using parse_error = detail::parse_error; + using invalid_iterator = detail::invalid_iterator; + using type_error = detail::type_error; + using out_of_range = detail::out_of_range; + using other_error = detail::other_error; + + /// @} + + + ///////////////////// + // container types // + ///////////////////// + + /// @name container types + /// The canonic container types to use @ref basic_json like any other STL + /// container. + /// @{ + + /// the type of elements in a basic_json container + using value_type = basic_json; + + /// the type of an element reference + using reference = value_type&; + /// the type of an element const reference + using const_reference = const value_type&; + + /// a type to represent differences between iterators + using difference_type = std::ptrdiff_t; + /// a type to represent container sizes + using size_type = std::size_t; + + /// the allocator type + using allocator_type = AllocatorType<basic_json>; + + /// the type of an element pointer + using pointer = typename std::allocator_traits<allocator_type>::pointer; + /// the type of an element const pointer + using const_pointer = typename std::allocator_traits<allocator_type>::const_pointer; + + /// an iterator for a basic_json container + using iterator = iter_impl<basic_json>; + /// a const iterator for a basic_json container + using const_iterator = iter_impl<const basic_json>; + /// a reverse iterator for a basic_json container + using reverse_iterator = json_reverse_iterator<typename basic_json::iterator>; + /// a const reverse iterator for a basic_json container + using const_reverse_iterator = json_reverse_iterator<typename basic_json::const_iterator>; + + /// @} + + + /// @brief returns the allocator associated with the container + /// @sa https://json.nlohmann.me/api/basic_json/get_allocator/ + static allocator_type get_allocator() + { + return allocator_type(); + } + + /// @brief returns version information on the library + /// @sa https://json.nlohmann.me/api/basic_json/meta/ + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json meta() + { + basic_json result; + + result["copyright"] = "(C) 2013-2022 Niels Lohmann"; + result["name"] = "JSON for Modern C++"; + result["url"] = "https://github.com/nlohmann/json"; + result["version"]["string"] = + detail::concat(std::to_string(NLOHMANN_JSON_VERSION_MAJOR), '.', + std::to_string(NLOHMANN_JSON_VERSION_MINOR), '.', + std::to_string(NLOHMANN_JSON_VERSION_PATCH)); + result["version"]["major"] = NLOHMANN_JSON_VERSION_MAJOR; + result["version"]["minor"] = NLOHMANN_JSON_VERSION_MINOR; + result["version"]["patch"] = NLOHMANN_JSON_VERSION_PATCH; + +#ifdef _WIN32 + result["platform"] = "win32"; +#elif defined __linux__ + result["platform"] = "linux"; +#elif defined __APPLE__ + result["platform"] = "apple"; +#elif defined __unix__ + result["platform"] = "unix"; +#else + result["platform"] = "unknown"; +#endif + +#if defined(__ICC) || defined(__INTEL_COMPILER) + result["compiler"] = {{"family", "icc"}, {"version", __INTEL_COMPILER}}; +#elif defined(__clang__) + result["compiler"] = {{"family", "clang"}, {"version", __clang_version__}}; +#elif defined(__GNUC__) || defined(__GNUG__) + result["compiler"] = {{"family", "gcc"}, {"version", detail::concat( + std::to_string(__GNUC__), '.', + std::to_string(__GNUC_MINOR__), '.', + std::to_string(__GNUC_PATCHLEVEL__)) + } + }; +#elif 
defined(__HP_cc) || defined(__HP_aCC) + result["compiler"] = "hp" +#elif defined(__IBMCPP__) + result["compiler"] = {{"family", "ilecpp"}, {"version", __IBMCPP__}}; +#elif defined(_MSC_VER) + result["compiler"] = {{"family", "msvc"}, {"version", _MSC_VER}}; +#elif defined(__PGI) + result["compiler"] = {{"family", "pgcpp"}, {"version", __PGI}}; +#elif defined(__SUNPRO_CC) + result["compiler"] = {{"family", "sunpro"}, {"version", __SUNPRO_CC}}; +#else + result["compiler"] = {{"family", "unknown"}, {"version", "unknown"}}; +#endif + + +#if defined(_MSVC_LANG) + result["compiler"]["c++"] = std::to_string(_MSVC_LANG); +#elif defined(__cplusplus) + result["compiler"]["c++"] = std::to_string(__cplusplus); +#else + result["compiler"]["c++"] = "unknown"; +#endif + return result; + } + + + /////////////////////////// + // JSON value data types // + /////////////////////////// + + /// @name JSON value data types + /// The data types to store a JSON value. These types are derived from + /// the template arguments passed to class @ref basic_json. + /// @{ + + /// @brief default object key comparator type + /// The actual object key comparator type (@ref object_comparator_t) may be + /// different. + /// @sa https://json.nlohmann.me/api/basic_json/default_object_comparator_t/ +#if defined(JSON_HAS_CPP_14) + // use of transparent comparator avoids unnecessary repeated construction of temporaries + // in functions involving lookup by key with types other than object_t::key_type (aka. StringType) + using default_object_comparator_t = std::less<>; +#else + using default_object_comparator_t = std::less<StringType>; +#endif + + /// @brief a type for an object + /// @sa https://json.nlohmann.me/api/basic_json/object_t/ + using object_t = ObjectType<StringType, + basic_json, + default_object_comparator_t, + AllocatorType<std::pair<const StringType, + basic_json>>>; + + /// @brief a type for an array + /// @sa https://json.nlohmann.me/api/basic_json/array_t/ + using array_t = ArrayType<basic_json, AllocatorType<basic_json>>; + + /// @brief a type for a string + /// @sa https://json.nlohmann.me/api/basic_json/string_t/ + using string_t = StringType; + + /// @brief a type for a boolean + /// @sa https://json.nlohmann.me/api/basic_json/boolean_t/ + using boolean_t = BooleanType; + + /// @brief a type for a number (integer) + /// @sa https://json.nlohmann.me/api/basic_json/number_integer_t/ + using number_integer_t = NumberIntegerType; + + /// @brief a type for a number (unsigned) + /// @sa https://json.nlohmann.me/api/basic_json/number_unsigned_t/ + using number_unsigned_t = NumberUnsignedType; + + /// @brief a type for a number (floating-point) + /// @sa https://json.nlohmann.me/api/basic_json/number_float_t/ + using number_float_t = NumberFloatType; + + /// @brief a type for a packed binary type + /// @sa https://json.nlohmann.me/api/basic_json/binary_t/ + using binary_t = nlohmann::byte_container_with_subtype<BinaryType>; + + /// @brief object key comparator type + /// @sa https://json.nlohmann.me/api/basic_json/object_comparator_t/ + using object_comparator_t = detail::actual_object_comparator_t<basic_json>; + + /// @} + + private: + + /// helper for exception-safe object creation + template<typename T, typename... Args> + JSON_HEDLEY_RETURNS_NON_NULL + static T* create(Args&& ... 
args) + { + AllocatorType<T> alloc; + using AllocatorTraits = std::allocator_traits<AllocatorType<T>>; + + auto deleter = [&](T * obj) + { + AllocatorTraits::deallocate(alloc, obj, 1); + }; + std::unique_ptr<T, decltype(deleter)> obj(AllocatorTraits::allocate(alloc, 1), deleter); + AllocatorTraits::construct(alloc, obj.get(), std::forward<Args>(args)...); + JSON_ASSERT(obj != nullptr); + return obj.release(); + } + + //////////////////////// + // JSON value storage // + //////////////////////// + + JSON_PRIVATE_UNLESS_TESTED: + /*! + @brief a JSON value + + The actual storage for a JSON value of the @ref basic_json class. This + union combines the different storage types for the JSON value types + defined in @ref value_t. + + JSON type | value_t type | used type + --------- | --------------- | ------------------------ + object | object | pointer to @ref object_t + array | array | pointer to @ref array_t + string | string | pointer to @ref string_t + boolean | boolean | @ref boolean_t + number | number_integer | @ref number_integer_t + number | number_unsigned | @ref number_unsigned_t + number | number_float | @ref number_float_t + binary | binary | pointer to @ref binary_t + null | null | *no value is stored* + + @note Variable-length types (objects, arrays, and strings) are stored as + pointers. The size of the union should not exceed 64 bits if the default + value types are used. + + @since version 1.0.0 + */ + union json_value + { + /// object (stored with pointer to save storage) + object_t* object; + /// array (stored with pointer to save storage) + array_t* array; + /// string (stored with pointer to save storage) + string_t* string; + /// binary (stored with pointer to save storage) + binary_t* binary; + /// boolean + boolean_t boolean; + /// number (integer) + number_integer_t number_integer; + /// number (unsigned integer) + number_unsigned_t number_unsigned; + /// number (floating-point) + number_float_t number_float; + + /// default constructor (for null values) + json_value() = default; + /// constructor for booleans + json_value(boolean_t v) noexcept : boolean(v) {} + /// constructor for numbers (integer) + json_value(number_integer_t v) noexcept : number_integer(v) {} + /// constructor for numbers (unsigned) + json_value(number_unsigned_t v) noexcept : number_unsigned(v) {} + /// constructor for numbers (floating-point) + json_value(number_float_t v) noexcept : number_float(v) {} + /// constructor for empty values of a given type + json_value(value_t t) + { + switch (t) + { + case value_t::object: + { + object = create<object_t>(); + break; + } + + case value_t::array: + { + array = create<array_t>(); + break; + } + + case value_t::string: + { + string = create<string_t>(""); + break; + } + + case value_t::binary: + { + binary = create<binary_t>(); + break; + } + + case value_t::boolean: + { + boolean = static_cast<boolean_t>(false); + break; + } + + case value_t::number_integer: + { + number_integer = static_cast<number_integer_t>(0); + break; + } + + case value_t::number_unsigned: + { + number_unsigned = static_cast<number_unsigned_t>(0); + break; + } + + case value_t::number_float: + { + number_float = static_cast<number_float_t>(0.0); + break; + } + + case value_t::null: + { + object = nullptr; // silence warning, see #821 + break; + } + + case value_t::discarded: + default: + { + object = nullptr; // silence warning, see #821 + if (JSON_HEDLEY_UNLIKELY(t == value_t::null)) + { + JSON_THROW(other_error::create(500, "961c151d2e87f2686a955a9be24d316f1362bf21 3.10.5", 
nullptr)); // LCOV_EXCL_LINE + } + break; + } + } + } + + /// constructor for strings + json_value(const string_t& value) : string(create<string_t>(value)) {} + + /// constructor for rvalue strings + json_value(string_t&& value) : string(create<string_t>(std::move(value))) {} + + /// constructor for objects + json_value(const object_t& value) : object(create<object_t>(value)) {} + + /// constructor for rvalue objects + json_value(object_t&& value) : object(create<object_t>(std::move(value))) {} + + /// constructor for arrays + json_value(const array_t& value) : array(create<array_t>(value)) {} + + /// constructor for rvalue arrays + json_value(array_t&& value) : array(create<array_t>(std::move(value))) {} + + /// constructor for binary arrays + json_value(const typename binary_t::container_type& value) : binary(create<binary_t>(value)) {} + + /// constructor for rvalue binary arrays + json_value(typename binary_t::container_type&& value) : binary(create<binary_t>(std::move(value))) {} + + /// constructor for binary arrays (internal type) + json_value(const binary_t& value) : binary(create<binary_t>(value)) {} + + /// constructor for rvalue binary arrays (internal type) + json_value(binary_t&& value) : binary(create<binary_t>(std::move(value))) {} + + void destroy(value_t t) + { + if (t == value_t::array || t == value_t::object) + { + // flatten the current json_value to a heap-allocated stack + std::vector<basic_json> stack; + + // move the top-level items to stack + if (t == value_t::array) + { + stack.reserve(array->size()); + std::move(array->begin(), array->end(), std::back_inserter(stack)); + } + else + { + stack.reserve(object->size()); + for (auto&& it : *object) + { + stack.push_back(std::move(it.second)); + } + } + + while (!stack.empty()) + { + // move the last item to local variable to be processed + basic_json current_item(std::move(stack.back())); + stack.pop_back(); + + // if current_item is array/object, move + // its children to the stack to be processed later + if (current_item.is_array()) + { + std::move(current_item.m_value.array->begin(), current_item.m_value.array->end(), std::back_inserter(stack)); + + current_item.m_value.array->clear(); + } + else if (current_item.is_object()) + { + for (auto&& it : *current_item.m_value.object) + { + stack.push_back(std::move(it.second)); + } + + current_item.m_value.object->clear(); + } + + // it's now safe that current_item get destructed + // since it doesn't have any children + } + } + + switch (t) + { + case value_t::object: + { + AllocatorType<object_t> alloc; + std::allocator_traits<decltype(alloc)>::destroy(alloc, object); + std::allocator_traits<decltype(alloc)>::deallocate(alloc, object, 1); + break; + } + + case value_t::array: + { + AllocatorType<array_t> alloc; + std::allocator_traits<decltype(alloc)>::destroy(alloc, array); + std::allocator_traits<decltype(alloc)>::deallocate(alloc, array, 1); + break; + } + + case value_t::string: + { + AllocatorType<string_t> alloc; + std::allocator_traits<decltype(alloc)>::destroy(alloc, string); + std::allocator_traits<decltype(alloc)>::deallocate(alloc, string, 1); + break; + } + + case value_t::binary: + { + AllocatorType<binary_t> alloc; + std::allocator_traits<decltype(alloc)>::destroy(alloc, binary); + std::allocator_traits<decltype(alloc)>::deallocate(alloc, binary, 1); + break; + } + + case value_t::null: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::discarded: + default: + { + break; 
+ } + } + } + }; + + private: + /*! + @brief checks the class invariants + + This function asserts the class invariants. It needs to be called at the + end of every constructor to make sure that created objects respect the + invariant. Furthermore, it has to be called each time the type of a JSON + value is changed, because the invariant expresses a relationship between + @a m_type and @a m_value. + + Furthermore, the parent relation is checked for arrays and objects: If + @a check_parents true and the value is an array or object, then the + container's elements must have the current value as parent. + + @param[in] check_parents whether the parent relation should be checked. + The value is true by default and should only be set to false + during destruction of objects when the invariant does not + need to hold. + */ + void assert_invariant(bool check_parents = true) const noexcept + { + JSON_ASSERT(m_type != value_t::object || m_value.object != nullptr); + JSON_ASSERT(m_type != value_t::array || m_value.array != nullptr); + JSON_ASSERT(m_type != value_t::string || m_value.string != nullptr); + JSON_ASSERT(m_type != value_t::binary || m_value.binary != nullptr); + +#if JSON_DIAGNOSTICS + JSON_TRY + { + // cppcheck-suppress assertWithSideEffect + JSON_ASSERT(!check_parents || !is_structured() || std::all_of(begin(), end(), [this](const basic_json & j) + { + return j.m_parent == this; + })); + } + JSON_CATCH(...) {} // LCOV_EXCL_LINE +#endif + static_cast<void>(check_parents); + } + + void set_parents() + { +#if JSON_DIAGNOSTICS + switch (m_type) + { + case value_t::array: + { + for (auto& element : *m_value.array) + { + element.m_parent = this; + } + break; + } + + case value_t::object: + { + for (auto& element : *m_value.object) + { + element.second.m_parent = this; + } + break; + } + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + break; + } +#endif + } + + iterator set_parents(iterator it, typename iterator::difference_type count_set_parents) + { +#if JSON_DIAGNOSTICS + for (typename iterator::difference_type i = 0; i < count_set_parents; ++i) + { + (it + i)->m_parent = this; + } +#else + static_cast<void>(count_set_parents); +#endif + return it; + } + + reference set_parent(reference j, std::size_t old_capacity = static_cast<std::size_t>(-1)) + { +#if JSON_DIAGNOSTICS + if (old_capacity != static_cast<std::size_t>(-1)) + { + // see https://github.com/nlohmann/json/issues/2838 + JSON_ASSERT(type() == value_t::array); + if (JSON_HEDLEY_UNLIKELY(m_value.array->capacity() != old_capacity)) + { + // capacity has changed: update all parents + set_parents(); + return j; + } + } + + // ordered_json uses a vector internally, so pointers could have + // been invalidated; see https://github.com/nlohmann/json/issues/2962 +#ifdef JSON_HEDLEY_MSVC_VERSION +#pragma warning(push ) +#pragma warning(disable : 4127) // ignore warning to replace if with if constexpr +#endif + if (detail::is_ordered_map<object_t>::value) + { + set_parents(); + return j; + } +#ifdef JSON_HEDLEY_MSVC_VERSION +#pragma warning( pop ) +#endif + + j.m_parent = this; +#else + static_cast<void>(j); + static_cast<void>(old_capacity); +#endif + return j; + } + + public: + ////////////////////////// + // JSON parser callback // + ////////////////////////// + + /// @brief parser event types + /// @sa https://json.nlohmann.me/api/basic_json/parse_event_t/ + 
using parse_event_t = detail::parse_event_t; + + /// @brief per-element parser callback type + /// @sa https://json.nlohmann.me/api/basic_json/parser_callback_t/ + using parser_callback_t = detail::parser_callback_t<basic_json>; + + ////////////////// + // constructors // + ////////////////// + + /// @name constructors and destructors + /// Constructors of class @ref basic_json, copy/move constructor, copy + /// assignment, static functions creating objects, and the destructor. + /// @{ + + /// @brief create an empty value with a given type + /// @sa https://json.nlohmann.me/api/basic_json/basic_json/ + basic_json(const value_t v) + : m_type(v), m_value(v) + { + assert_invariant(); + } + + /// @brief create a null object + /// @sa https://json.nlohmann.me/api/basic_json/basic_json/ + basic_json(std::nullptr_t = nullptr) noexcept // NOLINT(bugprone-exception-escape) + : basic_json(value_t::null) + { + assert_invariant(); + } + + /// @brief create a JSON value from compatible types + /// @sa https://json.nlohmann.me/api/basic_json/basic_json/ + template < typename CompatibleType, + typename U = detail::uncvref_t<CompatibleType>, + detail::enable_if_t < + !detail::is_basic_json<U>::value && detail::is_compatible_type<basic_json_t, U>::value, int > = 0 > + basic_json(CompatibleType && val) noexcept(noexcept( // NOLINT(bugprone-forwarding-reference-overload,bugprone-exception-escape) + JSONSerializer<U>::to_json(std::declval<basic_json_t&>(), + std::forward<CompatibleType>(val)))) + { + JSONSerializer<U>::to_json(*this, std::forward<CompatibleType>(val)); + set_parents(); + assert_invariant(); + } + + /// @brief create a JSON value from an existing one + /// @sa https://json.nlohmann.me/api/basic_json/basic_json/ + template < typename BasicJsonType, + detail::enable_if_t < + detail::is_basic_json<BasicJsonType>::value&& !std::is_same<basic_json, BasicJsonType>::value, int > = 0 > + basic_json(const BasicJsonType& val) + { + using other_boolean_t = typename BasicJsonType::boolean_t; + using other_number_float_t = typename BasicJsonType::number_float_t; + using other_number_integer_t = typename BasicJsonType::number_integer_t; + using other_number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using other_string_t = typename BasicJsonType::string_t; + using other_object_t = typename BasicJsonType::object_t; + using other_array_t = typename BasicJsonType::array_t; + using other_binary_t = typename BasicJsonType::binary_t; + + switch (val.type()) + { + case value_t::boolean: + JSONSerializer<other_boolean_t>::to_json(*this, val.template get<other_boolean_t>()); + break; + case value_t::number_float: + JSONSerializer<other_number_float_t>::to_json(*this, val.template get<other_number_float_t>()); + break; + case value_t::number_integer: + JSONSerializer<other_number_integer_t>::to_json(*this, val.template get<other_number_integer_t>()); + break; + case value_t::number_unsigned: + JSONSerializer<other_number_unsigned_t>::to_json(*this, val.template get<other_number_unsigned_t>()); + break; + case value_t::string: + JSONSerializer<other_string_t>::to_json(*this, val.template get_ref<const other_string_t&>()); + break; + case value_t::object: + JSONSerializer<other_object_t>::to_json(*this, val.template get_ref<const other_object_t&>()); + break; + case value_t::array: + JSONSerializer<other_array_t>::to_json(*this, val.template get_ref<const other_array_t&>()); + break; + case value_t::binary: + JSONSerializer<other_binary_t>::to_json(*this, val.template get_ref<const other_binary_t&>()); + 
break; + case value_t::null: + *this = nullptr; + break; + case value_t::discarded: + m_type = value_t::discarded; + break; + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) LCOV_EXCL_LINE + } + JSON_ASSERT(m_type == val.type()); + set_parents(); + assert_invariant(); + } + + /// @brief create a container (array or object) from an initializer list + /// @sa https://json.nlohmann.me/api/basic_json/basic_json/ + basic_json(initializer_list_t init, + bool type_deduction = true, + value_t manual_type = value_t::array) + { + // check if each element is an array with two elements whose first + // element is a string + bool is_an_object = std::all_of(init.begin(), init.end(), + [](const detail::json_ref<basic_json>& element_ref) + { + return element_ref->is_array() && element_ref->size() == 2 && (*element_ref)[0].is_string(); + }); + + // adjust type if type deduction is not wanted + if (!type_deduction) + { + // if array is wanted, do not create an object though possible + if (manual_type == value_t::array) + { + is_an_object = false; + } + + // if object is wanted but impossible, throw an exception + if (JSON_HEDLEY_UNLIKELY(manual_type == value_t::object && !is_an_object)) + { + JSON_THROW(type_error::create(301, "cannot create object from initializer list", nullptr)); + } + } + + if (is_an_object) + { + // the initializer list is a list of pairs -> create object + m_type = value_t::object; + m_value = value_t::object; + + for (auto& element_ref : init) + { + auto element = element_ref.moved_or_copied(); + m_value.object->emplace( + std::move(*((*element.m_value.array)[0].m_value.string)), + std::move((*element.m_value.array)[1])); + } + } + else + { + // the initializer list describes an array -> create array + m_type = value_t::array; + m_value.array = create<array_t>(init.begin(), init.end()); + } + + set_parents(); + assert_invariant(); + } + + /// @brief explicitly create a binary array (without subtype) + /// @sa https://json.nlohmann.me/api/basic_json/binary/ + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json binary(const typename binary_t::container_type& init) + { + auto res = basic_json(); + res.m_type = value_t::binary; + res.m_value = init; + return res; + } + + /// @brief explicitly create a binary array (with subtype) + /// @sa https://json.nlohmann.me/api/basic_json/binary/ + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json binary(const typename binary_t::container_type& init, typename binary_t::subtype_type subtype) + { + auto res = basic_json(); + res.m_type = value_t::binary; + res.m_value = binary_t(init, subtype); + return res; + } + + /// @brief explicitly create a binary array + /// @sa https://json.nlohmann.me/api/basic_json/binary/ + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json binary(typename binary_t::container_type&& init) + { + auto res = basic_json(); + res.m_type = value_t::binary; + res.m_value = std::move(init); + return res; + } + + /// @brief explicitly create a binary array (with subtype) + /// @sa https://json.nlohmann.me/api/basic_json/binary/ + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json binary(typename binary_t::container_type&& init, typename binary_t::subtype_type subtype) + { + auto res = basic_json(); + res.m_type = value_t::binary; + res.m_value = binary_t(std::move(init), subtype); + return res; + } + + /// @brief explicitly create an array from an initializer list + /// @sa https://json.nlohmann.me/api/basic_json/array/ + JSON_HEDLEY_WARN_UNUSED_RESULT + static 
basic_json array(initializer_list_t init = {}) + { + return basic_json(init, false, value_t::array); + } + + /// @brief explicitly create an object from an initializer list + /// @sa https://json.nlohmann.me/api/basic_json/object/ + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json object(initializer_list_t init = {}) + { + return basic_json(init, false, value_t::object); + } + + /// @brief construct an array with count copies of given value + /// @sa https://json.nlohmann.me/api/basic_json/basic_json/ + basic_json(size_type cnt, const basic_json& val) + : m_type(value_t::array) + { + m_value.array = create<array_t>(cnt, val); + set_parents(); + assert_invariant(); + } + + /// @brief construct a JSON container given an iterator range + /// @sa https://json.nlohmann.me/api/basic_json/basic_json/ + template < class InputIT, typename std::enable_if < + std::is_same<InputIT, typename basic_json_t::iterator>::value || + std::is_same<InputIT, typename basic_json_t::const_iterator>::value, int >::type = 0 > + basic_json(InputIT first, InputIT last) + { + JSON_ASSERT(first.m_object != nullptr); + JSON_ASSERT(last.m_object != nullptr); + + // make sure iterator fits the current value + if (JSON_HEDLEY_UNLIKELY(first.m_object != last.m_object)) + { + JSON_THROW(invalid_iterator::create(201, "iterators are not compatible", nullptr)); + } + + // copy type from first iterator + m_type = first.m_object->m_type; + + // check if iterator range is complete for primitive values + switch (m_type) + { + case value_t::boolean: + case value_t::number_float: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::string: + { + if (JSON_HEDLEY_UNLIKELY(!first.m_it.primitive_iterator.is_begin() + || !last.m_it.primitive_iterator.is_end())) + { + JSON_THROW(invalid_iterator::create(204, "iterators out of range", first.m_object)); + } + break; + } + + case value_t::null: + case value_t::object: + case value_t::array: + case value_t::binary: + case value_t::discarded: + default: + break; + } + + switch (m_type) + { + case value_t::number_integer: + { + m_value.number_integer = first.m_object->m_value.number_integer; + break; + } + + case value_t::number_unsigned: + { + m_value.number_unsigned = first.m_object->m_value.number_unsigned; + break; + } + + case value_t::number_float: + { + m_value.number_float = first.m_object->m_value.number_float; + break; + } + + case value_t::boolean: + { + m_value.boolean = first.m_object->m_value.boolean; + break; + } + + case value_t::string: + { + m_value = *first.m_object->m_value.string; + break; + } + + case value_t::object: + { + m_value.object = create<object_t>(first.m_it.object_iterator, + last.m_it.object_iterator); + break; + } + + case value_t::array: + { + m_value.array = create<array_t>(first.m_it.array_iterator, + last.m_it.array_iterator); + break; + } + + case value_t::binary: + { + m_value = *first.m_object->m_value.binary; + break; + } + + case value_t::null: + case value_t::discarded: + default: + JSON_THROW(invalid_iterator::create(206, detail::concat("cannot construct with iterators from ", first.m_object->type_name()), first.m_object)); + } + + set_parents(); + assert_invariant(); + } + + + /////////////////////////////////////// + // other constructors and destructor // + /////////////////////////////////////// + + template<typename JsonRef, + detail::enable_if_t<detail::conjunction<detail::is_json_ref<JsonRef>, + std::is_same<typename JsonRef::value_type, basic_json>>::value, int> = 0 > + basic_json(const JsonRef& ref) : 
basic_json(ref.moved_or_copied()) {} + + /// @brief copy constructor + /// @sa https://json.nlohmann.me/api/basic_json/basic_json/ + basic_json(const basic_json& other) + : m_type(other.m_type) + { + // check of passed value is valid + other.assert_invariant(); + + switch (m_type) + { + case value_t::object: + { + m_value = *other.m_value.object; + break; + } + + case value_t::array: + { + m_value = *other.m_value.array; + break; + } + + case value_t::string: + { + m_value = *other.m_value.string; + break; + } + + case value_t::boolean: + { + m_value = other.m_value.boolean; + break; + } + + case value_t::number_integer: + { + m_value = other.m_value.number_integer; + break; + } + + case value_t::number_unsigned: + { + m_value = other.m_value.number_unsigned; + break; + } + + case value_t::number_float: + { + m_value = other.m_value.number_float; + break; + } + + case value_t::binary: + { + m_value = *other.m_value.binary; + break; + } + + case value_t::null: + case value_t::discarded: + default: + break; + } + + set_parents(); + assert_invariant(); + } + + /// @brief move constructor + /// @sa https://json.nlohmann.me/api/basic_json/basic_json/ + basic_json(basic_json&& other) noexcept + : m_type(std::move(other.m_type)), + m_value(std::move(other.m_value)) + { + // check that passed value is valid + other.assert_invariant(false); + + // invalidate payload + other.m_type = value_t::null; + other.m_value = {}; + + set_parents(); + assert_invariant(); + } + + /// @brief copy assignment + /// @sa https://json.nlohmann.me/api/basic_json/operator=/ + basic_json& operator=(basic_json other) noexcept ( + std::is_nothrow_move_constructible<value_t>::value&& + std::is_nothrow_move_assignable<value_t>::value&& + std::is_nothrow_move_constructible<json_value>::value&& + std::is_nothrow_move_assignable<json_value>::value + ) + { + // check that passed value is valid + other.assert_invariant(); + + using std::swap; + swap(m_type, other.m_type); + swap(m_value, other.m_value); + + set_parents(); + assert_invariant(); + return *this; + } + + /// @brief destructor + /// @sa https://json.nlohmann.me/api/basic_json/~basic_json/ + ~basic_json() noexcept + { + assert_invariant(false); + m_value.destroy(m_type); + } + + /// @} + + public: + /////////////////////// + // object inspection // + /////////////////////// + + /// @name object inspection + /// Functions to inspect the type of a JSON value. 
+ /// @{ + + /// @brief serialization + /// @sa https://json.nlohmann.me/api/basic_json/dump/ + string_t dump(const int indent = -1, + const char indent_char = ' ', + const bool ensure_ascii = false, + const error_handler_t error_handler = error_handler_t::strict) const + { + string_t result; + serializer s(detail::output_adapter<char, string_t>(result), indent_char, error_handler); + + if (indent >= 0) + { + s.dump(*this, true, ensure_ascii, static_cast<unsigned int>(indent)); + } + else + { + s.dump(*this, false, ensure_ascii, 0); + } + + return result; + } + + /// @brief return the type of the JSON value (explicit) + /// @sa https://json.nlohmann.me/api/basic_json/type/ + constexpr value_t type() const noexcept + { + return m_type; + } + + /// @brief return whether type is primitive + /// @sa https://json.nlohmann.me/api/basic_json/is_primitive/ + constexpr bool is_primitive() const noexcept + { + return is_null() || is_string() || is_boolean() || is_number() || is_binary(); + } + + /// @brief return whether type is structured + /// @sa https://json.nlohmann.me/api/basic_json/is_structured/ + constexpr bool is_structured() const noexcept + { + return is_array() || is_object(); + } + + /// @brief return whether value is null + /// @sa https://json.nlohmann.me/api/basic_json/is_null/ + constexpr bool is_null() const noexcept + { + return m_type == value_t::null; + } + + /// @brief return whether value is a boolean + /// @sa https://json.nlohmann.me/api/basic_json/is_boolean/ + constexpr bool is_boolean() const noexcept + { + return m_type == value_t::boolean; + } + + /// @brief return whether value is a number + /// @sa https://json.nlohmann.me/api/basic_json/is_number/ + constexpr bool is_number() const noexcept + { + return is_number_integer() || is_number_float(); + } + + /// @brief return whether value is an integer number + /// @sa https://json.nlohmann.me/api/basic_json/is_number_integer/ + constexpr bool is_number_integer() const noexcept + { + return m_type == value_t::number_integer || m_type == value_t::number_unsigned; + } + + /// @brief return whether value is an unsigned integer number + /// @sa https://json.nlohmann.me/api/basic_json/is_number_unsigned/ + constexpr bool is_number_unsigned() const noexcept + { + return m_type == value_t::number_unsigned; + } + + /// @brief return whether value is a floating-point number + /// @sa https://json.nlohmann.me/api/basic_json/is_number_float/ + constexpr bool is_number_float() const noexcept + { + return m_type == value_t::number_float; + } + + /// @brief return whether value is an object + /// @sa https://json.nlohmann.me/api/basic_json/is_object/ + constexpr bool is_object() const noexcept + { + return m_type == value_t::object; + } + + /// @brief return whether value is an array + /// @sa https://json.nlohmann.me/api/basic_json/is_array/ + constexpr bool is_array() const noexcept + { + return m_type == value_t::array; + } + + /// @brief return whether value is a string + /// @sa https://json.nlohmann.me/api/basic_json/is_string/ + constexpr bool is_string() const noexcept + { + return m_type == value_t::string; + } + + /// @brief return whether value is a binary array + /// @sa https://json.nlohmann.me/api/basic_json/is_binary/ + constexpr bool is_binary() const noexcept + { + return m_type == value_t::binary; + } + + /// @brief return whether value is discarded + /// @sa https://json.nlohmann.me/api/basic_json/is_discarded/ + constexpr bool is_discarded() const noexcept + { + return m_type == value_t::discarded; + } + + /// 
@brief return the type of the JSON value (implicit) + /// @sa https://json.nlohmann.me/api/basic_json/operator_value_t/ + constexpr operator value_t() const noexcept + { + return m_type; + } + + /// @} + + private: + ////////////////// + // value access // + ////////////////// + + /// get a boolean (explicit) + boolean_t get_impl(boolean_t* /*unused*/) const + { + if (JSON_HEDLEY_LIKELY(is_boolean())) + { + return m_value.boolean; + } + + JSON_THROW(type_error::create(302, detail::concat("type must be boolean, but is ", type_name()), this)); + } + + /// get a pointer to the value (object) + object_t* get_impl_ptr(object_t* /*unused*/) noexcept + { + return is_object() ? m_value.object : nullptr; + } + + /// get a pointer to the value (object) + constexpr const object_t* get_impl_ptr(const object_t* /*unused*/) const noexcept + { + return is_object() ? m_value.object : nullptr; + } + + /// get a pointer to the value (array) + array_t* get_impl_ptr(array_t* /*unused*/) noexcept + { + return is_array() ? m_value.array : nullptr; + } + + /// get a pointer to the value (array) + constexpr const array_t* get_impl_ptr(const array_t* /*unused*/) const noexcept + { + return is_array() ? m_value.array : nullptr; + } + + /// get a pointer to the value (string) + string_t* get_impl_ptr(string_t* /*unused*/) noexcept + { + return is_string() ? m_value.string : nullptr; + } + + /// get a pointer to the value (string) + constexpr const string_t* get_impl_ptr(const string_t* /*unused*/) const noexcept + { + return is_string() ? m_value.string : nullptr; + } + + /// get a pointer to the value (boolean) + boolean_t* get_impl_ptr(boolean_t* /*unused*/) noexcept + { + return is_boolean() ? &m_value.boolean : nullptr; + } + + /// get a pointer to the value (boolean) + constexpr const boolean_t* get_impl_ptr(const boolean_t* /*unused*/) const noexcept + { + return is_boolean() ? &m_value.boolean : nullptr; + } + + /// get a pointer to the value (integer number) + number_integer_t* get_impl_ptr(number_integer_t* /*unused*/) noexcept + { + return is_number_integer() ? &m_value.number_integer : nullptr; + } + + /// get a pointer to the value (integer number) + constexpr const number_integer_t* get_impl_ptr(const number_integer_t* /*unused*/) const noexcept + { + return is_number_integer() ? &m_value.number_integer : nullptr; + } + + /// get a pointer to the value (unsigned number) + number_unsigned_t* get_impl_ptr(number_unsigned_t* /*unused*/) noexcept + { + return is_number_unsigned() ? &m_value.number_unsigned : nullptr; + } + + /// get a pointer to the value (unsigned number) + constexpr const number_unsigned_t* get_impl_ptr(const number_unsigned_t* /*unused*/) const noexcept + { + return is_number_unsigned() ? &m_value.number_unsigned : nullptr; + } + + /// get a pointer to the value (floating-point number) + number_float_t* get_impl_ptr(number_float_t* /*unused*/) noexcept + { + return is_number_float() ? &m_value.number_float : nullptr; + } + + /// get a pointer to the value (floating-point number) + constexpr const number_float_t* get_impl_ptr(const number_float_t* /*unused*/) const noexcept + { + return is_number_float() ? &m_value.number_float : nullptr; + } + + /// get a pointer to the value (binary) + binary_t* get_impl_ptr(binary_t* /*unused*/) noexcept + { + return is_binary() ? m_value.binary : nullptr; + } + + /// get a pointer to the value (binary) + constexpr const binary_t* get_impl_ptr(const binary_t* /*unused*/) const noexcept + { + return is_binary() ? m_value.binary : nullptr; + } + + /*! 
+ @brief helper function to implement get_ref() + + This function helps to implement get_ref() without code duplication for + const and non-const overloads + + @tparam ThisType will be deduced as `basic_json` or `const basic_json` + + @throw type_error.303 if ReferenceType does not match underlying value + type of the current JSON + */ + template<typename ReferenceType, typename ThisType> + static ReferenceType get_ref_impl(ThisType& obj) + { + // delegate the call to get_ptr<>() + auto* ptr = obj.template get_ptr<typename std::add_pointer<ReferenceType>::type>(); + + if (JSON_HEDLEY_LIKELY(ptr != nullptr)) + { + return *ptr; + } + + JSON_THROW(type_error::create(303, detail::concat("incompatible ReferenceType for get_ref, actual type is ", obj.type_name()), &obj)); + } + + public: + /// @name value access + /// Direct access to the stored value of a JSON value. + /// @{ + + /// @brief get a pointer value (implicit) + /// @sa https://json.nlohmann.me/api/basic_json/get_ptr/ + template<typename PointerType, typename std::enable_if< + std::is_pointer<PointerType>::value, int>::type = 0> + auto get_ptr() noexcept -> decltype(std::declval<basic_json_t&>().get_impl_ptr(std::declval<PointerType>())) + { + // delegate the call to get_impl_ptr<>() + return get_impl_ptr(static_cast<PointerType>(nullptr)); + } + + /// @brief get a pointer value (implicit) + /// @sa https://json.nlohmann.me/api/basic_json/get_ptr/ + template < typename PointerType, typename std::enable_if < + std::is_pointer<PointerType>::value&& + std::is_const<typename std::remove_pointer<PointerType>::type>::value, int >::type = 0 > + constexpr auto get_ptr() const noexcept -> decltype(std::declval<const basic_json_t&>().get_impl_ptr(std::declval<PointerType>())) + { + // delegate the call to get_impl_ptr<>() const + return get_impl_ptr(static_cast<PointerType>(nullptr)); + } + + private: + /*! + @brief get a value (explicit) + + Explicit type conversion between the JSON value and a compatible value + which is [CopyConstructible](https://en.cppreference.com/w/cpp/named_req/CopyConstructible) + and [DefaultConstructible](https://en.cppreference.com/w/cpp/named_req/DefaultConstructible). + The value is converted by calling the @ref json_serializer<ValueType> + `from_json()` method. + + The function is equivalent to executing + @code {.cpp} + ValueType ret; + JSONSerializer<ValueType>::from_json(*this, ret); + return ret; + @endcode + + This overloads is chosen if: + - @a ValueType is not @ref basic_json, + - @ref json_serializer<ValueType> has a `from_json()` method of the form + `void from_json(const basic_json&, ValueType&)`, and + - @ref json_serializer<ValueType> does not have a `from_json()` method of + the form `ValueType from_json(const basic_json&)` + + @tparam ValueType the returned value type + + @return copy of the JSON value, converted to @a ValueType + + @throw what @ref json_serializer<ValueType> `from_json()` method throws + + @liveexample{The example below shows several conversions from JSON values + to other types. 
There a few things to note: (1) Floating-point numbers can + be converted to integers\, (2) A JSON array can be converted to a standard + `std::vector<short>`\, (3) A JSON object can be converted to C++ + associative containers such as `std::unordered_map<std::string\, + json>`.,get__ValueType_const} + + @since version 2.1.0 + */ + template < typename ValueType, + detail::enable_if_t < + detail::is_default_constructible<ValueType>::value&& + detail::has_from_json<basic_json_t, ValueType>::value, + int > = 0 > + ValueType get_impl(detail::priority_tag<0> /*unused*/) const noexcept(noexcept( + JSONSerializer<ValueType>::from_json(std::declval<const basic_json_t&>(), std::declval<ValueType&>()))) + { + auto ret = ValueType(); + JSONSerializer<ValueType>::from_json(*this, ret); + return ret; + } + + /*! + @brief get a value (explicit); special case + + Explicit type conversion between the JSON value and a compatible value + which is **not** [CopyConstructible](https://en.cppreference.com/w/cpp/named_req/CopyConstructible) + and **not** [DefaultConstructible](https://en.cppreference.com/w/cpp/named_req/DefaultConstructible). + The value is converted by calling the @ref json_serializer<ValueType> + `from_json()` method. + + The function is equivalent to executing + @code {.cpp} + return JSONSerializer<ValueType>::from_json(*this); + @endcode + + This overloads is chosen if: + - @a ValueType is not @ref basic_json and + - @ref json_serializer<ValueType> has a `from_json()` method of the form + `ValueType from_json(const basic_json&)` + + @note If @ref json_serializer<ValueType> has both overloads of + `from_json()`, this one is chosen. + + @tparam ValueType the returned value type + + @return copy of the JSON value, converted to @a ValueType + + @throw what @ref json_serializer<ValueType> `from_json()` method throws + + @since version 2.1.0 + */ + template < typename ValueType, + detail::enable_if_t < + detail::has_non_default_from_json<basic_json_t, ValueType>::value, + int > = 0 > + ValueType get_impl(detail::priority_tag<1> /*unused*/) const noexcept(noexcept( + JSONSerializer<ValueType>::from_json(std::declval<const basic_json_t&>()))) + { + return JSONSerializer<ValueType>::from_json(*this); + } + + /*! + @brief get special-case overload + + This overloads converts the current @ref basic_json in a different + @ref basic_json type + + @tparam BasicJsonType == @ref basic_json + + @return a copy of *this, converted into @a BasicJsonType + + @complexity Depending on the implementation of the called `from_json()` + method. + + @since version 3.2.0 + */ + template < typename BasicJsonType, + detail::enable_if_t < + detail::is_basic_json<BasicJsonType>::value, + int > = 0 > + BasicJsonType get_impl(detail::priority_tag<2> /*unused*/) const + { + return *this; + } + + /*! + @brief get special-case overload + + This overloads avoids a lot of template boilerplate, it can be seen as the + identity method + + @tparam BasicJsonType == @ref basic_json + + @return a copy of *this + + @complexity Constant. + + @since version 2.1.0 + */ + template<typename BasicJsonType, + detail::enable_if_t< + std::is_same<BasicJsonType, basic_json_t>::value, + int> = 0> + basic_json get_impl(detail::priority_tag<3> /*unused*/) const + { + return *this; + } + + /*! 
+ @brief get a pointer value (explicit) + @copydoc get() + */ + template<typename PointerType, + detail::enable_if_t< + std::is_pointer<PointerType>::value, + int> = 0> + constexpr auto get_impl(detail::priority_tag<4> /*unused*/) const noexcept + -> decltype(std::declval<const basic_json_t&>().template get_ptr<PointerType>()) + { + // delegate the call to get_ptr + return get_ptr<PointerType>(); + } + + public: + /*! + @brief get a (pointer) value (explicit) + + Performs explicit type conversion between the JSON value and a compatible value if required. + + - If the requested type is a pointer to the internally stored JSON value that pointer is returned. + No copies are made. + + - If the requested type is the current @ref basic_json, or a different @ref basic_json convertible + from the current @ref basic_json. + + - Otherwise the value is converted by calling the @ref json_serializer<ValueType> `from_json()` + method. + + @tparam ValueTypeCV the provided value type + @tparam ValueType the returned value type + + @return copy of the JSON value, converted to @tparam ValueType if necessary + + @throw what @ref json_serializer<ValueType> `from_json()` method throws if conversion is required + + @since version 2.1.0 + */ + template < typename ValueTypeCV, typename ValueType = detail::uncvref_t<ValueTypeCV>> +#if defined(JSON_HAS_CPP_14) + constexpr +#endif + auto get() const noexcept( + noexcept(std::declval<const basic_json_t&>().template get_impl<ValueType>(detail::priority_tag<4> {}))) + -> decltype(std::declval<const basic_json_t&>().template get_impl<ValueType>(detail::priority_tag<4> {})) + { + // we cannot static_assert on ValueTypeCV being non-const, because + // there is support for get<const basic_json_t>(), which is why we + // still need the uncvref + static_assert(!std::is_reference<ValueTypeCV>::value, + "get() cannot be used with reference types, you might want to use get_ref()"); + return get_impl<ValueType>(detail::priority_tag<4> {}); + } + + /*! + @brief get a pointer value (explicit) + + Explicit pointer access to the internally stored JSON value. No copies are + made. + + @warning The pointer becomes invalid if the underlying JSON object + changes. + + @tparam PointerType pointer type; must be a pointer to @ref array_t, @ref + object_t, @ref string_t, @ref boolean_t, @ref number_integer_t, + @ref number_unsigned_t, or @ref number_float_t. + + @return pointer to the internally stored JSON value if the requested + pointer type @a PointerType fits to the JSON value; `nullptr` otherwise + + @complexity Constant. + + @liveexample{The example below shows how pointers to internal values of a + JSON value can be requested. 
Note that no type conversions are made and a + `nullptr` is returned if the value and the requested pointer type does not + match.,get__PointerType} + + @sa see @ref get_ptr() for explicit pointer-member access + + @since version 1.0.0 + */ + template<typename PointerType, typename std::enable_if< + std::is_pointer<PointerType>::value, int>::type = 0> + auto get() noexcept -> decltype(std::declval<basic_json_t&>().template get_ptr<PointerType>()) + { + // delegate the call to get_ptr + return get_ptr<PointerType>(); + } + + /// @brief get a value (explicit) + /// @sa https://json.nlohmann.me/api/basic_json/get_to/ + template < typename ValueType, + detail::enable_if_t < + !detail::is_basic_json<ValueType>::value&& + detail::has_from_json<basic_json_t, ValueType>::value, + int > = 0 > + ValueType & get_to(ValueType& v) const noexcept(noexcept( + JSONSerializer<ValueType>::from_json(std::declval<const basic_json_t&>(), v))) + { + JSONSerializer<ValueType>::from_json(*this, v); + return v; + } + + // specialization to allow calling get_to with a basic_json value + // see https://github.com/nlohmann/json/issues/2175 + template<typename ValueType, + detail::enable_if_t < + detail::is_basic_json<ValueType>::value, + int> = 0> + ValueType & get_to(ValueType& v) const + { + v = *this; + return v; + } + + template < + typename T, std::size_t N, + typename Array = T (&)[N], // NOLINT(cppcoreguidelines-avoid-c-arrays,hicpp-avoid-c-arrays,modernize-avoid-c-arrays) + detail::enable_if_t < + detail::has_from_json<basic_json_t, Array>::value, int > = 0 > + Array get_to(T (&v)[N]) const // NOLINT(cppcoreguidelines-avoid-c-arrays,hicpp-avoid-c-arrays,modernize-avoid-c-arrays) + noexcept(noexcept(JSONSerializer<Array>::from_json( + std::declval<const basic_json_t&>(), v))) + { + JSONSerializer<Array>::from_json(*this, v); + return v; + } + + /// @brief get a reference value (implicit) + /// @sa https://json.nlohmann.me/api/basic_json/get_ref/ + template<typename ReferenceType, typename std::enable_if< + std::is_reference<ReferenceType>::value, int>::type = 0> + ReferenceType get_ref() + { + // delegate call to get_ref_impl + return get_ref_impl<ReferenceType>(*this); + } + + /// @brief get a reference value (implicit) + /// @sa https://json.nlohmann.me/api/basic_json/get_ref/ + template < typename ReferenceType, typename std::enable_if < + std::is_reference<ReferenceType>::value&& + std::is_const<typename std::remove_reference<ReferenceType>::type>::value, int >::type = 0 > + ReferenceType get_ref() const + { + // delegate call to get_ref_impl + return get_ref_impl<ReferenceType>(*this); + } + + /*! + @brief get a value (implicit) + + Implicit type conversion between the JSON value and a compatible value. + The call is realized by calling @ref get() const. + + @tparam ValueType non-pointer type compatible to the JSON value, for + instance `int` for JSON integer numbers, `bool` for JSON booleans, or + `std::vector` types for JSON arrays. The character type of @ref string_t + as well as an initializer list of this type is excluded to avoid + ambiguities as these types implicitly convert to `std::string`. + + @return copy of the JSON value, converted to type @a ValueType + + @throw type_error.302 in case passed type @a ValueType is incompatible + to the JSON value type (e.g., the JSON value is of type boolean, but a + string is requested); see example below + + @complexity Linear in the size of the JSON value. + + @liveexample{The example below shows several conversions from JSON values + to other types. 
There a few things to note: (1) Floating-point numbers can + be converted to integers\, (2) A JSON array can be converted to a standard + `std::vector<short>`\, (3) A JSON object can be converted to C++ + associative containers such as `std::unordered_map<std::string\, + json>`.,operator__ValueType} + + @since version 1.0.0 + */ + template < typename ValueType, typename std::enable_if < + detail::conjunction < + detail::negation<std::is_pointer<ValueType>>, + detail::negation<std::is_same<ValueType, std::nullptr_t>>, + detail::negation<std::is_same<ValueType, detail::json_ref<basic_json>>>, + detail::negation<std::is_same<ValueType, typename string_t::value_type>>, + detail::negation<detail::is_basic_json<ValueType>>, + detail::negation<std::is_same<ValueType, std::initializer_list<typename string_t::value_type>>>, +#if defined(JSON_HAS_CPP_17) && (defined(__GNUC__) || (defined(_MSC_VER) && _MSC_VER >= 1910 && _MSC_VER <= 1914)) + detail::negation<std::is_same<ValueType, std::string_view>>, +#endif +#if defined(JSON_HAS_CPP_17) + detail::negation<std::is_same<ValueType, std::any>>, +#endif + detail::is_detected_lazy<detail::get_template_function, const basic_json_t&, ValueType> + >::value, int >::type = 0 > + JSON_EXPLICIT operator ValueType() const + { + // delegate the call to get<>() const + return get<ValueType>(); + } + + /// @brief get a binary value + /// @sa https://json.nlohmann.me/api/basic_json/get_binary/ + binary_t& get_binary() + { + if (!is_binary()) + { + JSON_THROW(type_error::create(302, detail::concat("type must be binary, but is ", type_name()), this)); + } + + return *get_ptr<binary_t*>(); + } + + /// @brief get a binary value + /// @sa https://json.nlohmann.me/api/basic_json/get_binary/ + const binary_t& get_binary() const + { + if (!is_binary()) + { + JSON_THROW(type_error::create(302, detail::concat("type must be binary, but is ", type_name()), this)); + } + + return *get_ptr<const binary_t*>(); + } + + /// @} + + + //////////////////// + // element access // + //////////////////// + + /// @name element access + /// Access to the JSON value. 
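+ /// + /// A minimal usage sketch, assuming the common `using json = nlohmann::json;` alias (keys and values are illustrative): + /// @code {.cpp} + /// json j = {{"name", "basin"}, {"ids", {1, 2, 3}}}; + /// j.at("name");          // "basin"; at() throws out_of_range if the key is missing + /// j["ids"][0];           // 1; non-const operator[] inserts a null value for a missing key + /// j.value("missing", 7); // 7, because the key does not exist + /// @endcode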
+ /// @{ + + /// @brief access specified array element with bounds checking + /// @sa https://json.nlohmann.me/api/basic_json/at/ + reference at(size_type idx) + { + // at only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + JSON_TRY + { + return set_parent(m_value.array->at(idx)); + } + JSON_CATCH (std::out_of_range&) + { + // create better exception explanation + JSON_THROW(out_of_range::create(401, detail::concat("array index ", std::to_string(idx), " is out of range"), this)); + } + } + else + { + JSON_THROW(type_error::create(304, detail::concat("cannot use at() with ", type_name()), this)); + } + } + + /// @brief access specified array element with bounds checking + /// @sa https://json.nlohmann.me/api/basic_json/at/ + const_reference at(size_type idx) const + { + // at only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + JSON_TRY + { + return m_value.array->at(idx); + } + JSON_CATCH (std::out_of_range&) + { + // create better exception explanation + JSON_THROW(out_of_range::create(401, detail::concat("array index ", std::to_string(idx), " is out of range"), this)); + } + } + else + { + JSON_THROW(type_error::create(304, detail::concat("cannot use at() with ", type_name()), this)); + } + } + + /// @brief access specified object element with bounds checking + /// @sa https://json.nlohmann.me/api/basic_json/at/ + reference at(const typename object_t::key_type& key) + { + // at only works for objects + if (JSON_HEDLEY_UNLIKELY(!is_object())) + { + JSON_THROW(type_error::create(304, detail::concat("cannot use at() with ", type_name()), this)); + } + + auto it = m_value.object->find(key); + if (it == m_value.object->end()) + { + JSON_THROW(out_of_range::create(403, detail::concat("key '", key, "' not found"), this)); + } + return set_parent(it->second); + } + + /// @brief access specified object element with bounds checking + /// @sa https://json.nlohmann.me/api/basic_json/at/ + template<class KeyType, detail::enable_if_t< + detail::is_usable_as_key_type<basic_json_t, KeyType>::value, int> = 0> + reference at(KeyType && key) + { + // at only works for objects + if (JSON_HEDLEY_UNLIKELY(!is_object())) + { + JSON_THROW(type_error::create(304, detail::concat("cannot use at() with ", type_name()), this)); + } + + auto it = m_value.object->find(std::forward<KeyType>(key)); + if (it == m_value.object->end()) + { + JSON_THROW(out_of_range::create(403, detail::concat("key '", string_t(std::forward<KeyType>(key)), "' not found"), this)); + } + return set_parent(it->second); + } + + /// @brief access specified object element with bounds checking + /// @sa https://json.nlohmann.me/api/basic_json/at/ + const_reference at(const typename object_t::key_type& key) const + { + // at only works for objects + if (JSON_HEDLEY_UNLIKELY(!is_object())) + { + JSON_THROW(type_error::create(304, detail::concat("cannot use at() with ", type_name()), this)); + } + + auto it = m_value.object->find(key); + if (it == m_value.object->end()) + { + JSON_THROW(out_of_range::create(403, detail::concat("key '", key, "' not found"), this)); + } + return it->second; + } + + /// @brief access specified object element with bounds checking + /// @sa https://json.nlohmann.me/api/basic_json/at/ + template<class KeyType, detail::enable_if_t< + detail::is_usable_as_key_type<basic_json_t, KeyType>::value, int> = 0> + const_reference at(KeyType && key) const + { + // at only works for objects + if (JSON_HEDLEY_UNLIKELY(!is_object())) + { + JSON_THROW(type_error::create(304, detail::concat("cannot use at() with 
", type_name()), this)); + } + + auto it = m_value.object->find(std::forward<KeyType>(key)); + if (it == m_value.object->end()) + { + JSON_THROW(out_of_range::create(403, detail::concat("key '", string_t(std::forward<KeyType>(key)), "' not found"), this)); + } + return it->second; + } + + /// @brief access specified array element + /// @sa https://json.nlohmann.me/api/basic_json/operator%5B%5D/ + reference operator[](size_type idx) + { + // implicitly convert null value to an empty array + if (is_null()) + { + m_type = value_t::array; + m_value.array = create<array_t>(); + assert_invariant(); + } + + // operator[] only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + // fill up array with null values if given idx is outside range + if (idx >= m_value.array->size()) + { +#if JSON_DIAGNOSTICS + // remember array size & capacity before resizing + const auto old_size = m_value.array->size(); + const auto old_capacity = m_value.array->capacity(); +#endif + m_value.array->resize(idx + 1); + +#if JSON_DIAGNOSTICS + if (JSON_HEDLEY_UNLIKELY(m_value.array->capacity() != old_capacity)) + { + // capacity has changed: update all parents + set_parents(); + } + else + { + // set parent for values added above + set_parents(begin() + static_cast<typename iterator::difference_type>(old_size), static_cast<typename iterator::difference_type>(idx + 1 - old_size)); + } +#endif + assert_invariant(); + } + + return m_value.array->operator[](idx); + } + + JSON_THROW(type_error::create(305, detail::concat("cannot use operator[] with a numeric argument with ", type_name()), this)); + } + + /// @brief access specified array element + /// @sa https://json.nlohmann.me/api/basic_json/operator%5B%5D/ + const_reference operator[](size_type idx) const + { + // const operator[] only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + return m_value.array->operator[](idx); + } + + JSON_THROW(type_error::create(305, detail::concat("cannot use operator[] with a numeric argument with ", type_name()), this)); + } + + /// @brief access specified object element + /// @sa https://json.nlohmann.me/api/basic_json/operator%5B%5D/ + reference operator[](typename object_t::key_type key) + { + // implicitly convert null value to an empty object + if (is_null()) + { + m_type = value_t::object; + m_value.object = create<object_t>(); + assert_invariant(); + } + + // operator[] only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + auto result = m_value.object->emplace(std::move(key), nullptr); + return set_parent(result.first->second); + } + + JSON_THROW(type_error::create(305, detail::concat("cannot use operator[] with a string argument with ", type_name()), this)); + } + + /// @brief access specified object element + /// @sa https://json.nlohmann.me/api/basic_json/operator%5B%5D/ + const_reference operator[](const typename object_t::key_type& key) const + { + // const operator[] only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + auto it = m_value.object->find(key); + JSON_ASSERT(it != m_value.object->end()); + return it->second; + } + + JSON_THROW(type_error::create(305, detail::concat("cannot use operator[] with a string argument with ", type_name()), this)); + } + + // these two functions resolve a (const) char * ambiguity affecting Clang and MSVC + // (they seemingly cannot be constrained to resolve the ambiguity) + template<typename T> + reference operator[](T* key) + { + return operator[](typename object_t::key_type(key)); + } + + template<typename T> + const_reference operator[](T* 
key) const + { + return operator[](typename object_t::key_type(key)); + } + + /// @brief access specified object element + /// @sa https://json.nlohmann.me/api/basic_json/operator%5B%5D/ + template<class KeyType, detail::enable_if_t< + detail::is_usable_as_key_type<basic_json_t, KeyType>::value, int > = 0 > + reference operator[](KeyType && key) + { + // implicitly convert null value to an empty object + if (is_null()) + { + m_type = value_t::object; + m_value.object = create<object_t>(); + assert_invariant(); + } + + // operator[] only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + auto result = m_value.object->emplace(std::forward<KeyType>(key), nullptr); + return set_parent(result.first->second); + } + + JSON_THROW(type_error::create(305, detail::concat("cannot use operator[] with a string argument with ", type_name()), this)); + } + + /// @brief access specified object element + /// @sa https://json.nlohmann.me/api/basic_json/operator%5B%5D/ + template<class KeyType, detail::enable_if_t< + detail::is_usable_as_key_type<basic_json_t, KeyType>::value, int > = 0 > + const_reference operator[](KeyType && key) const + { + // const operator[] only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + auto it = m_value.object->find(std::forward<KeyType>(key)); + JSON_ASSERT(it != m_value.object->end()); + return it->second; + } + + JSON_THROW(type_error::create(305, detail::concat("cannot use operator[] with a string argument with ", type_name()), this)); + } + + /// @brief access specified object element with default value + /// @sa https://json.nlohmann.me/api/basic_json/value/ + // this is the value(const typename object_t::key_type&) overload + template < class KeyType, class ValueType, detail::enable_if_t < + std::is_same<KeyType, typename object_t::key_type>::value + && detail::is_getable<basic_json_t, ValueType>::value + && !std::is_same<value_t, ValueType>::value, int > = 0 > + typename std::decay<ValueType>::type value(const KeyType& key, ValueType && default_value) const + { + // value only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + // if key is found, return value and given default value otherwise + const auto it = find(key); + if (it != end()) + { + return it->template get<typename std::decay<ValueType>::type>(); + } + + return std::forward<ValueType>(default_value); + } + + JSON_THROW(type_error::create(306, detail::concat("cannot use value() with ", type_name()), this)); + } + + /// @brief access specified object element with default value + /// @sa https://json.nlohmann.me/api/basic_json/value/ + /// overload for a default value of type const char* + string_t value(const typename object_t::key_type& key, const char* default_value) const + { + return value(key, string_t(default_value)); + } + + // these two functions, in conjunction with value(const KeyType &, ValueType &&), + // resolve an ambiguity that would otherwise occur between the json_pointer and + // typename object_t::key_type & overloads + template < class ValueType, detail::enable_if_t < + detail::is_getable<basic_json_t, ValueType>::value + && !std::is_same<value_t, ValueType>::value, int > = 0 > + typename std::decay<ValueType>::type value(const char* key, ValueType && default_value) const + { + return value(typename object_t::key_type(key), std::forward<ValueType>(default_value)); + } + + string_t value(const char* key, const char* default_value) const + { + return value(typename object_t::key_type(key), string_t(default_value)); + } + + /// @brief access specified 
object element with default value + /// @sa https://json.nlohmann.me/api/basic_json/value/ + /// using std::is_convertible in a std::enable_if will fail when using explicit conversions + template < class KeyType, class ValueType, detail::enable_if_t < + detail::is_getable<basic_json_t, ValueType>::value + && !std::is_same<value_t, ValueType>::value + && detail::is_usable_as_key_type<basic_json_t, KeyType>::value, int > = 0 > + typename std::decay<ValueType>::type value(KeyType && key, ValueType && default_value) const + { + // value only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + // if key is found, return value and given default value otherwise + const auto it = find(std::forward<KeyType>(key)); + if (it != end()) + { + return it->template get<typename std::decay<ValueType>::type>(); + } + + return std::forward<ValueType>(default_value); + } + + JSON_THROW(type_error::create(306, detail::concat("cannot use value() with ", type_name()), this)); + } + + /// @brief access specified object element with default value + /// @sa https://json.nlohmann.me/api/basic_json/value/ + /// overload for a default value of type const char* + template < class KeyType, detail::enable_if_t < + !detail::is_json_pointer<KeyType>::value, int > = 0 > + string_t value(KeyType && key, const char* default_value) const + { + return value(std::forward<KeyType>(key), string_t(default_value)); + } + + /// @brief access specified object element via JSON Pointer with default value + /// @sa https://json.nlohmann.me/api/basic_json/value/ + template < class ValueType, detail::enable_if_t < + detail::is_getable<basic_json_t, ValueType>::value, int> = 0 > + ValueType value(const json_pointer& ptr, const ValueType& default_value) const + { + // value only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + // if pointer resolves a value, return it or use default value + JSON_TRY + { + return ptr.get_checked(this).template get<ValueType>(); + } + JSON_INTERNAL_CATCH (out_of_range&) + { + return default_value; + } + } + + JSON_THROW(type_error::create(306, detail::concat("cannot use value() with ", type_name()), this)); + } + + template < class ValueType, class BasicJsonType, detail::enable_if_t < + detail::is_getable<basic_json_t, ValueType>::value, int> = 0 > + JSON_HEDLEY_DEPRECATED_FOR(3.11.0, basic_json::json_pointer or nlohmann::json_pointer<basic_json::string_t>) // NOLINT(readability/alt_tokens) + ValueType value(const ::nlohmann::json_pointer<BasicJsonType>& ptr, const ValueType& default_value) const + { + return value(ptr.convert(), default_value); + } + + /// @brief access specified object element via JSON Pointer with default value + /// @sa https://json.nlohmann.me/api/basic_json/value/ + /// overload for a default value of type const char* + JSON_HEDLEY_NON_NULL(3) + string_t value(const json_pointer& ptr, const char* default_value) const + { + return value(ptr, string_t(default_value)); + } + + template<typename BasicJsonType> + JSON_HEDLEY_DEPRECATED_FOR(3.11.0, basic_json::json_pointer or nlohmann::json_pointer<basic_json::string_t>) // NOLINT(readability/alt_tokens) + JSON_HEDLEY_NON_NULL(3) + string_t value(const typename ::nlohmann::json_pointer<BasicJsonType>& ptr, const char* default_value) const + { + return value(ptr.convert(), default_value); + } + + /// @brief access the first element + /// @sa https://json.nlohmann.me/api/basic_json/front/ + reference front() + { + return *begin(); + } + + /// @brief access the first element + /// @sa 
https://json.nlohmann.me/api/basic_json/front/ + const_reference front() const + { + return *cbegin(); + } + + /// @brief access the last element + /// @sa https://json.nlohmann.me/api/basic_json/back/ + reference back() + { + auto tmp = end(); + --tmp; + return *tmp; + } + + /// @brief access the last element + /// @sa https://json.nlohmann.me/api/basic_json/back/ + const_reference back() const + { + auto tmp = cend(); + --tmp; + return *tmp; + } + + /// @brief remove element given an iterator + /// @sa https://json.nlohmann.me/api/basic_json/erase/ + template < class IteratorType, detail::enable_if_t < + std::is_same<IteratorType, typename basic_json_t::iterator>::value || + std::is_same<IteratorType, typename basic_json_t::const_iterator>::value, int > = 0 > + IteratorType erase(IteratorType pos) + { + // make sure iterator fits the current value + if (JSON_HEDLEY_UNLIKELY(this != pos.m_object)) + { + JSON_THROW(invalid_iterator::create(202, "iterator does not fit current value", this)); + } + + IteratorType result = end(); + + switch (m_type) + { + case value_t::boolean: + case value_t::number_float: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::string: + case value_t::binary: + { + if (JSON_HEDLEY_UNLIKELY(!pos.m_it.primitive_iterator.is_begin())) + { + JSON_THROW(invalid_iterator::create(205, "iterator out of range", this)); + } + + if (is_string()) + { + AllocatorType<string_t> alloc; + std::allocator_traits<decltype(alloc)>::destroy(alloc, m_value.string); + std::allocator_traits<decltype(alloc)>::deallocate(alloc, m_value.string, 1); + m_value.string = nullptr; + } + else if (is_binary()) + { + AllocatorType<binary_t> alloc; + std::allocator_traits<decltype(alloc)>::destroy(alloc, m_value.binary); + std::allocator_traits<decltype(alloc)>::deallocate(alloc, m_value.binary, 1); + m_value.binary = nullptr; + } + + m_type = value_t::null; + assert_invariant(); + break; + } + + case value_t::object: + { + result.m_it.object_iterator = m_value.object->erase(pos.m_it.object_iterator); + break; + } + + case value_t::array: + { + result.m_it.array_iterator = m_value.array->erase(pos.m_it.array_iterator); + break; + } + + case value_t::null: + case value_t::discarded: + default: + JSON_THROW(type_error::create(307, detail::concat("cannot use erase() with ", type_name()), this)); + } + + return result; + } + + /// @brief remove elements given an iterator range + /// @sa https://json.nlohmann.me/api/basic_json/erase/ + template < class IteratorType, detail::enable_if_t < + std::is_same<IteratorType, typename basic_json_t::iterator>::value || + std::is_same<IteratorType, typename basic_json_t::const_iterator>::value, int > = 0 > + IteratorType erase(IteratorType first, IteratorType last) + { + // make sure iterator fits the current value + if (JSON_HEDLEY_UNLIKELY(this != first.m_object || this != last.m_object)) + { + JSON_THROW(invalid_iterator::create(203, "iterators do not fit current value", this)); + } + + IteratorType result = end(); + + switch (m_type) + { + case value_t::boolean: + case value_t::number_float: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::string: + case value_t::binary: + { + if (JSON_HEDLEY_LIKELY(!first.m_it.primitive_iterator.is_begin() + || !last.m_it.primitive_iterator.is_end())) + { + JSON_THROW(invalid_iterator::create(204, "iterators out of range", this)); + } + + if (is_string()) + { + AllocatorType<string_t> alloc; + std::allocator_traits<decltype(alloc)>::destroy(alloc, m_value.string); + 
std::allocator_traits<decltype(alloc)>::deallocate(alloc, m_value.string, 1); + m_value.string = nullptr; + } + else if (is_binary()) + { + AllocatorType<binary_t> alloc; + std::allocator_traits<decltype(alloc)>::destroy(alloc, m_value.binary); + std::allocator_traits<decltype(alloc)>::deallocate(alloc, m_value.binary, 1); + m_value.binary = nullptr; + } + + m_type = value_t::null; + assert_invariant(); + break; + } + + case value_t::object: + { + result.m_it.object_iterator = m_value.object->erase(first.m_it.object_iterator, + last.m_it.object_iterator); + break; + } + + case value_t::array: + { + result.m_it.array_iterator = m_value.array->erase(first.m_it.array_iterator, + last.m_it.array_iterator); + break; + } + + case value_t::null: + case value_t::discarded: + default: + JSON_THROW(type_error::create(307, detail::concat("cannot use erase() with ", type_name()), this)); + } + + return result; + } + + private: + template < typename KeyType, detail::enable_if_t < + detail::has_erase_with_key_type<basic_json_t, KeyType>::value, int > = 0 > + size_type erase_internal(KeyType && key) + { + // this erase only works for objects + if (JSON_HEDLEY_UNLIKELY(!is_object())) + { + JSON_THROW(type_error::create(307, detail::concat("cannot use erase() with ", type_name()), this)); + } + + return m_value.object->erase(std::forward<KeyType>(key)); + } + + template < typename KeyType, detail::enable_if_t < + !detail::has_erase_with_key_type<basic_json_t, KeyType>::value, int > = 0 > + size_type erase_internal(KeyType && key) + { + // this erase only works for objects + if (JSON_HEDLEY_UNLIKELY(!is_object())) + { + JSON_THROW(type_error::create(307, detail::concat("cannot use erase() with ", type_name()), this)); + } + + const auto it = m_value.object->find(std::forward<KeyType>(key)); + if (it != m_value.object->end()) + { + m_value.object->erase(it); + return 1; + } + return 0; + } + + public: + + /// @brief remove element from a JSON object given a key + /// @sa https://json.nlohmann.me/api/basic_json/erase/ + size_type erase(const typename object_t::key_type& key) + { + // the indirection via erase_internal() is added to avoid making this + // function a template and thus de-rank it during overload resolution + return erase_internal(key); + } + + /// @brief remove element from a JSON object given a key + /// @sa https://json.nlohmann.me/api/basic_json/erase/ + template<class KeyType, detail::enable_if_t< + detail::is_usable_as_key_type<basic_json_t, KeyType>::value, int> = 0> + size_type erase(KeyType && key) + { + return erase_internal(std::forward<KeyType>(key)); + } + + /// @brief remove element from a JSON array given an index + /// @sa https://json.nlohmann.me/api/basic_json/erase/ + void erase(const size_type idx) + { + // this erase only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + if (JSON_HEDLEY_UNLIKELY(idx >= size())) + { + JSON_THROW(out_of_range::create(401, detail::concat("array index ", std::to_string(idx), " is out of range"), this)); + } + + m_value.array->erase(m_value.array->begin() + static_cast<difference_type>(idx)); + } + else + { + JSON_THROW(type_error::create(307, detail::concat("cannot use erase() with ", type_name()), this)); + } + } + + /// @} + + + //////////// + // lookup // + //////////// + + /// @name lookup + /// @{ + + /// @brief find an element in a JSON object + /// @sa https://json.nlohmann.me/api/basic_json/find/ + iterator find(const typename object_t::key_type& key) + { + auto result = end(); + + if (is_object()) + { + 
result.m_it.object_iterator = m_value.object->find(key); + } + + return result; + } + + /// @brief find an element in a JSON object + /// @sa https://json.nlohmann.me/api/basic_json/find/ + const_iterator find(const typename object_t::key_type& key) const + { + auto result = cend(); + + if (is_object()) + { + result.m_it.object_iterator = m_value.object->find(key); + } + + return result; + } + + /// @brief find an element in a JSON object + /// @sa https://json.nlohmann.me/api/basic_json/find/ + template<class KeyType, detail::enable_if_t< + detail::is_usable_as_key_type<basic_json_t, KeyType>::value, int> = 0> + iterator find(KeyType && key) + { + auto result = end(); + + if (is_object()) + { + result.m_it.object_iterator = m_value.object->find(std::forward<KeyType>(key)); + } + + return result; + } + + /// @brief find an element in a JSON object + /// @sa https://json.nlohmann.me/api/basic_json/find/ + template<class KeyType, detail::enable_if_t< + detail::is_usable_as_key_type<basic_json_t, KeyType>::value, int> = 0> + const_iterator find(KeyType && key) const + { + auto result = cend(); + + if (is_object()) + { + result.m_it.object_iterator = m_value.object->find(std::forward<KeyType>(key)); + } + + return result; + } + + /// @brief returns the number of occurrences of a key in a JSON object + /// @sa https://json.nlohmann.me/api/basic_json/count/ + size_type count(const typename object_t::key_type& key) const + { + // return 0 for all nonobject types + return is_object() ? m_value.object->count(key) : 0; + } + + /// @brief returns the number of occurrences of a key in a JSON object + /// @sa https://json.nlohmann.me/api/basic_json/count/ + template<class KeyType, detail::enable_if_t< + detail::is_usable_as_key_type<basic_json_t, KeyType>::value, int> = 0> + size_type count(KeyType && key) const + { + // return 0 for all nonobject types + return is_object() ? 
m_value.object->count(std::forward<KeyType>(key)) : 0; + } + + /// @brief check the existence of an element in a JSON object + /// @sa https://json.nlohmann.me/api/basic_json/contains/ + bool contains(const typename object_t::key_type& key) const + { + return is_object() && m_value.object->find(key) != m_value.object->end(); + } + + /// @brief check the existence of an element in a JSON object + /// @sa https://json.nlohmann.me/api/basic_json/contains/ + template<class KeyType, detail::enable_if_t< + detail::is_usable_as_key_type<basic_json_t, KeyType>::value, int> = 0> + bool contains(KeyType && key) const + { + return is_object() && m_value.object->find(std::forward<KeyType>(key)) != m_value.object->end(); + } + + /// @brief check the existence of an element in a JSON object given a JSON pointer + /// @sa https://json.nlohmann.me/api/basic_json/contains/ + bool contains(const json_pointer& ptr) const + { + return ptr.contains(this); + } + + template<typename BasicJsonType> + JSON_HEDLEY_DEPRECATED_FOR(3.11.0, basic_json::json_pointer or nlohmann::json_pointer<basic_json::string_t>) // NOLINT(readability/alt_tokens) + bool contains(const typename ::nlohmann::json_pointer<BasicJsonType> ptr) const + { + return ptr.contains(this); + } + + /// @} + + + /////////////// + // iterators // + /////////////// + + /// @name iterators + /// @{ + + /// @brief returns an iterator to the first element + /// @sa https://json.nlohmann.me/api/basic_json/begin/ + iterator begin() noexcept + { + iterator result(this); + result.set_begin(); + return result; + } + + /// @brief returns an iterator to the first element + /// @sa https://json.nlohmann.me/api/basic_json/begin/ + const_iterator begin() const noexcept + { + return cbegin(); + } + + /// @brief returns a const iterator to the first element + /// @sa https://json.nlohmann.me/api/basic_json/cbegin/ + const_iterator cbegin() const noexcept + { + const_iterator result(this); + result.set_begin(); + return result; + } + + /// @brief returns an iterator to one past the last element + /// @sa https://json.nlohmann.me/api/basic_json/end/ + iterator end() noexcept + { + iterator result(this); + result.set_end(); + return result; + } + + /// @brief returns an iterator to one past the last element + /// @sa https://json.nlohmann.me/api/basic_json/end/ + const_iterator end() const noexcept + { + return cend(); + } + + /// @brief returns an iterator to one past the last element + /// @sa https://json.nlohmann.me/api/basic_json/cend/ + const_iterator cend() const noexcept + { + const_iterator result(this); + result.set_end(); + return result; + } + + /// @brief returns an iterator to the reverse-beginning + /// @sa https://json.nlohmann.me/api/basic_json/rbegin/ + reverse_iterator rbegin() noexcept + { + return reverse_iterator(end()); + } + + /// @brief returns an iterator to the reverse-beginning + /// @sa https://json.nlohmann.me/api/basic_json/rbegin/ + const_reverse_iterator rbegin() const noexcept + { + return crbegin(); + } + + /// @brief returns an iterator to the reverse-end + /// @sa https://json.nlohmann.me/api/basic_json/rend/ + reverse_iterator rend() noexcept + { + return reverse_iterator(begin()); + } + + /// @brief returns an iterator to the reverse-end + /// @sa https://json.nlohmann.me/api/basic_json/rend/ + const_reverse_iterator rend() const noexcept + { + return crend(); + } + + /// @brief returns a const reverse iterator to the last element + /// @sa https://json.nlohmann.me/api/basic_json/crbegin/ + const_reverse_iterator crbegin() const 
noexcept + { + return const_reverse_iterator(cend()); + } + + /// @brief returns a const reverse iterator to one before the first + /// @sa https://json.nlohmann.me/api/basic_json/crend/ + const_reverse_iterator crend() const noexcept + { + return const_reverse_iterator(cbegin()); + } + + public: + /// @brief wrapper to access iterator member functions in range-based for + /// @sa https://json.nlohmann.me/api/basic_json/items/ + /// @deprecated This function is deprecated since 3.1.0 and will be removed in + /// version 4.0.0 of the library. Please use @ref items() instead; + /// that is, replace `json::iterator_wrapper(j)` with `j.items()`. + JSON_HEDLEY_DEPRECATED_FOR(3.1.0, items()) + static iteration_proxy<iterator> iterator_wrapper(reference ref) noexcept + { + return ref.items(); + } + + /// @brief wrapper to access iterator member functions in range-based for + /// @sa https://json.nlohmann.me/api/basic_json/items/ + /// @deprecated This function is deprecated since 3.1.0 and will be removed in + /// version 4.0.0 of the library. Please use @ref items() instead; + /// that is, replace `json::iterator_wrapper(j)` with `j.items()`. + JSON_HEDLEY_DEPRECATED_FOR(3.1.0, items()) + static iteration_proxy<const_iterator> iterator_wrapper(const_reference ref) noexcept + { + return ref.items(); + } + + /// @brief helper to access iterator member functions in range-based for + /// @sa https://json.nlohmann.me/api/basic_json/items/ + iteration_proxy<iterator> items() noexcept + { + return iteration_proxy<iterator>(*this); + } + + /// @brief helper to access iterator member functions in range-based for + /// @sa https://json.nlohmann.me/api/basic_json/items/ + iteration_proxy<const_iterator> items() const noexcept + { + return iteration_proxy<const_iterator>(*this); + } + + /// @} + + + ////////////// + // capacity // + ////////////// + + /// @name capacity + /// @{ + + /// @brief checks whether the container is empty. 
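+ /// + /// As a quick sketch (assuming the usual `nlohmann::json` alias): `json(nullptr).empty()` and `json::array().empty()` are true, while `json(42).empty()` is false, because non-container values report a size of 1.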
+ /// @sa https://json.nlohmann.me/api/basic_json/empty/ + bool empty() const noexcept + { + switch (m_type) + { + case value_t::null: + { + // null values are empty + return true; + } + + case value_t::array: + { + // delegate call to array_t::empty() + return m_value.array->empty(); + } + + case value_t::object: + { + // delegate call to object_t::empty() + return m_value.object->empty(); + } + + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + // all other types are nonempty + return false; + } + } + } + + /// @brief returns the number of elements + /// @sa https://json.nlohmann.me/api/basic_json/size/ + size_type size() const noexcept + { + switch (m_type) + { + case value_t::null: + { + // null values are empty + return 0; + } + + case value_t::array: + { + // delegate call to array_t::size() + return m_value.array->size(); + } + + case value_t::object: + { + // delegate call to object_t::size() + return m_value.object->size(); + } + + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + // all other types have size 1 + return 1; + } + } + } + + /// @brief returns the maximum possible number of elements + /// @sa https://json.nlohmann.me/api/basic_json/max_size/ + size_type max_size() const noexcept + { + switch (m_type) + { + case value_t::array: + { + // delegate call to array_t::max_size() + return m_value.array->max_size(); + } + + case value_t::object: + { + // delegate call to object_t::max_size() + return m_value.object->max_size(); + } + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + // all other types have max_size() == size() + return size(); + } + } + } + + /// @} + + + /////////////// + // modifiers // + /////////////// + + /// @name modifiers + /// @{ + + /// @brief clears the contents + /// @sa https://json.nlohmann.me/api/basic_json/clear/ + void clear() noexcept + { + switch (m_type) + { + case value_t::number_integer: + { + m_value.number_integer = 0; + break; + } + + case value_t::number_unsigned: + { + m_value.number_unsigned = 0; + break; + } + + case value_t::number_float: + { + m_value.number_float = 0.0; + break; + } + + case value_t::boolean: + { + m_value.boolean = false; + break; + } + + case value_t::string: + { + m_value.string->clear(); + break; + } + + case value_t::binary: + { + m_value.binary->clear(); + break; + } + + case value_t::array: + { + m_value.array->clear(); + break; + } + + case value_t::object: + { + m_value.object->clear(); + break; + } + + case value_t::null: + case value_t::discarded: + default: + break; + } + } + + /// @brief add an object to an array + /// @sa https://json.nlohmann.me/api/basic_json/push_back/ + void push_back(basic_json&& val) + { + // push_back only works for null objects or arrays + if (JSON_HEDLEY_UNLIKELY(!(is_null() || is_array()))) + { + JSON_THROW(type_error::create(308, detail::concat("cannot use push_back() with ", type_name()), this)); + } + + // transform null object into an array + if (is_null()) + { + m_type = value_t::array; + m_value = value_t::array; + assert_invariant(); + } + + // add element to array 
(move semantics) + const auto old_capacity = m_value.array->capacity(); + m_value.array->push_back(std::move(val)); + set_parent(m_value.array->back(), old_capacity); + // if val is moved from, basic_json move constructor marks it null, so we do not call the destructor + } + + /// @brief add an object to an array + /// @sa https://json.nlohmann.me/api/basic_json/operator+=/ + reference operator+=(basic_json&& val) + { + push_back(std::move(val)); + return *this; + } + + /// @brief add an object to an array + /// @sa https://json.nlohmann.me/api/basic_json/push_back/ + void push_back(const basic_json& val) + { + // push_back only works for null objects or arrays + if (JSON_HEDLEY_UNLIKELY(!(is_null() || is_array()))) + { + JSON_THROW(type_error::create(308, detail::concat("cannot use push_back() with ", type_name()), this)); + } + + // transform null object into an array + if (is_null()) + { + m_type = value_t::array; + m_value = value_t::array; + assert_invariant(); + } + + // add element to array + const auto old_capacity = m_value.array->capacity(); + m_value.array->push_back(val); + set_parent(m_value.array->back(), old_capacity); + } + + /// @brief add an object to an array + /// @sa https://json.nlohmann.me/api/basic_json/operator+=/ + reference operator+=(const basic_json& val) + { + push_back(val); + return *this; + } + + /// @brief add an object to an object + /// @sa https://json.nlohmann.me/api/basic_json/push_back/ + void push_back(const typename object_t::value_type& val) + { + // push_back only works for null objects or objects + if (JSON_HEDLEY_UNLIKELY(!(is_null() || is_object()))) + { + JSON_THROW(type_error::create(308, detail::concat("cannot use push_back() with ", type_name()), this)); + } + + // transform null object into an object + if (is_null()) + { + m_type = value_t::object; + m_value = value_t::object; + assert_invariant(); + } + + // add element to object + auto res = m_value.object->insert(val); + set_parent(res.first->second); + } + + /// @brief add an object to an object + /// @sa https://json.nlohmann.me/api/basic_json/operator+=/ + reference operator+=(const typename object_t::value_type& val) + { + push_back(val); + return *this; + } + + /// @brief add an object to an object + /// @sa https://json.nlohmann.me/api/basic_json/push_back/ + void push_back(initializer_list_t init) + { + if (is_object() && init.size() == 2 && (*init.begin())->is_string()) + { + basic_json&& key = init.begin()->moved_or_copied(); + push_back(typename object_t::value_type( + std::move(key.get_ref<string_t&>()), (init.begin() + 1)->moved_or_copied())); + } + else + { + push_back(basic_json(init)); + } + } + + /// @brief add an object to an object + /// @sa https://json.nlohmann.me/api/basic_json/operator+=/ + reference operator+=(initializer_list_t init) + { + push_back(init); + return *this; + } + + /// @brief add an object to an array + /// @sa https://json.nlohmann.me/api/basic_json/emplace_back/ + template<class... Args> + reference emplace_back(Args&& ... 
args) + { + // emplace_back only works for null objects or arrays + if (JSON_HEDLEY_UNLIKELY(!(is_null() || is_array()))) + { + JSON_THROW(type_error::create(311, detail::concat("cannot use emplace_back() with ", type_name()), this)); + } + + // transform null object into an array + if (is_null()) + { + m_type = value_t::array; + m_value = value_t::array; + assert_invariant(); + } + + // add element to array (perfect forwarding) + const auto old_capacity = m_value.array->capacity(); + m_value.array->emplace_back(std::forward<Args>(args)...); + return set_parent(m_value.array->back(), old_capacity); + } + + /// @brief add an object to an object if key does not exist + /// @sa https://json.nlohmann.me/api/basic_json/emplace/ + template<class... Args> + std::pair<iterator, bool> emplace(Args&& ... args) + { + // emplace only works for null objects or arrays + if (JSON_HEDLEY_UNLIKELY(!(is_null() || is_object()))) + { + JSON_THROW(type_error::create(311, detail::concat("cannot use emplace() with ", type_name()), this)); + } + + // transform null object into an object + if (is_null()) + { + m_type = value_t::object; + m_value = value_t::object; + assert_invariant(); + } + + // add element to array (perfect forwarding) + auto res = m_value.object->emplace(std::forward<Args>(args)...); + set_parent(res.first->second); + + // create result iterator and set iterator to the result of emplace + auto it = begin(); + it.m_it.object_iterator = res.first; + + // return pair of iterator and boolean + return {it, res.second}; + } + + /// Helper for insertion of an iterator + /// @note: This uses std::distance to support GCC 4.8, + /// see https://github.com/nlohmann/json/pull/1257 + template<typename... Args> + iterator insert_iterator(const_iterator pos, Args&& ... args) + { + iterator result(this); + JSON_ASSERT(m_value.array != nullptr); + + auto insert_pos = std::distance(m_value.array->begin(), pos.m_it.array_iterator); + m_value.array->insert(pos.m_it.array_iterator, std::forward<Args>(args)...); + result.m_it.array_iterator = m_value.array->begin() + insert_pos; + + // This could have been written as: + // result.m_it.array_iterator = m_value.array->insert(pos.m_it.array_iterator, cnt, val); + // but the return value of insert is missing in GCC 4.8, so it is written this way instead. 
+ + set_parents(); + return result; + } + + /// @brief inserts element into array + /// @sa https://json.nlohmann.me/api/basic_json/insert/ + iterator insert(const_iterator pos, const basic_json& val) + { + // insert only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + // check if iterator pos fits to this JSON value + if (JSON_HEDLEY_UNLIKELY(pos.m_object != this)) + { + JSON_THROW(invalid_iterator::create(202, "iterator does not fit current value", this)); + } + + // insert to array and return iterator + return insert_iterator(pos, val); + } + + JSON_THROW(type_error::create(309, detail::concat("cannot use insert() with ", type_name()), this)); + } + + /// @brief inserts element into array + /// @sa https://json.nlohmann.me/api/basic_json/insert/ + iterator insert(const_iterator pos, basic_json&& val) + { + return insert(pos, val); + } + + /// @brief inserts copies of element into array + /// @sa https://json.nlohmann.me/api/basic_json/insert/ + iterator insert(const_iterator pos, size_type cnt, const basic_json& val) + { + // insert only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + // check if iterator pos fits to this JSON value + if (JSON_HEDLEY_UNLIKELY(pos.m_object != this)) + { + JSON_THROW(invalid_iterator::create(202, "iterator does not fit current value", this)); + } + + // insert to array and return iterator + return insert_iterator(pos, cnt, val); + } + + JSON_THROW(type_error::create(309, detail::concat("cannot use insert() with ", type_name()), this)); + } + + /// @brief inserts range of elements into array + /// @sa https://json.nlohmann.me/api/basic_json/insert/ + iterator insert(const_iterator pos, const_iterator first, const_iterator last) + { + // insert only works for arrays + if (JSON_HEDLEY_UNLIKELY(!is_array())) + { + JSON_THROW(type_error::create(309, detail::concat("cannot use insert() with ", type_name()), this)); + } + + // check if iterator pos fits to this JSON value + if (JSON_HEDLEY_UNLIKELY(pos.m_object != this)) + { + JSON_THROW(invalid_iterator::create(202, "iterator does not fit current value", this)); + } + + // check if range iterators belong to the same JSON object + if (JSON_HEDLEY_UNLIKELY(first.m_object != last.m_object)) + { + JSON_THROW(invalid_iterator::create(210, "iterators do not fit", this)); + } + + if (JSON_HEDLEY_UNLIKELY(first.m_object == this)) + { + JSON_THROW(invalid_iterator::create(211, "passed iterators may not belong to container", this)); + } + + // insert to array and return iterator + return insert_iterator(pos, first.m_it.array_iterator, last.m_it.array_iterator); + } + + /// @brief inserts elements from initializer list into array + /// @sa https://json.nlohmann.me/api/basic_json/insert/ + iterator insert(const_iterator pos, initializer_list_t ilist) + { + // insert only works for arrays + if (JSON_HEDLEY_UNLIKELY(!is_array())) + { + JSON_THROW(type_error::create(309, detail::concat("cannot use insert() with ", type_name()), this)); + } + + // check if iterator pos fits to this JSON value + if (JSON_HEDLEY_UNLIKELY(pos.m_object != this)) + { + JSON_THROW(invalid_iterator::create(202, "iterator does not fit current value", this)); + } + + // insert to array and return iterator + return insert_iterator(pos, ilist.begin(), ilist.end()); + } + + /// @brief inserts range of elements into object + /// @sa https://json.nlohmann.me/api/basic_json/insert/ + void insert(const_iterator first, const_iterator last) + { + // insert only works for objects + if (JSON_HEDLEY_UNLIKELY(!is_object())) + { + 
JSON_THROW(type_error::create(309, detail::concat("cannot use insert() with ", type_name()), this)); + } + + // check if range iterators belong to the same JSON object + if (JSON_HEDLEY_UNLIKELY(first.m_object != last.m_object)) + { + JSON_THROW(invalid_iterator::create(210, "iterators do not fit", this)); + } + + // passed iterators must belong to objects + if (JSON_HEDLEY_UNLIKELY(!first.m_object->is_object())) + { + JSON_THROW(invalid_iterator::create(202, "iterators first and last must point to objects", this)); + } + + m_value.object->insert(first.m_it.object_iterator, last.m_it.object_iterator); + } + + /// @brief updates a JSON object from another object, overwriting existing keys + /// @sa https://json.nlohmann.me/api/basic_json/update/ + void update(const_reference j, bool merge_objects = false) + { + update(j.begin(), j.end(), merge_objects); + } + + /// @brief updates a JSON object from another object, overwriting existing keys + /// @sa https://json.nlohmann.me/api/basic_json/update/ + void update(const_iterator first, const_iterator last, bool merge_objects = false) + { + // implicitly convert null value to an empty object + if (is_null()) + { + m_type = value_t::object; + m_value.object = create<object_t>(); + assert_invariant(); + } + + if (JSON_HEDLEY_UNLIKELY(!is_object())) + { + JSON_THROW(type_error::create(312, detail::concat("cannot use update() with ", type_name()), this)); + } + + // check if range iterators belong to the same JSON object + if (JSON_HEDLEY_UNLIKELY(first.m_object != last.m_object)) + { + JSON_THROW(invalid_iterator::create(210, "iterators do not fit", this)); + } + + // passed iterators must belong to objects + if (JSON_HEDLEY_UNLIKELY(!first.m_object->is_object())) + { + JSON_THROW(type_error::create(312, detail::concat("cannot use update() with ", first.m_object->type_name()), first.m_object)); + } + + for (auto it = first; it != last; ++it) + { + if (merge_objects && it.value().is_object()) + { + auto it2 = m_value.object->find(it.key()); + if (it2 != m_value.object->end()) + { + it2->second.update(it.value(), true); + continue; + } + } + m_value.object->operator[](it.key()) = it.value(); +#if JSON_DIAGNOSTICS + m_value.object->operator[](it.key()).m_parent = this; +#endif + } + } + + /// @brief exchanges the values + /// @sa https://json.nlohmann.me/api/basic_json/swap/ + void swap(reference other) noexcept ( + std::is_nothrow_move_constructible<value_t>::value&& + std::is_nothrow_move_assignable<value_t>::value&& + std::is_nothrow_move_constructible<json_value>::value&& + std::is_nothrow_move_assignable<json_value>::value + ) + { + std::swap(m_type, other.m_type); + std::swap(m_value, other.m_value); + + set_parents(); + other.set_parents(); + assert_invariant(); + } + + /// @brief exchanges the values + /// @sa https://json.nlohmann.me/api/basic_json/swap/ + friend void swap(reference left, reference right) noexcept ( + std::is_nothrow_move_constructible<value_t>::value&& + std::is_nothrow_move_assignable<value_t>::value&& + std::is_nothrow_move_constructible<json_value>::value&& + std::is_nothrow_move_assignable<json_value>::value + ) + { + left.swap(right); + } + + /// @brief exchanges the values + /// @sa https://json.nlohmann.me/api/basic_json/swap/ + void swap(array_t& other) // NOLINT(bugprone-exception-escape) + { + // swap only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + std::swap(*(m_value.array), other); + } + else + { + JSON_THROW(type_error::create(310, detail::concat("cannot use swap() with ", type_name()), this)); 
+ } + } + + /// @brief exchanges the values + /// @sa https://json.nlohmann.me/api/basic_json/swap/ + void swap(object_t& other) // NOLINT(bugprone-exception-escape) + { + // swap only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + std::swap(*(m_value.object), other); + } + else + { + JSON_THROW(type_error::create(310, detail::concat("cannot use swap() with ", type_name()), this)); + } + } + + /// @brief exchanges the values + /// @sa https://json.nlohmann.me/api/basic_json/swap/ + void swap(string_t& other) // NOLINT(bugprone-exception-escape) + { + // swap only works for strings + if (JSON_HEDLEY_LIKELY(is_string())) + { + std::swap(*(m_value.string), other); + } + else + { + JSON_THROW(type_error::create(310, detail::concat("cannot use swap() with ", type_name()), this)); + } + } + + /// @brief exchanges the values + /// @sa https://json.nlohmann.me/api/basic_json/swap/ + void swap(binary_t& other) // NOLINT(bugprone-exception-escape) + { + // swap only works for strings + if (JSON_HEDLEY_LIKELY(is_binary())) + { + std::swap(*(m_value.binary), other); + } + else + { + JSON_THROW(type_error::create(310, detail::concat("cannot use swap() with ", type_name()), this)); + } + } + + /// @brief exchanges the values + /// @sa https://json.nlohmann.me/api/basic_json/swap/ + void swap(typename binary_t::container_type& other) // NOLINT(bugprone-exception-escape) + { + // swap only works for strings + if (JSON_HEDLEY_LIKELY(is_binary())) + { + std::swap(*(m_value.binary), other); + } + else + { + JSON_THROW(type_error::create(310, detail::concat("cannot use swap() with ", type_name()), this)); + } + } + + /// @} + + ////////////////////////////////////////// + // lexicographical comparison operators // + ////////////////////////////////////////// + + /// @name lexicographical comparison operators + /// @{ + + // note parentheses around operands are necessary; see + // https://github.com/nlohmann/json/issues/1530 +#define JSON_IMPLEMENT_OPERATOR(op, null_result, unordered_result, default_result) \ + const auto lhs_type = lhs.type(); \ + const auto rhs_type = rhs.type(); \ + \ + if (lhs_type == rhs_type) /* NOLINT(readability/braces) */ \ + { \ + switch (lhs_type) \ + { \ + case value_t::array: \ + return (*lhs.m_value.array) op (*rhs.m_value.array); \ + \ + case value_t::object: \ + return (*lhs.m_value.object) op (*rhs.m_value.object); \ + \ + case value_t::null: \ + return (null_result); \ + \ + case value_t::string: \ + return (*lhs.m_value.string) op (*rhs.m_value.string); \ + \ + case value_t::boolean: \ + return (lhs.m_value.boolean) op (rhs.m_value.boolean); \ + \ + case value_t::number_integer: \ + return (lhs.m_value.number_integer) op (rhs.m_value.number_integer); \ + \ + case value_t::number_unsigned: \ + return (lhs.m_value.number_unsigned) op (rhs.m_value.number_unsigned); \ + \ + case value_t::number_float: \ + return (lhs.m_value.number_float) op (rhs.m_value.number_float); \ + \ + case value_t::binary: \ + return (*lhs.m_value.binary) op (*rhs.m_value.binary); \ + \ + case value_t::discarded: \ + default: \ + return (unordered_result); \ + } \ + } \ + else if (lhs_type == value_t::number_integer && rhs_type == value_t::number_float) \ + { \ + return static_cast<number_float_t>(lhs.m_value.number_integer) op rhs.m_value.number_float; \ + } \ + else if (lhs_type == value_t::number_float && rhs_type == value_t::number_integer) \ + { \ + return lhs.m_value.number_float op static_cast<number_float_t>(rhs.m_value.number_integer); \ + } \ + else if (lhs_type == 
value_t::number_unsigned && rhs_type == value_t::number_float) \ + { \ + return static_cast<number_float_t>(lhs.m_value.number_unsigned) op rhs.m_value.number_float; \ + } \ + else if (lhs_type == value_t::number_float && rhs_type == value_t::number_unsigned) \ + { \ + return lhs.m_value.number_float op static_cast<number_float_t>(rhs.m_value.number_unsigned); \ + } \ + else if (lhs_type == value_t::number_unsigned && rhs_type == value_t::number_integer) \ + { \ + return static_cast<number_integer_t>(lhs.m_value.number_unsigned) op rhs.m_value.number_integer; \ + } \ + else if (lhs_type == value_t::number_integer && rhs_type == value_t::number_unsigned) \ + { \ + return lhs.m_value.number_integer op static_cast<number_integer_t>(rhs.m_value.number_unsigned); \ + } \ + else if(compares_unordered(lhs, rhs))\ + {\ + return (unordered_result);\ + }\ + \ + return (default_result); + + JSON_PRIVATE_UNLESS_TESTED: + // returns true if: + // - any operand is NaN and the other operand is of number type + // - any operand is discarded + // in legacy mode, discarded values are considered ordered if + // an operation is computed as an odd number of inverses of others + static bool compares_unordered(const_reference lhs, const_reference rhs, bool inverse = false) noexcept + { + if ((lhs.is_number_float() && std::isnan(lhs.m_value.number_float) && rhs.is_number()) + || (rhs.is_number_float() && std::isnan(rhs.m_value.number_float) && lhs.is_number())) + { + return true; + } +#if JSON_USE_LEGACY_DISCARDED_VALUE_COMPARISON + return (lhs.is_discarded() || rhs.is_discarded()) && !inverse; +#else + static_cast<void>(inverse); + return lhs.is_discarded() || rhs.is_discarded(); +#endif + } + + private: + bool compares_unordered(const_reference rhs, bool inverse = false) const noexcept + { + return compares_unordered(*this, rhs, inverse); + } + + public: +#if JSON_HAS_THREE_WAY_COMPARISON + /// @brief comparison: equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_eq/ + bool operator==(const_reference rhs) const noexcept + { +#ifdef __GNUC__ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wfloat-equal" +#endif + const_reference lhs = *this; + JSON_IMPLEMENT_OPERATOR( ==, true, false, false) +#ifdef __GNUC__ +#pragma GCC diagnostic pop +#endif + } + + /// @brief comparison: equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_eq/ + template<typename ScalarType> + requires std::is_scalar_v<ScalarType> + bool operator==(ScalarType rhs) const noexcept + { + return *this == basic_json(rhs); + } + + /// @brief comparison: not equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_ne/ + bool operator!=(const_reference rhs) const noexcept + { + if (compares_unordered(rhs, true)) + { + return false; + } + return !operator==(rhs); + } + + /// @brief comparison: 3-way + /// @sa https://json.nlohmann.me/api/basic_json/operator_spaceship/ + std::partial_ordering operator<=>(const_reference rhs) const noexcept // *NOPAD* + { + const_reference lhs = *this; + // default_result is used if we cannot compare values. In that case, + // we compare types. 
+ JSON_IMPLEMENT_OPERATOR(<=>, // *NOPAD* + std::partial_ordering::equivalent, + std::partial_ordering::unordered, + lhs_type <=> rhs_type) // *NOPAD* + } + + /// @brief comparison: 3-way + /// @sa https://json.nlohmann.me/api/basic_json/operator_spaceship/ + template<typename ScalarType> + requires std::is_scalar_v<ScalarType> + std::partial_ordering operator<=>(ScalarType rhs) const noexcept // *NOPAD* + { + return *this <=> basic_json(rhs); // *NOPAD* + } + +#if JSON_USE_LEGACY_DISCARDED_VALUE_COMPARISON + // all operators that are computed as an odd number of inverses of others + // need to be overloaded to emulate the legacy comparison behavior + + /// @brief comparison: less than or equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_le/ + JSON_HEDLEY_DEPRECATED_FOR(3.11.0, undef JSON_USE_LEGACY_DISCARDED_VALUE_COMPARISON) + bool operator<=(const_reference rhs) const noexcept + { + if (compares_unordered(rhs, true)) + { + return false; + } + return !(rhs < *this); + } + + /// @brief comparison: less than or equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_le/ + template<typename ScalarType> + requires std::is_scalar_v<ScalarType> + bool operator<=(ScalarType rhs) const noexcept + { + return *this <= basic_json(rhs); + } + + /// @brief comparison: greater than or equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_ge/ + JSON_HEDLEY_DEPRECATED_FOR(3.11.0, undef JSON_USE_LEGACY_DISCARDED_VALUE_COMPARISON) + bool operator>=(const_reference rhs) const noexcept + { + if (compares_unordered(rhs, true)) + { + return false; + } + return !(*this < rhs); + } + + /// @brief comparison: greater than or equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_ge/ + template<typename ScalarType> + requires std::is_scalar_v<ScalarType> + bool operator>=(ScalarType rhs) const noexcept + { + return *this >= basic_json(rhs); + } +#endif +#else + /// @brief comparison: equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_eq/ + friend bool operator==(const_reference lhs, const_reference rhs) noexcept + { +#ifdef __GNUC__ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wfloat-equal" +#endif + JSON_IMPLEMENT_OPERATOR( ==, true, false, false) +#ifdef __GNUC__ +#pragma GCC diagnostic pop +#endif + } + + /// @brief comparison: equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_eq/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator==(const_reference lhs, ScalarType rhs) noexcept + { + return lhs == basic_json(rhs); + } + + /// @brief comparison: equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_eq/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator==(ScalarType lhs, const_reference rhs) noexcept + { + return basic_json(lhs) == rhs; + } + + /// @brief comparison: not equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_ne/ + friend bool operator!=(const_reference lhs, const_reference rhs) noexcept + { + if (compares_unordered(lhs, rhs, true)) + { + return false; + } + return !(lhs == rhs); + } + + /// @brief comparison: not equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_ne/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator!=(const_reference lhs, ScalarType rhs) noexcept + { + return lhs != basic_json(rhs); + } + + /// @brief 
comparison: not equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_ne/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator!=(ScalarType lhs, const_reference rhs) noexcept + { + return basic_json(lhs) != rhs; + } + + /// @brief comparison: less than + /// @sa https://json.nlohmann.me/api/basic_json/operator_lt/ + friend bool operator<(const_reference lhs, const_reference rhs) noexcept + { + // default_result is used if we cannot compare values. In that case, + // we compare types. Note we have to call the operator explicitly, + // because MSVC has problems otherwise. + JSON_IMPLEMENT_OPERATOR( <, false, false, operator<(lhs_type, rhs_type)) + } + + /// @brief comparison: less than + /// @sa https://json.nlohmann.me/api/basic_json/operator_lt/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator<(const_reference lhs, ScalarType rhs) noexcept + { + return lhs < basic_json(rhs); + } + + /// @brief comparison: less than + /// @sa https://json.nlohmann.me/api/basic_json/operator_lt/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator<(ScalarType lhs, const_reference rhs) noexcept + { + return basic_json(lhs) < rhs; + } + + /// @brief comparison: less than or equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_le/ + friend bool operator<=(const_reference lhs, const_reference rhs) noexcept + { + if (compares_unordered(lhs, rhs, true)) + { + return false; + } + return !(rhs < lhs); + } + + /// @brief comparison: less than or equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_le/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator<=(const_reference lhs, ScalarType rhs) noexcept + { + return lhs <= basic_json(rhs); + } + + /// @brief comparison: less than or equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_le/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator<=(ScalarType lhs, const_reference rhs) noexcept + { + return basic_json(lhs) <= rhs; + } + + /// @brief comparison: greater than + /// @sa https://json.nlohmann.me/api/basic_json/operator_gt/ + friend bool operator>(const_reference lhs, const_reference rhs) noexcept + { + // double inverse + if (compares_unordered(lhs, rhs)) + { + return false; + } + return !(lhs <= rhs); + } + + /// @brief comparison: greater than + /// @sa https://json.nlohmann.me/api/basic_json/operator_gt/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator>(const_reference lhs, ScalarType rhs) noexcept + { + return lhs > basic_json(rhs); + } + + /// @brief comparison: greater than + /// @sa https://json.nlohmann.me/api/basic_json/operator_gt/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator>(ScalarType lhs, const_reference rhs) noexcept + { + return basic_json(lhs) > rhs; + } + + /// @brief comparison: greater than or equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_ge/ + friend bool operator>=(const_reference lhs, const_reference rhs) noexcept + { + if (compares_unordered(lhs, rhs, true)) + { + return false; + } + return !(lhs < rhs); + } + 
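(Editorial aside: for orientation, a minimal usage sketch of the comparison semantics implemented by JSON_IMPLEMENT_OPERATOR and compares_unordered above. Illustrative only, not part of the patched header; it assumes the library's default nlohmann::json specialization.)

#include <cmath>
#include <nlohmann/json.hpp>
using nlohmann::json;

int main()
{
    // mixed integer/float operands are promoted to number_float_t before comparing
    json a = 1;                  // number_integer
    json b = 1.0;                // number_float
    bool eq = (a == b);          // true

    // NaN compares unordered: every ordering comparison against it yields false
    json n = std::nan("");       // number_float holding NaN
    bool lt = (n < a);           // false
    bool ge = (n >= a);          // false

    return (eq && !lt && !ge) ? 0 : 1;
}

(The same results hold whether the three-way operator<=> path or the legacy two-way operators are compiled in.)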
+ /// @brief comparison: greater than or equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_ge/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator>=(const_reference lhs, ScalarType rhs) noexcept + { + return lhs >= basic_json(rhs); + } + + /// @brief comparison: greater than or equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_ge/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator>=(ScalarType lhs, const_reference rhs) noexcept + { + return basic_json(lhs) >= rhs; + } +#endif + +#undef JSON_IMPLEMENT_OPERATOR + + /// @} + + /////////////////// + // serialization // + /////////////////// + + /// @name serialization + /// @{ +#ifndef JSON_NO_IO + /// @brief serialize to stream + /// @sa https://json.nlohmann.me/api/basic_json/operator_ltlt/ + friend std::ostream& operator<<(std::ostream& o, const basic_json& j) + { + // read width member and use it as indentation parameter if nonzero + const bool pretty_print = o.width() > 0; + const auto indentation = pretty_print ? o.width() : 0; + + // reset width to 0 for subsequent calls to this stream + o.width(0); + + // do the actual serialization + serializer s(detail::output_adapter<char>(o), o.fill()); + s.dump(j, pretty_print, false, static_cast<unsigned int>(indentation)); + return o; + } + + /// @brief serialize to stream + /// @sa https://json.nlohmann.me/api/basic_json/operator_ltlt/ + /// @deprecated This function is deprecated since 3.0.0 and will be removed in + /// version 4.0.0 of the library. Please use + /// operator<<(std::ostream&, const basic_json&) instead; that is, + /// replace calls like `j >> o;` with `o << j;`. 
+ JSON_HEDLEY_DEPRECATED_FOR(3.0.0, operator<<(std::ostream&, const basic_json&)) + friend std::ostream& operator>>(const basic_json& j, std::ostream& o) + { + return o << j; + } +#endif // JSON_NO_IO + /// @} + + + ///////////////////// + // deserialization // + ///////////////////// + + /// @name deserialization + /// @{ + + /// @brief deserialize from a compatible input + /// @sa https://json.nlohmann.me/api/basic_json/parse/ + template<typename InputType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json parse(InputType&& i, + const parser_callback_t cb = nullptr, + const bool allow_exceptions = true, + const bool ignore_comments = false) + { + basic_json result; + parser(detail::input_adapter(std::forward<InputType>(i)), cb, allow_exceptions, ignore_comments).parse(true, result); + return result; + } + + /// @brief deserialize from a pair of character iterators + /// @sa https://json.nlohmann.me/api/basic_json/parse/ + template<typename IteratorType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json parse(IteratorType first, + IteratorType last, + const parser_callback_t cb = nullptr, + const bool allow_exceptions = true, + const bool ignore_comments = false) + { + basic_json result; + parser(detail::input_adapter(std::move(first), std::move(last)), cb, allow_exceptions, ignore_comments).parse(true, result); + return result; + } + + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, parse(ptr, ptr + len)) + static basic_json parse(detail::span_input_adapter&& i, + const parser_callback_t cb = nullptr, + const bool allow_exceptions = true, + const bool ignore_comments = false) + { + basic_json result; + parser(i.get(), cb, allow_exceptions, ignore_comments).parse(true, result); + return result; + } + + /// @brief check if the input is valid JSON + /// @sa https://json.nlohmann.me/api/basic_json/accept/ + template<typename InputType> + static bool accept(InputType&& i, + const bool ignore_comments = false) + { + return parser(detail::input_adapter(std::forward<InputType>(i)), nullptr, false, ignore_comments).accept(true); + } + + /// @brief check if the input is valid JSON + /// @sa https://json.nlohmann.me/api/basic_json/accept/ + template<typename IteratorType> + static bool accept(IteratorType first, IteratorType last, + const bool ignore_comments = false) + { + return parser(detail::input_adapter(std::move(first), std::move(last)), nullptr, false, ignore_comments).accept(true); + } + + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, accept(ptr, ptr + len)) + static bool accept(detail::span_input_adapter&& i, + const bool ignore_comments = false) + { + return parser(i.get(), nullptr, false, ignore_comments).accept(true); + } + + /// @brief generate SAX events + /// @sa https://json.nlohmann.me/api/basic_json/sax_parse/ + template <typename InputType, typename SAX> + JSON_HEDLEY_NON_NULL(2) + static bool sax_parse(InputType&& i, SAX* sax, + input_format_t format = input_format_t::json, + const bool strict = true, + const bool ignore_comments = false) + { + auto ia = detail::input_adapter(std::forward<InputType>(i)); + return format == input_format_t::json + ? 
parser(std::move(ia), nullptr, true, ignore_comments).sax_parse(sax, strict) + : detail::binary_reader<basic_json, decltype(ia), SAX>(std::move(ia), format).sax_parse(format, sax, strict); + } + + /// @brief generate SAX events + /// @sa https://json.nlohmann.me/api/basic_json/sax_parse/ + template<class IteratorType, class SAX> + JSON_HEDLEY_NON_NULL(3) + static bool sax_parse(IteratorType first, IteratorType last, SAX* sax, + input_format_t format = input_format_t::json, + const bool strict = true, + const bool ignore_comments = false) + { + auto ia = detail::input_adapter(std::move(first), std::move(last)); + return format == input_format_t::json + ? parser(std::move(ia), nullptr, true, ignore_comments).sax_parse(sax, strict) + : detail::binary_reader<basic_json, decltype(ia), SAX>(std::move(ia), format).sax_parse(format, sax, strict); + } + + /// @brief generate SAX events + /// @sa https://json.nlohmann.me/api/basic_json/sax_parse/ + /// @deprecated This function is deprecated since 3.8.0 and will be removed in + /// version 4.0.0 of the library. Please use + /// sax_parse(ptr, ptr + len) instead. + template <typename SAX> + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, sax_parse(ptr, ptr + len, ...)) + JSON_HEDLEY_NON_NULL(2) + static bool sax_parse(detail::span_input_adapter&& i, SAX* sax, + input_format_t format = input_format_t::json, + const bool strict = true, + const bool ignore_comments = false) + { + auto ia = i.get(); + return format == input_format_t::json + // NOLINTNEXTLINE(hicpp-move-const-arg,performance-move-const-arg) + ? parser(std::move(ia), nullptr, true, ignore_comments).sax_parse(sax, strict) + // NOLINTNEXTLINE(hicpp-move-const-arg,performance-move-const-arg) + : detail::binary_reader<basic_json, decltype(ia), SAX>(std::move(ia), format).sax_parse(format, sax, strict); + } +#ifndef JSON_NO_IO + /// @brief deserialize from stream + /// @sa https://json.nlohmann.me/api/basic_json/operator_gtgt/ + /// @deprecated This stream operator is deprecated since 3.0.0 and will be removed in + /// version 4.0.0 of the library. Please use + /// operator>>(std::istream&, basic_json&) instead; that is, + /// replace calls like `j << i;` with `i >> j;`. 
+ JSON_HEDLEY_DEPRECATED_FOR(3.0.0, operator>>(std::istream&, basic_json&)) + friend std::istream& operator<<(basic_json& j, std::istream& i) + { + return operator>>(i, j); + } + + /// @brief deserialize from stream + /// @sa https://json.nlohmann.me/api/basic_json/operator_gtgt/ + friend std::istream& operator>>(std::istream& i, basic_json& j) + { + parser(detail::input_adapter(i)).parse(false, j); + return i; + } +#endif // JSON_NO_IO + /// @} + + /////////////////////////// + // convenience functions // + /////////////////////////// + + /// @brief return the type as string + /// @sa https://json.nlohmann.me/api/basic_json/type_name/ + JSON_HEDLEY_RETURNS_NON_NULL + const char* type_name() const noexcept + { + switch (m_type) + { + case value_t::null: + return "null"; + case value_t::object: + return "object"; + case value_t::array: + return "array"; + case value_t::string: + return "string"; + case value_t::boolean: + return "boolean"; + case value_t::binary: + return "binary"; + case value_t::discarded: + return "discarded"; + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + default: + return "number"; + } + } + + + JSON_PRIVATE_UNLESS_TESTED: + ////////////////////// + // member variables // + ////////////////////// + + /// the type of the current element + value_t m_type = value_t::null; + + /// the value of the current element + json_value m_value = {}; + +#if JSON_DIAGNOSTICS + /// a pointer to a parent value (for debugging purposes) + basic_json* m_parent = nullptr; +#endif + + ////////////////////////////////////////// + // binary serialization/deserialization // + ////////////////////////////////////////// + + /// @name binary serialization/deserialization support + /// @{ + + public: + /// @brief create a CBOR serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_cbor/ + static std::vector<std::uint8_t> to_cbor(const basic_json& j) + { + std::vector<std::uint8_t> result; + to_cbor(j, result); + return result; + } + + /// @brief create a CBOR serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_cbor/ + static void to_cbor(const basic_json& j, detail::output_adapter<std::uint8_t> o) + { + binary_writer<std::uint8_t>(o).write_cbor(j); + } + + /// @brief create a CBOR serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_cbor/ + static void to_cbor(const basic_json& j, detail::output_adapter<char> o) + { + binary_writer<char>(o).write_cbor(j); + } + + /// @brief create a MessagePack serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_msgpack/ + static std::vector<std::uint8_t> to_msgpack(const basic_json& j) + { + std::vector<std::uint8_t> result; + to_msgpack(j, result); + return result; + } + + /// @brief create a MessagePack serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_msgpack/ + static void to_msgpack(const basic_json& j, detail::output_adapter<std::uint8_t> o) + { + binary_writer<std::uint8_t>(o).write_msgpack(j); + } + + /// @brief create a MessagePack serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_msgpack/ + static void to_msgpack(const basic_json& j, detail::output_adapter<char> o) + { + binary_writer<char>(o).write_msgpack(j); + } + + /// @brief create a UBJSON serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_ubjson/ + static std::vector<std::uint8_t> 
to_ubjson(const basic_json& j, + const bool use_size = false, + const bool use_type = false) + { + std::vector<std::uint8_t> result; + to_ubjson(j, result, use_size, use_type); + return result; + } + + /// @brief create a UBJSON serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_ubjson/ + static void to_ubjson(const basic_json& j, detail::output_adapter<std::uint8_t> o, + const bool use_size = false, const bool use_type = false) + { + binary_writer<std::uint8_t>(o).write_ubjson(j, use_size, use_type); + } + + /// @brief create a UBJSON serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_ubjson/ + static void to_ubjson(const basic_json& j, detail::output_adapter<char> o, + const bool use_size = false, const bool use_type = false) + { + binary_writer<char>(o).write_ubjson(j, use_size, use_type); + } + + /// @brief create a BJData serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_bjdata/ + static std::vector<std::uint8_t> to_bjdata(const basic_json& j, + const bool use_size = false, + const bool use_type = false) + { + std::vector<std::uint8_t> result; + to_bjdata(j, result, use_size, use_type); + return result; + } + + /// @brief create a BJData serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_bjdata/ + static void to_bjdata(const basic_json& j, detail::output_adapter<std::uint8_t> o, + const bool use_size = false, const bool use_type = false) + { + binary_writer<std::uint8_t>(o).write_ubjson(j, use_size, use_type, true, true); + } + + /// @brief create a BJData serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_bjdata/ + static void to_bjdata(const basic_json& j, detail::output_adapter<char> o, + const bool use_size = false, const bool use_type = false) + { + binary_writer<char>(o).write_ubjson(j, use_size, use_type, true, true); + } + + /// @brief create a BSON serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_bson/ + static std::vector<std::uint8_t> to_bson(const basic_json& j) + { + std::vector<std::uint8_t> result; + to_bson(j, result); + return result; + } + + /// @brief create a BSON serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_bson/ + static void to_bson(const basic_json& j, detail::output_adapter<std::uint8_t> o) + { + binary_writer<std::uint8_t>(o).write_bson(j); + } + + /// @brief create a BSON serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_bson/ + static void to_bson(const basic_json& j, detail::output_adapter<char> o) + { + binary_writer<char>(o).write_bson(j); + } + + /// @brief create a JSON value from an input in CBOR format + /// @sa https://json.nlohmann.me/api/basic_json/from_cbor/ + template<typename InputType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_cbor(InputType&& i, + const bool strict = true, + const bool allow_exceptions = true, + const cbor_tag_handler_t tag_handler = cbor_tag_handler_t::error) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::forward<InputType>(i)); + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::cbor).sax_parse(input_format_t::cbor, &sdp, strict, tag_handler); + return res ? 
result : basic_json(value_t::discarded); + } + + /// @brief create a JSON value from an input in CBOR format + /// @sa https://json.nlohmann.me/api/basic_json/from_cbor/ + template<typename IteratorType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_cbor(IteratorType first, IteratorType last, + const bool strict = true, + const bool allow_exceptions = true, + const cbor_tag_handler_t tag_handler = cbor_tag_handler_t::error) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::move(first), std::move(last)); + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::cbor).sax_parse(input_format_t::cbor, &sdp, strict, tag_handler); + return res ? result : basic_json(value_t::discarded); + } + + template<typename T> + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_cbor(ptr, ptr + len)) + static basic_json from_cbor(const T* ptr, std::size_t len, + const bool strict = true, + const bool allow_exceptions = true, + const cbor_tag_handler_t tag_handler = cbor_tag_handler_t::error) + { + return from_cbor(ptr, ptr + len, strict, allow_exceptions, tag_handler); + } + + + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_cbor(ptr, ptr + len)) + static basic_json from_cbor(detail::span_input_adapter&& i, + const bool strict = true, + const bool allow_exceptions = true, + const cbor_tag_handler_t tag_handler = cbor_tag_handler_t::error) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = i.get(); + // NOLINTNEXTLINE(hicpp-move-const-arg,performance-move-const-arg) + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::cbor).sax_parse(input_format_t::cbor, &sdp, strict, tag_handler); + return res ? result : basic_json(value_t::discarded); + } + + /// @brief create a JSON value from an input in MessagePack format + /// @sa https://json.nlohmann.me/api/basic_json/from_msgpack/ + template<typename InputType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_msgpack(InputType&& i, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::forward<InputType>(i)); + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::msgpack).sax_parse(input_format_t::msgpack, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + + /// @brief create a JSON value from an input in MessagePack format + /// @sa https://json.nlohmann.me/api/basic_json/from_msgpack/ + template<typename IteratorType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_msgpack(IteratorType first, IteratorType last, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::move(first), std::move(last)); + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::msgpack).sax_parse(input_format_t::msgpack, &sdp, strict); + return res ? 
result : basic_json(value_t::discarded); + } + + template<typename T> + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_msgpack(ptr, ptr + len)) + static basic_json from_msgpack(const T* ptr, std::size_t len, + const bool strict = true, + const bool allow_exceptions = true) + { + return from_msgpack(ptr, ptr + len, strict, allow_exceptions); + } + + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_msgpack(ptr, ptr + len)) + static basic_json from_msgpack(detail::span_input_adapter&& i, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = i.get(); + // NOLINTNEXTLINE(hicpp-move-const-arg,performance-move-const-arg) + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::msgpack).sax_parse(input_format_t::msgpack, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + + /// @brief create a JSON value from an input in UBJSON format + /// @sa https://json.nlohmann.me/api/basic_json/from_ubjson/ + template<typename InputType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_ubjson(InputType&& i, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::forward<InputType>(i)); + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::ubjson).sax_parse(input_format_t::ubjson, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + + /// @brief create a JSON value from an input in UBJSON format + /// @sa https://json.nlohmann.me/api/basic_json/from_ubjson/ + template<typename IteratorType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_ubjson(IteratorType first, IteratorType last, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::move(first), std::move(last)); + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::ubjson).sax_parse(input_format_t::ubjson, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + + template<typename T> + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_ubjson(ptr, ptr + len)) + static basic_json from_ubjson(const T* ptr, std::size_t len, + const bool strict = true, + const bool allow_exceptions = true) + { + return from_ubjson(ptr, ptr + len, strict, allow_exceptions); + } + + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_ubjson(ptr, ptr + len)) + static basic_json from_ubjson(detail::span_input_adapter&& i, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = i.get(); + // NOLINTNEXTLINE(hicpp-move-const-arg,performance-move-const-arg) + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::ubjson).sax_parse(input_format_t::ubjson, &sdp, strict); + return res ? 
result : basic_json(value_t::discarded); + } + + + /// @brief create a JSON value from an input in BJData format + /// @sa https://json.nlohmann.me/api/basic_json/from_bjdata/ + template<typename InputType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_bjdata(InputType&& i, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::forward<InputType>(i)); + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::bjdata).sax_parse(input_format_t::bjdata, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + + /// @brief create a JSON value from an input in BJData format + /// @sa https://json.nlohmann.me/api/basic_json/from_bjdata/ + template<typename IteratorType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_bjdata(IteratorType first, IteratorType last, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::move(first), std::move(last)); + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::bjdata).sax_parse(input_format_t::bjdata, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + + /// @brief create a JSON value from an input in BSON format + /// @sa https://json.nlohmann.me/api/basic_json/from_bson/ + template<typename InputType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_bson(InputType&& i, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::forward<InputType>(i)); + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::bson).sax_parse(input_format_t::bson, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + + /// @brief create a JSON value from an input in BSON format + /// @sa https://json.nlohmann.me/api/basic_json/from_bson/ + template<typename IteratorType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_bson(IteratorType first, IteratorType last, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::move(first), std::move(last)); + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::bson).sax_parse(input_format_t::bson, &sdp, strict); + return res ? 
result : basic_json(value_t::discarded); + } + + template<typename T> + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_bson(ptr, ptr + len)) + static basic_json from_bson(const T* ptr, std::size_t len, + const bool strict = true, + const bool allow_exceptions = true) + { + return from_bson(ptr, ptr + len, strict, allow_exceptions); + } + + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_bson(ptr, ptr + len)) + static basic_json from_bson(detail::span_input_adapter&& i, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = i.get(); + // NOLINTNEXTLINE(hicpp-move-const-arg,performance-move-const-arg) + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::bson).sax_parse(input_format_t::bson, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + /// @} + + ////////////////////////// + // JSON Pointer support // + ////////////////////////// + + /// @name JSON Pointer functions + /// @{ + + /// @brief access specified element via JSON Pointer + /// @sa https://json.nlohmann.me/api/basic_json/operator%5B%5D/ + reference operator[](const json_pointer& ptr) + { + return ptr.get_unchecked(this); + } + + template<typename BasicJsonType, detail::enable_if_t<detail::is_basic_json<BasicJsonType>::value, int> = 0> + JSON_HEDLEY_DEPRECATED_FOR(3.11.0, basic_json::json_pointer or nlohmann::json_pointer<basic_json::string_t>) // NOLINT(readability/alt_tokens) + reference operator[](const ::nlohmann::json_pointer<BasicJsonType>& ptr) + { + return ptr.get_unchecked(this); + } + + /// @brief access specified element via JSON Pointer + /// @sa https://json.nlohmann.me/api/basic_json/operator%5B%5D/ + const_reference operator[](const json_pointer& ptr) const + { + return ptr.get_unchecked(this); + } + + template<typename BasicJsonType, detail::enable_if_t<detail::is_basic_json<BasicJsonType>::value, int> = 0> + JSON_HEDLEY_DEPRECATED_FOR(3.11.0, basic_json::json_pointer or nlohmann::json_pointer<basic_json::string_t>) // NOLINT(readability/alt_tokens) + const_reference operator[](const ::nlohmann::json_pointer<BasicJsonType>& ptr) const + { + return ptr.get_unchecked(this); + } + + /// @brief access specified element via JSON Pointer + /// @sa https://json.nlohmann.me/api/basic_json/at/ + reference at(const json_pointer& ptr) + { + return ptr.get_checked(this); + } + + template<typename BasicJsonType> + JSON_HEDLEY_DEPRECATED_FOR(3.11.0, basic_json::json_pointer or nlohmann::json_pointer<basic_json::string_t>) // NOLINT(readability/alt_tokens) + reference at(const ::nlohmann::json_pointer<BasicJsonType>& ptr) + { + return ptr.get_checked(this); + } + + /// @brief access specified element via JSON Pointer + /// @sa https://json.nlohmann.me/api/basic_json/at/ + const_reference at(const json_pointer& ptr) const + { + return ptr.get_checked(this); + } + + template<typename BasicJsonType> + JSON_HEDLEY_DEPRECATED_FOR(3.11.0, basic_json::json_pointer or nlohmann::json_pointer<basic_json::string_t>) // NOLINT(readability/alt_tokens) + const_reference at(const ::nlohmann::json_pointer<BasicJsonType>& ptr) const + { + return ptr.get_checked(this); + } + + /// @brief return flattened JSON value + /// @sa https://json.nlohmann.me/api/basic_json/flatten/ + basic_json flatten() const + { + basic_json result(value_t::object); + json_pointer::flatten("", *this, result); + return result; + } + + /// @brief 
unflatten a previously flattened JSON value + /// @sa https://json.nlohmann.me/api/basic_json/unflatten/ + basic_json unflatten() const + { + return json_pointer::unflatten(*this); + } + + /// @} + + ////////////////////////// + // JSON Patch functions // + ////////////////////////// + + /// @name JSON Patch functions + /// @{ + + /// @brief applies a JSON patch + /// @sa https://json.nlohmann.me/api/basic_json/patch/ + basic_json patch(const basic_json& json_patch) const + { + // make a working copy to apply the patch to + basic_json result = *this; + + // the valid JSON Patch operations + enum class patch_operations {add, remove, replace, move, copy, test, invalid}; + + const auto get_op = [](const std::string & op) + { + if (op == "add") + { + return patch_operations::add; + } + if (op == "remove") + { + return patch_operations::remove; + } + if (op == "replace") + { + return patch_operations::replace; + } + if (op == "move") + { + return patch_operations::move; + } + if (op == "copy") + { + return patch_operations::copy; + } + if (op == "test") + { + return patch_operations::test; + } + + return patch_operations::invalid; + }; + + // wrapper for "add" operation; add value at ptr + const auto operation_add = [&result](json_pointer & ptr, basic_json val) + { + // adding to the root of the target document means replacing it + if (ptr.empty()) + { + result = val; + return; + } + + // make sure the top element of the pointer exists + json_pointer top_pointer = ptr.top(); + if (top_pointer != ptr) + { + result.at(top_pointer); + } + + // get reference to parent of JSON pointer ptr + const auto last_path = ptr.back(); + ptr.pop_back(); + basic_json& parent = result[ptr]; + + switch (parent.m_type) + { + case value_t::null: + case value_t::object: + { + // use operator[] to add value + parent[last_path] = val; + break; + } + + case value_t::array: + { + if (last_path == "-") + { + // special case: append to back + parent.push_back(val); + } + else + { + const auto idx = json_pointer::template array_index<basic_json_t>(last_path); + if (JSON_HEDLEY_UNLIKELY(idx > parent.size())) + { + // avoid undefined behavior + JSON_THROW(out_of_range::create(401, detail::concat("array index ", std::to_string(idx), " is out of range"), &parent)); + } + + // default case: insert add offset + parent.insert(parent.begin() + static_cast<difference_type>(idx), val); + } + break; + } + + // if there exists a parent it cannot be primitive + case value_t::string: // LCOV_EXCL_LINE + case value_t::boolean: // LCOV_EXCL_LINE + case value_t::number_integer: // LCOV_EXCL_LINE + case value_t::number_unsigned: // LCOV_EXCL_LINE + case value_t::number_float: // LCOV_EXCL_LINE + case value_t::binary: // LCOV_EXCL_LINE + case value_t::discarded: // LCOV_EXCL_LINE + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) LCOV_EXCL_LINE + } + }; + + // wrapper for "remove" operation; remove value at ptr + const auto operation_remove = [this, &result](json_pointer & ptr) + { + // get reference to parent of JSON pointer ptr + const auto last_path = ptr.back(); + ptr.pop_back(); + basic_json& parent = result.at(ptr); + + // remove child + if (parent.is_object()) + { + // perform range check + auto it = parent.find(last_path); + if (JSON_HEDLEY_LIKELY(it != parent.end())) + { + parent.erase(it); + } + else + { + JSON_THROW(out_of_range::create(403, detail::concat("key '", last_path, "' not found"), this)); + } + } + else if (parent.is_array()) + { + // note erase performs 
range check + parent.erase(json_pointer::template array_index<basic_json_t>(last_path)); + } + }; + + // type check: top level value must be an array + if (JSON_HEDLEY_UNLIKELY(!json_patch.is_array())) + { + JSON_THROW(parse_error::create(104, 0, "JSON patch must be an array of objects", &json_patch)); + } + + // iterate and apply the operations + for (const auto& val : json_patch) + { + // wrapper to get a value for an operation + const auto get_value = [&val](const std::string & op, + const std::string & member, + bool string_type) -> basic_json & + { + // find value + auto it = val.m_value.object->find(member); + + // context-sensitive error message + const auto error_msg = (op == "op") ? "operation" : detail::concat("operation '", op, '\''); + + // check if desired value is present + if (JSON_HEDLEY_UNLIKELY(it == val.m_value.object->end())) + { + // NOLINTNEXTLINE(performance-inefficient-string-concatenation) + JSON_THROW(parse_error::create(105, 0, detail::concat(error_msg, " must have member '", member, "'"), &val)); + } + + // check if result is of type string + if (JSON_HEDLEY_UNLIKELY(string_type && !it->second.is_string())) + { + // NOLINTNEXTLINE(performance-inefficient-string-concatenation) + JSON_THROW(parse_error::create(105, 0, detail::concat(error_msg, " must have string member '", member, "'"), &val)); + } + + // no error: return value + return it->second; + }; + + // type check: every element of the array must be an object + if (JSON_HEDLEY_UNLIKELY(!val.is_object())) + { + JSON_THROW(parse_error::create(104, 0, "JSON patch must be an array of objects", &val)); + } + + // collect mandatory members + const auto op = get_value("op", "op", true).template get<std::string>(); + const auto path = get_value(op, "path", true).template get<std::string>(); + json_pointer ptr(path); + + switch (get_op(op)) + { + case patch_operations::add: + { + operation_add(ptr, get_value("add", "value", false)); + break; + } + + case patch_operations::remove: + { + operation_remove(ptr); + break; + } + + case patch_operations::replace: + { + // the "path" location must exist - use at() + result.at(ptr) = get_value("replace", "value", false); + break; + } + + case patch_operations::move: + { + const auto from_path = get_value("move", "from", true).template get<std::string>(); + json_pointer from_ptr(from_path); + + // the "from" location must exist - use at() + basic_json v = result.at(from_ptr); + + // The move operation is functionally identical to a + // "remove" operation on the "from" location, followed + // immediately by an "add" operation at the target + // location with the value that was just removed. + operation_remove(from_ptr); + operation_add(ptr, v); + break; + } + + case patch_operations::copy: + { + const auto from_path = get_value("copy", "from", true).template get<std::string>(); + const json_pointer from_ptr(from_path); + + // the "from" location must exist - use at() + basic_json v = result.at(from_ptr); + + // The copy is functionally identical to an "add" + // operation at the target location using the value + // specified in the "from" member. 
+ operation_add(ptr, v); + break; + } + + case patch_operations::test: + { + bool success = false; + JSON_TRY + { + // check if "value" matches the one at "path" + // the "path" location must exist - use at() + success = (result.at(ptr) == get_value("test", "value", false)); + } + JSON_INTERNAL_CATCH (out_of_range&) + { + // ignore out of range errors: success remains false + } + + // throw an exception if test fails + if (JSON_HEDLEY_UNLIKELY(!success)) + { + JSON_THROW(other_error::create(501, detail::concat("unsuccessful: ", val.dump()), &val)); + } + + break; + } + + case patch_operations::invalid: + default: + { + // op must be "add", "remove", "replace", "move", "copy", or + // "test" + JSON_THROW(parse_error::create(105, 0, detail::concat("operation value '", op, "' is invalid"), &val)); + } + } + } + + return result; + } + + /// @brief creates a diff as a JSON patch + /// @sa https://json.nlohmann.me/api/basic_json/diff/ + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json diff(const basic_json& source, const basic_json& target, + const std::string& path = "") + { + // the patch + basic_json result(value_t::array); + + // if the values are the same, return empty patch + if (source == target) + { + return result; + } + + if (source.type() != target.type()) + { + // different types: replace value + result.push_back( + { + {"op", "replace"}, {"path", path}, {"value", target} + }); + return result; + } + + switch (source.type()) + { + case value_t::array: + { + // first pass: traverse common elements + std::size_t i = 0; + while (i < source.size() && i < target.size()) + { + // recursive call to compare array values at index i + auto temp_diff = diff(source[i], target[i], detail::concat(path, '/', std::to_string(i))); + result.insert(result.end(), temp_diff.begin(), temp_diff.end()); + ++i; + } + + // We now reached the end of at least one array + // in a second pass, traverse the remaining elements + + // remove my remaining elements + const auto end_index = static_cast<difference_type>(result.size()); + while (i < source.size()) + { + // add operations in reverse order to avoid invalid + // indices + result.insert(result.begin() + end_index, object( + { + {"op", "remove"}, + {"path", detail::concat(path, '/', std::to_string(i))} + })); + ++i; + } + + // add other remaining elements + while (i < target.size()) + { + result.push_back( + { + {"op", "add"}, + {"path", detail::concat(path, "/-")}, + {"value", target[i]} + }); + ++i; + } + + break; + } + + case value_t::object: + { + // first pass: traverse this object's elements + for (auto it = source.cbegin(); it != source.cend(); ++it) + { + // escape the key name to be used in a JSON patch + const auto path_key = detail::concat(path, '/', detail::escape(it.key())); + + if (target.find(it.key()) != target.end()) + { + // recursive call to compare object values at key it + auto temp_diff = diff(it.value(), target[it.key()], path_key); + result.insert(result.end(), temp_diff.begin(), temp_diff.end()); + } + else + { + // found a key that is not in o -> remove it + result.push_back(object( + { + {"op", "remove"}, {"path", path_key} + })); + } + } + + // second pass: traverse other object's elements + for (auto it = target.cbegin(); it != target.cend(); ++it) + { + if (source.find(it.key()) == source.end()) + { + // found a key that is not in this -> add it + const auto path_key = detail::concat(path, '/', detail::escape(it.key())); + result.push_back( + { + {"op", "add"}, {"path", path_key}, + {"value", it.value()} + }); + } + } + + 
break; + } + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + // both primitive type: replace value + result.push_back( + { + {"op", "replace"}, {"path", path}, {"value", target} + }); + break; + } + } + + return result; + } + + /// @} + + //////////////////////////////// + // JSON Merge Patch functions // + //////////////////////////////// + + /// @name JSON Merge Patch functions + /// @{ + + /// @brief applies a JSON Merge Patch + /// @sa https://json.nlohmann.me/api/basic_json/merge_patch/ + void merge_patch(const basic_json& apply_patch) + { + if (apply_patch.is_object()) + { + if (!is_object()) + { + *this = object(); + } + for (auto it = apply_patch.begin(); it != apply_patch.end(); ++it) + { + if (it.value().is_null()) + { + erase(it.key()); + } + else + { + operator[](it.key()).merge_patch(it.value()); + } + } + } + else + { + *this = apply_patch; + } + } + + /// @} +}; + +/// @brief user-defined to_string function for JSON values +/// @sa https://json.nlohmann.me/api/basic_json/to_string/ +NLOHMANN_BASIC_JSON_TPL_DECLARATION +std::string to_string(const NLOHMANN_BASIC_JSON_TPL& j) +{ + return j.dump(); +} + +} // namespace nlohmann + +/////////////////////// +// nonmember support // +/////////////////////// + +namespace std // NOLINT(cert-dcl58-cpp) +{ + +/// @brief hash value for JSON objects +/// @sa https://json.nlohmann.me/api/basic_json/std_hash/ +NLOHMANN_BASIC_JSON_TPL_DECLARATION +struct hash<nlohmann::NLOHMANN_BASIC_JSON_TPL> +{ + std::size_t operator()(const nlohmann::NLOHMANN_BASIC_JSON_TPL& j) const + { + return nlohmann::detail::hash(j); + } +}; + +// specialization for std::less<value_t> +template<> +struct less< ::nlohmann::detail::value_t> // do not remove the space after '<', see https://github.com/nlohmann/json/pull/679 +{ + /*! + @brief compare two value_t enum values + @since version 3.0.0 + */ + bool operator()(::nlohmann::detail::value_t lhs, + ::nlohmann::detail::value_t rhs) const noexcept + { +#if JSON_HAS_THREE_WAY_COMPARISON + return std::is_lt(lhs <=> rhs); // *NOPAD* +#else + return ::nlohmann::detail::operator<(lhs, rhs); +#endif + } +}; + +// C++20 prohibit function specialization in the std namespace. 
+#ifndef JSON_HAS_CPP_20 + +/// @brief exchanges the values of two JSON objects +/// @sa https://json.nlohmann.me/api/basic_json/std_swap/ +NLOHMANN_BASIC_JSON_TPL_DECLARATION +inline void swap(nlohmann::NLOHMANN_BASIC_JSON_TPL& j1, nlohmann::NLOHMANN_BASIC_JSON_TPL& j2) noexcept( // NOLINT(readability-inconsistent-declaration-parameter-name) + is_nothrow_move_constructible<nlohmann::NLOHMANN_BASIC_JSON_TPL>::value&& // NOLINT(misc-redundant-expression) + is_nothrow_move_assignable<nlohmann::NLOHMANN_BASIC_JSON_TPL>::value) +{ + j1.swap(j2); +} + +#endif + +} // namespace std + +/// @brief user-defined string literal for JSON values +/// @sa https://json.nlohmann.me/api/basic_json/operator_literal_json/ +JSON_HEDLEY_NON_NULL(1) +inline nlohmann::json operator "" _json(const char* s, std::size_t n) +{ + return nlohmann::json::parse(s, s + n); +} + +/// @brief user-defined string literal for JSON pointer +/// @sa https://json.nlohmann.me/api/basic_json/operator_literal_json_pointer/ +JSON_HEDLEY_NON_NULL(1) +inline nlohmann::json::json_pointer operator "" _json_pointer(const char* s, std::size_t n) +{ + return nlohmann::json::json_pointer(std::string(s, n)); +} + +// #include <nlohmann/detail/macro_unscope.hpp> + + +// restore clang diagnostic settings +#if defined(__clang__) + #pragma clang diagnostic pop +#endif + +// clean up +#undef JSON_ASSERT +#undef JSON_INTERNAL_CATCH +#undef JSON_THROW +#undef JSON_PRIVATE_UNLESS_TESTED +#undef NLOHMANN_BASIC_JSON_TPL_DECLARATION +#undef NLOHMANN_BASIC_JSON_TPL +#undef JSON_EXPLICIT +#undef NLOHMANN_CAN_CALL_STD_FUNC_IMPL +#undef JSON_INLINE_VARIABLE +#undef JSON_NO_UNIQUE_ADDRESS + +#ifndef JSON_TEST_KEEP_MACROS + #undef JSON_CATCH + #undef JSON_TRY + #undef JSON_HAS_CPP_11 + #undef JSON_HAS_CPP_14 + #undef JSON_HAS_CPP_17 + #undef JSON_HAS_CPP_20 + #undef JSON_HAS_FILESYSTEM + #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM + #undef JSON_HAS_THREE_WAY_COMPARISON + #undef JSON_HAS_RANGES + #undef JSON_USE_LEGACY_DISCARDED_VALUE_COMPARISON +#endif + +// #include <nlohmann/thirdparty/hedley/hedley_undef.hpp> + + +#undef JSON_HEDLEY_ALWAYS_INLINE +#undef JSON_HEDLEY_ARM_VERSION +#undef JSON_HEDLEY_ARM_VERSION_CHECK +#undef JSON_HEDLEY_ARRAY_PARAM +#undef JSON_HEDLEY_ASSUME +#undef JSON_HEDLEY_BEGIN_C_DECLS +#undef JSON_HEDLEY_CLANG_HAS_ATTRIBUTE +#undef JSON_HEDLEY_CLANG_HAS_BUILTIN +#undef JSON_HEDLEY_CLANG_HAS_CPP_ATTRIBUTE +#undef JSON_HEDLEY_CLANG_HAS_DECLSPEC_DECLSPEC_ATTRIBUTE +#undef JSON_HEDLEY_CLANG_HAS_EXTENSION +#undef JSON_HEDLEY_CLANG_HAS_FEATURE +#undef JSON_HEDLEY_CLANG_HAS_WARNING +#undef JSON_HEDLEY_COMPCERT_VERSION +#undef JSON_HEDLEY_COMPCERT_VERSION_CHECK +#undef JSON_HEDLEY_CONCAT +#undef JSON_HEDLEY_CONCAT3 +#undef JSON_HEDLEY_CONCAT3_EX +#undef JSON_HEDLEY_CONCAT_EX +#undef JSON_HEDLEY_CONST +#undef JSON_HEDLEY_CONSTEXPR +#undef JSON_HEDLEY_CONST_CAST +#undef JSON_HEDLEY_CPP_CAST +#undef JSON_HEDLEY_CRAY_VERSION +#undef JSON_HEDLEY_CRAY_VERSION_CHECK +#undef JSON_HEDLEY_C_DECL +#undef JSON_HEDLEY_DEPRECATED +#undef JSON_HEDLEY_DEPRECATED_FOR +#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL +#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_ +#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED +#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES +#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS +#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION +#undef JSON_HEDLEY_DIAGNOSTIC_POP +#undef JSON_HEDLEY_DIAGNOSTIC_PUSH +#undef JSON_HEDLEY_DMC_VERSION +#undef JSON_HEDLEY_DMC_VERSION_CHECK +#undef JSON_HEDLEY_EMPTY_BASES 
+#undef JSON_HEDLEY_EMSCRIPTEN_VERSION +#undef JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK +#undef JSON_HEDLEY_END_C_DECLS +#undef JSON_HEDLEY_FLAGS +#undef JSON_HEDLEY_FLAGS_CAST +#undef JSON_HEDLEY_GCC_HAS_ATTRIBUTE +#undef JSON_HEDLEY_GCC_HAS_BUILTIN +#undef JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE +#undef JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE +#undef JSON_HEDLEY_GCC_HAS_EXTENSION +#undef JSON_HEDLEY_GCC_HAS_FEATURE +#undef JSON_HEDLEY_GCC_HAS_WARNING +#undef JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK +#undef JSON_HEDLEY_GCC_VERSION +#undef JSON_HEDLEY_GCC_VERSION_CHECK +#undef JSON_HEDLEY_GNUC_HAS_ATTRIBUTE +#undef JSON_HEDLEY_GNUC_HAS_BUILTIN +#undef JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE +#undef JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE +#undef JSON_HEDLEY_GNUC_HAS_EXTENSION +#undef JSON_HEDLEY_GNUC_HAS_FEATURE +#undef JSON_HEDLEY_GNUC_HAS_WARNING +#undef JSON_HEDLEY_GNUC_VERSION +#undef JSON_HEDLEY_GNUC_VERSION_CHECK +#undef JSON_HEDLEY_HAS_ATTRIBUTE +#undef JSON_HEDLEY_HAS_BUILTIN +#undef JSON_HEDLEY_HAS_CPP_ATTRIBUTE +#undef JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS +#undef JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE +#undef JSON_HEDLEY_HAS_EXTENSION +#undef JSON_HEDLEY_HAS_FEATURE +#undef JSON_HEDLEY_HAS_WARNING +#undef JSON_HEDLEY_IAR_VERSION +#undef JSON_HEDLEY_IAR_VERSION_CHECK +#undef JSON_HEDLEY_IBM_VERSION +#undef JSON_HEDLEY_IBM_VERSION_CHECK +#undef JSON_HEDLEY_IMPORT +#undef JSON_HEDLEY_INLINE +#undef JSON_HEDLEY_INTEL_CL_VERSION +#undef JSON_HEDLEY_INTEL_CL_VERSION_CHECK +#undef JSON_HEDLEY_INTEL_VERSION +#undef JSON_HEDLEY_INTEL_VERSION_CHECK +#undef JSON_HEDLEY_IS_CONSTANT +#undef JSON_HEDLEY_IS_CONSTEXPR_ +#undef JSON_HEDLEY_LIKELY +#undef JSON_HEDLEY_MALLOC +#undef JSON_HEDLEY_MCST_LCC_VERSION +#undef JSON_HEDLEY_MCST_LCC_VERSION_CHECK +#undef JSON_HEDLEY_MESSAGE +#undef JSON_HEDLEY_MSVC_VERSION +#undef JSON_HEDLEY_MSVC_VERSION_CHECK +#undef JSON_HEDLEY_NEVER_INLINE +#undef JSON_HEDLEY_NON_NULL +#undef JSON_HEDLEY_NO_ESCAPE +#undef JSON_HEDLEY_NO_RETURN +#undef JSON_HEDLEY_NO_THROW +#undef JSON_HEDLEY_NULL +#undef JSON_HEDLEY_PELLES_VERSION +#undef JSON_HEDLEY_PELLES_VERSION_CHECK +#undef JSON_HEDLEY_PGI_VERSION +#undef JSON_HEDLEY_PGI_VERSION_CHECK +#undef JSON_HEDLEY_PREDICT +#undef JSON_HEDLEY_PRINTF_FORMAT +#undef JSON_HEDLEY_PRIVATE +#undef JSON_HEDLEY_PUBLIC +#undef JSON_HEDLEY_PURE +#undef JSON_HEDLEY_REINTERPRET_CAST +#undef JSON_HEDLEY_REQUIRE +#undef JSON_HEDLEY_REQUIRE_CONSTEXPR +#undef JSON_HEDLEY_REQUIRE_MSG +#undef JSON_HEDLEY_RESTRICT +#undef JSON_HEDLEY_RETURNS_NON_NULL +#undef JSON_HEDLEY_SENTINEL +#undef JSON_HEDLEY_STATIC_ASSERT +#undef JSON_HEDLEY_STATIC_CAST +#undef JSON_HEDLEY_STRINGIFY +#undef JSON_HEDLEY_STRINGIFY_EX +#undef JSON_HEDLEY_SUNPRO_VERSION +#undef JSON_HEDLEY_SUNPRO_VERSION_CHECK +#undef JSON_HEDLEY_TINYC_VERSION +#undef JSON_HEDLEY_TINYC_VERSION_CHECK +#undef JSON_HEDLEY_TI_ARMCL_VERSION +#undef JSON_HEDLEY_TI_ARMCL_VERSION_CHECK +#undef JSON_HEDLEY_TI_CL2000_VERSION +#undef JSON_HEDLEY_TI_CL2000_VERSION_CHECK +#undef JSON_HEDLEY_TI_CL430_VERSION +#undef JSON_HEDLEY_TI_CL430_VERSION_CHECK +#undef JSON_HEDLEY_TI_CL6X_VERSION +#undef JSON_HEDLEY_TI_CL6X_VERSION_CHECK +#undef JSON_HEDLEY_TI_CL7X_VERSION +#undef JSON_HEDLEY_TI_CL7X_VERSION_CHECK +#undef JSON_HEDLEY_TI_CLPRU_VERSION +#undef JSON_HEDLEY_TI_CLPRU_VERSION_CHECK +#undef JSON_HEDLEY_TI_VERSION +#undef JSON_HEDLEY_TI_VERSION_CHECK +#undef JSON_HEDLEY_UNAVAILABLE +#undef JSON_HEDLEY_UNLIKELY +#undef JSON_HEDLEY_UNPREDICTABLE +#undef JSON_HEDLEY_UNREACHABLE +#undef JSON_HEDLEY_UNREACHABLE_RETURN +#undef 
JSON_HEDLEY_VERSION +#undef JSON_HEDLEY_VERSION_DECODE_MAJOR +#undef JSON_HEDLEY_VERSION_DECODE_MINOR +#undef JSON_HEDLEY_VERSION_DECODE_REVISION +#undef JSON_HEDLEY_VERSION_ENCODE +#undef JSON_HEDLEY_WARNING +#undef JSON_HEDLEY_WARN_UNUSED_RESULT +#undef JSON_HEDLEY_WARN_UNUSED_RESULT_MSG +#undef JSON_HEDLEY_FALL_THROUGH + + + +#endif // INCLUDE_NLOHMANN_JSON_HPP_ diff --git a/gateway-main/gateway/main.cpp b/gateway-main/gateway/main.cpp new file mode 100644 index 0000000000000000000000000000000000000000..5b24bb673b932ec7e28044ebb93a882cc412c806 --- /dev/null +++ b/gateway-main/gateway/main.cpp @@ -0,0 +1,36 @@ +// +// Created by tlabrosse on july 2022. +// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#include <iostream> +#include <string> +#include <vector> +#include "lib/json.hpp" + +#include "GateStub/GateStub.h" + +using namespace std; +using json = nlohmann::json; + +int main(int argc, char *argv[]) { + string args; + for(int k = 0; k < argc; k++) { + if(k != 0) + args += string(argv[k]); + } + + // ===============CODE START=============== + + cout << args << endl; + gateway::GateStub *gateStub = new gateway::GateStub(args); + + + cout << " =============== Running program =============== " << endl; + cout << gateStub->getExecFile()->run(gateStub->getJsonLine()) << endl; + + cout << " =============== Program ending ================ " << endl; + + return 0; +} diff --git a/gateway-main/image/Image1.png b/gateway-main/image/Image1.png new file mode 100644 index 0000000000000000000000000000000000000000..98edc14d495ccb1df8c0a2cd37de2f04c4f7faf5 Binary files /dev/null and b/gateway-main/image/Image1.png differ diff --git a/gateway-main/image/Image2.png b/gateway-main/image/Image2.png new file mode 100644 index 0000000000000000000000000000000000000000..b42608d0934c6b46c9a8e0375c19061f9de7f13e Binary files /dev/null and b/gateway-main/image/Image2.png differ diff --git a/gateway-main/librairies/cppGate/Argument.h b/gateway-main/librairies/cppGate/Argument.h new file mode 100644 index 0000000000000000000000000000000000000000..75c6a4c1a594533bada0ddabaebf213b8aee8fa3 --- /dev/null +++ b/gateway-main/librairies/cppGate/Argument.h @@ -0,0 +1,36 @@ +// +// Created by tlabrosse on july 2022 +// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#ifndef CPPGATE_ARGUMENT_H +#define CPPGATE_ARGUMENT_H + +#include <iostream> +#include <map> +#include <string> +#include <utility> + +#include "lib/json.hpp" + +class Argument { +protected: + std::string name; + +public: + explicit Argument(std::string name) :name(name) {} + + const std::string &getName() const { + return name; + } + + virtual Argument* getArgument(std::string name) = 0; + + virtual std::string serialize() const = 0; + + virtual void display() const = 0; +}; + + +#endif //CPPGATE_ARGUMENT_H diff --git a/gateway-main/librairies/cppGate/Dictionary.cpp b/gateway-main/librairies/cppGate/Dictionary.cpp new file mode 100644 index 0000000000000000000000000000000000000000..3ac4c93beac70b44bc0336bf23745bb15f5d7f70 --- /dev/null +++ b/gateway-main/librairies/cppGate/Dictionary.cpp @@ -0,0 +1,78 @@ +// +// Created by tlabrosse on july 2022 +// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#include "Dictionary.h" +using json = nlohmann::json; + + +Dictionary::Dictionary(std::string name) : Argument(name){} + + +void Dictionary::addArgument(Argument* argument) { + this->value.push_back(argument); +} +void Dictionary::addParameter(std::string name, std::string value) { + 
Parameter* para = new Parameter(name, value); + this->value.push_back(para); +} + +Argument* Dictionary::getArgument(std::string name) { + if(this->name == name) { + return this; + } + + for(auto argument : this->value) { + Argument* arg = argument->getArgument(name); + if (arg != nullptr) + return arg; + } + return nullptr; +} +Parameter* Dictionary::getParameter(std::string name) { + for(auto argument : this->value) { + if(typeid(*argument) == typeid(Parameter)) + if (argument->getName() == name) + return dynamic_cast<Parameter *>(argument); + } + return nullptr; +} + +std::string Dictionary::serialize() const { + std::string output = R"({"Dictionary": { "name": ")" + this->name + R"(", "value" : [)"; + + for(int k = 0; k < this->value.size(); k++) { + output += this->value[k]->serialize(); + if(k != this->value.size()-1) + output += ", "; + } + + output += "]}}"; + return output; +} +void Dictionary::deserialize(std::string jsonLine) { + auto data = json::parse(jsonLine); + + this->name = data["name"]; + auto value_list = data["value"]; + + for(auto val : value_list) { + if(val.contains("Dictionary")) { + Dictionary* dictionary = new Dictionary(""); + dictionary->deserialize(val["Dictionary"]); + + this->value.push_back(dictionary); + } + else if(val.contains("Parameter")) { + Parameter* parameter = new Parameter(val["Parameter"]["name"], val["Parameter"]["value"]); + + this->value.push_back(parameter); + } + } +} + +void Dictionary::display() const { + std::cout << this->serialize() << std::endl; +} diff --git a/gateway-main/librairies/cppGate/Dictionary.h b/gateway-main/librairies/cppGate/Dictionary.h new file mode 100644 index 0000000000000000000000000000000000000000..ed47a7cbc8054d784bf9bc68fa82840363129c55 --- /dev/null +++ b/gateway-main/librairies/cppGate/Dictionary.h @@ -0,0 +1,33 @@ +// +// Created by tlabrosse on july 2022 +// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#ifndef CPPGATE_DICTIONARY_H +#define CPPGATE_DICTIONARY_H + +#include <vector> +#include "Parameter.h" + +class Dictionary: public Argument { +private: + std::vector<Argument*> value; + +public: + explicit Dictionary(std::string name); + + void addArgument(Argument* argument); + void addParameter(std::string name, std::string value); + + Argument* getArgument(std::string name) override; + Parameter* getParameter(std::string name); + + std::string serialize() const override; + void deserialize(std::string jsonLine); + + void display() const override; +}; + + +#endif //CPPGATE_DICTIONARY_H diff --git a/gateway-main/librairies/cppGate/ExecFile.cpp b/gateway-main/librairies/cppGate/ExecFile.cpp new file mode 100644 index 0000000000000000000000000000000000000000..28d608b98135659f4c44a442cfab7d3c8c1a4664 --- /dev/null +++ b/gateway-main/librairies/cppGate/ExecFile.cpp @@ -0,0 +1,25 @@ +// +// Created by tlabrosse on july 2022 +// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#include "ExecFile.h" + +ExecFile::ExecFile(std::string path, std::string name, std::string cmd, std::string cmdAlt) : File(path, name), cmd(cmd), cmdAlt(cmdAlt) {} + +std::string ExecFile::serialize() const { + if(this->actif) + return R"({"ExecFile": {"path": ")" + this->path + + R"(", "name": ")" + this->name + + R"(", "cmd": ")" + this->cmd + + R"(", "cmdAlt": ")" + this->cmdAlt + + R"("}})"; + return ""; +} + +void ExecFile::display() const { + std::cout << this->serialize() << std::endl; +} + + diff --git a/gateway-main/librairies/cppGate/ExecFile.h
b/gateway-main/librairies/cppGate/ExecFile.h new file mode 100644 index 0000000000000000000000000000000000000000..2e767b83ad6534a48aef4d2e7479bad8f898bc40 --- /dev/null +++ b/gateway-main/librairies/cppGate/ExecFile.h @@ -0,0 +1,26 @@ +// +// Created by tlabrosse on july 2022 +// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#ifndef CPPGATE_EXECFILE_H +#define CPPGATE_EXECFILE_H + +#include "File.h" + +class ExecFile: public File { +private: + std::string cmd; + std::string cmdAlt; + +public: + ExecFile(std::string path, std::string name, std::string cmd, std::string cmdAlt = ""); + + std::string serialize() const override; + + void display() const override; +}; + + +#endif //CPPGATE_EXECFILE_H diff --git a/gateway-main/librairies/cppGate/File.cpp b/gateway-main/librairies/cppGate/File.cpp new file mode 100644 index 0000000000000000000000000000000000000000..2b904e259391e499f6d7e76b6f704560cd8e7e9d --- /dev/null +++ b/gateway-main/librairies/cppGate/File.cpp @@ -0,0 +1,9 @@ +// +// Created by tlabrosse on july 2022 +// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#include "File.h" + +File::File(std::string path, std::string name) : path(path), name(name) {} diff --git a/gateway-main/librairies/cppGate/File.h b/gateway-main/librairies/cppGate/File.h new file mode 100644 index 0000000000000000000000000000000000000000..c5d3d1a9e3c968c5bc318832bf7e524fd9e9d536 --- /dev/null +++ b/gateway-main/librairies/cppGate/File.h @@ -0,0 +1,31 @@ +// +// Created by tlabrosse on july 2022 +// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#ifndef CPPGATE_FILE_H +#define CPPGATE_FILE_H + +#include <iostream> +#include <string> +#include <map> + +class File { +protected: + std::string name; + std::string path; + bool actif = true; + +public: + File(std::string path, std::string name); + + void setInactif() { this->actif = false; } + + virtual std::string serialize() const = 0; + + virtual void display() const = 0; +}; + + +#endif //CPPGATE_FILE_H diff --git a/gateway-main/librairies/cppGate/OutputFile.cpp b/gateway-main/librairies/cppGate/OutputFile.cpp new file mode 100644 index 0000000000000000000000000000000000000000..893c683eb1fc05f6597591a32bc1f0a76098e407 --- /dev/null +++ b/gateway-main/librairies/cppGate/OutputFile.cpp @@ -0,0 +1,79 @@ +// +// Created by tlabrosse on july 2022 +// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#include <fstream> +#include "OutputFile.h" + +using json = nlohmann::json; + +OutputFile::OutputFile(std::string path, std::string name) : File(path, name) {} + +std::string OutputFile::read() const { + if(actif) { + if(this->name != "" && this->path != "") { + std::ifstream file (this->path + this->name); + if(!file.is_open()) { + return "{}"; + } + + std::string line, output = ""; + while ( getline (file,line) ) + { + output += line; + } + file.close(); + + return output; + } + } + return "{}"; +} +Dictionary* OutputFile::readAsDictionary() const { + if(actif) { + auto data = json::parse(this->read()); + Dictionary* dictionary_output = new Dictionary("outputFile"); + + Dictionary* outputs = new Dictionary("Outputs"); + for(auto output : data["Outputs"]) { + Dictionary* dico = new Dictionary(output["Dictionary"]["name"]); + dico->deserialize(output["Dictionary"]); + outputs->addArgument(dico); + } + + return dictionary_output; + } + return nullptr; +} + +void OutputFile::writeOutput(Dictionary dictionary) { + auto data = json::parse(this->read()); + + 
data["Outputs"].push_back(dictionary.serialize()); + + std::string jsonLine = data.dump(); + std::ofstream file (this->path + this->name, std::ios::trunc); + file << jsonLine; + file.close(); +} + + +std::string OutputFile::serialize() const { + if(this->actif) + return R"({"OutputFile": {"path": ")" + this->path + + R"(", "name": ")" + this->name + + R"("}})"; + + return ""; +} + +void OutputFile::displayContent() const { + std::cout << json::parse(this->read()).dump(1) << std::endl; +} +void OutputFile::display() const { + std::cout << this->serialize() << std::endl; +} + + diff --git a/gateway-main/librairies/cppGate/OutputFile.h b/gateway-main/librairies/cppGate/OutputFile.h new file mode 100644 index 0000000000000000000000000000000000000000..61a0929b6cdd1434ac670ea78672719439392114 --- /dev/null +++ b/gateway-main/librairies/cppGate/OutputFile.h @@ -0,0 +1,29 @@ +// +// Created by tlabrosse on july 2022 +// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#ifndef CPPGATE_OUTPUTFILE_H +#define CPPGATE_OUTPUTFILE_H + +#include "File.h" +#include "Dictionary.h" + +class OutputFile: public File { +public: + OutputFile(std::string path, std::string name); + + std::string read() const; + Dictionary* readAsDictionary() const; + + void writeOutput(Dictionary dictionary); + + std::string serialize() const override; + + void displayContent() const; + void display() const override; +}; + + +#endif //CPPGATE_OUTPUTFILE_H diff --git a/gateway-main/librairies/cppGate/Parameter.cpp b/gateway-main/librairies/cppGate/Parameter.cpp new file mode 100644 index 0000000000000000000000000000000000000000..54eabdbcfce7833bb28774cf7cee8dceaeb5ac3d --- /dev/null +++ b/gateway-main/librairies/cppGate/Parameter.cpp @@ -0,0 +1,39 @@ +// +// Created by tlabrosse on july 2022 +// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#include "Parameter.h" + +#include <utility> + +Parameter::Parameter(std::string name, std::string value): Argument(name), value(std::move(value)) {} + +Argument *Parameter::getArgument(std::string name) { + if(this->name == name) { + return this; + } + return nullptr; +} + +int Parameter::getValueAsInt() const { + return stoi(this->value); +} + +int Parameter::getValueAsFloat() const { + return stof(this->value); +} + +int Parameter::getValueAsDouble() const { + return stod(this->value); +} + +std::string Parameter::serialize() const { + std::string output = R"({"Parameter": { "name": ")" + this->name + R"(", "value": ")" + this->value + R"("}})"; + return output; +} + +void Parameter::display() const { + std::cout << this->serialize() << std::endl; +} \ No newline at end of file diff --git a/gateway-main/librairies/cppGate/Parameter.h b/gateway-main/librairies/cppGate/Parameter.h new file mode 100644 index 0000000000000000000000000000000000000000..fe7cd4e8494782555633fcbb01bc7ce7fcf8769c --- /dev/null +++ b/gateway-main/librairies/cppGate/Parameter.h @@ -0,0 +1,33 @@ +// +// Created by tlabrosse on july 2022 +// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#ifndef CPPGATE_PARAMETER_H +#define CPPGATE_PARAMETER_H + +#include <utility> + +#include "Argument.h" + +class Parameter : public Argument{ +private: + std::string value; + +public: + Parameter(std::string name, std::string value); + + Argument* getArgument(std::string name) override; + + int getValueAsInt() const; + int getValueAsFloat() const; + int getValueAsDouble() const; + + std::string serialize() const override; + + void display() const override; +}; 
+ + +#endif //CPPGATE_PARAMETER_H diff --git a/gateway-main/librairies/cppGate/ReceiverStub.cpp b/gateway-main/librairies/cppGate/ReceiverStub.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ec4ecca573da8051bd63933d5f5281943b78bc1b --- /dev/null +++ b/gateway-main/librairies/cppGate/ReceiverStub.cpp @@ -0,0 +1,44 @@ +// +// Created by tlabrosse on july 2022 +// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#include "ReceiverStub.h" +using json = nlohmann::json; + +ReceiverStub::ReceiverStub(int argc, char *argv[]): Stub(nullptr) { + this->readArguments(argc, argv); +} + +std::string ReceiverStub::readArguments(int argc, char *argv[]) { + if(argc >= 2) { + std::vector<std::string> args; + for(int k = 0; k < argc; k++) { + args.push_back(std::string(argv[k])); + } + + return args[1]; + } + + this->actif = false; + this->outputFile->setInactif(); + return ""; +} + +void ReceiverStub::deserialize(std::string jsonLine) { + if(this->actif) { + auto data = json::parse(jsonLine); + + this->outputFile = new OutputFile(data["OutputFile"]["path"], data["OutputFile"]["name"]); + + auto dictionaries_json = data["Dictionaries"]; + + for(auto dictionary_json : dictionaries_json) { + Dictionary* dictionary = new Dictionary(""); + dictionary->deserialize(dictionary_json["Dictionary"]); + + this->dictionaries.push_back(dictionary); + } + } +} diff --git a/gateway-main/librairies/cppGate/ReceiverStub.h b/gateway-main/librairies/cppGate/ReceiverStub.h new file mode 100644 index 0000000000000000000000000000000000000000..f7d1ff5a50880d13dc38bd66bf36f226adb351b3 --- /dev/null +++ b/gateway-main/librairies/cppGate/ReceiverStub.h @@ -0,0 +1,25 @@ +// +// Created by tlabrosse on july 2022 +// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#ifndef CPPGATE_RECEIVERSTUB_H +#define CPPGATE_RECEIVERSTUB_H + +#include "Stub.h" + +class ReceiverStub: public Stub { +private: + bool actif = true; + +public: + ReceiverStub(int argc, char *argv[]); + + std::string readArguments(int argc, char *argv[]); + + void deserialize(std::string jsonLine); +}; + + +#endif //CPPGATE_RECEIVERSTUB_H diff --git a/gateway-main/librairies/cppGate/SenderStub.cpp b/gateway-main/librairies/cppGate/SenderStub.cpp new file mode 100644 index 0000000000000000000000000000000000000000..baaf10055b82d9f86788f55ae7305ecad6b08d6f --- /dev/null +++ b/gateway-main/librairies/cppGate/SenderStub.cpp @@ -0,0 +1,87 @@ +// +// Created by tlabrosse on july 2022 +// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#include "SenderStub.h" + +SenderStub::SenderStub(ExecFile *execFile, OutputFile *outputFile) : Stub(outputFile) { + this->execFile = execFile; +} + +void SenderStub::run(std::string gatePath, std::string gateName) { + std::string command = gatePath + gateName + " " + this->getSentLine(); + + std::cout << " =============== Running gateway =============== " << std::endl; + FILE *file = popen(command.c_str(), "r"); + + char buffer[100]; + std::string stringBuff; + + if (file == nullptr) perror ("Error opening file"); + else { + while ( !feof(file) ) { + if ( fgets (buffer , 100 , file) == nullptr ) break; + stringBuff += buffer; + } + pclose (file); + } + std::cout << stringBuff << std::endl; + std::cout << " =============== Gateway ending ================ " << std::endl; + +} + +std::string SenderStub::getSentLine() const { + std::string line = this->serialize(); + int pos = line.find("\""); + while(pos != std::string::npos) { + line.replace(pos, 1,
"\\\""); + pos = line.find("\"", pos + 2); + + std::cout << line << std::endl; + std::cout << pos << " " << std::string::npos << std::endl; + } + + return line; +} + +std::string SenderStub::serialize() const { + std::string output = this->execFile->serialize(); + + output.pop_back(); output += ", "; + std::string outputFile = this->outputFile->serialize(); + outputFile.erase(outputFile.begin()); + output += outputFile; + + output.pop_back(); output += R"(, "Dictionaries": [)"; + + for(int k = 0; k < this->dictionaries.size(); k++) { + output += this->dictionaries[k]->serialize(); + if(k != this->dictionaries.size()-1) + output += ", "; + } + output += "]}"; + + return output; +} +std::string SenderStub::displayExecFile() const{ + std::string output = this->serialize(); + + std::cout << output << std::endl; + return output; +} +std::string SenderStub::displayAll() const{ + std::string output = this->displayExecFile() + Stub::displayAll(); + + std::cout << output << std::endl; + return output; +} + +ExecFile *SenderStub::getExecFile() const { + return execFile; +} + +void SenderStub::setExecFile(ExecFile *execFile) { + SenderStub::execFile = execFile; +} diff --git a/gateway-main/librairies/cppGate/SenderStub.h b/gateway-main/librairies/cppGate/SenderStub.h new file mode 100644 index 0000000000000000000000000000000000000000..fd6224f80d9d689e233431faf771f269914f0f0c --- /dev/null +++ b/gateway-main/librairies/cppGate/SenderStub.h @@ -0,0 +1,34 @@ +// +// Created by tlabrosse on july 2022 +// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#ifndef CPPGATE_SENDERSTUB_H +#define CPPGATE_SENDERSTUB_H + +#include "Stub.h" +#include "ExecFile.h" + +class SenderStub: public Stub { +private: + ExecFile* execFile; + +public: + explicit SenderStub(ExecFile* execFile = nullptr, OutputFile* outputFile = nullptr); + + void run(std::string gatePath, std::string gateName); + + ExecFile *getExecFile() const; + + void setExecFile(ExecFile *execFile); + + std::string getSentLine() const; + + std::string serialize() const; + std::string displayExecFile() const; + std::string displayAll() const override; +}; + + +#endif //CPPGATE_SENDERSTUB_H diff --git a/gateway-main/librairies/cppGate/Stub.cpp b/gateway-main/librairies/cppGate/Stub.cpp new file mode 100644 index 0000000000000000000000000000000000000000..371d5615aabded19ea94132ad96ff3c70068a5fa --- /dev/null +++ b/gateway-main/librairies/cppGate/Stub.cpp @@ -0,0 +1,93 @@ +// +// Created by tlabrosse on july 2022 +// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#include "Stub.h" + +#include <utility> + +Stub::Stub(OutputFile* outputFile) { + if(outputFile == nullptr) { + outputFile = new OutputFile("",""); + } + this->outputFile = outputFile; + this->actif = true; +} + +Argument *Stub::findArgumentWithName(std::string name) { + if(this->actif) { + Argument* arg = nullptr; + for(auto dictionary : this->dictionaries) { + arg = dictionary->getArgument(name); + if(arg != nullptr) + return dictionary; + } + } + return nullptr; +} +Dictionary *Stub::findDictionaryWithName(std::string name) { + if(this->actif) + for(auto dico : this->dictionaries) + if(dico->getName() == name) + return dico; + return nullptr; +} +Argument *Stub::getArgument(std::string name) { + if(this->actif) + return this->findArgumentWithName(std::move(name)); + + return nullptr; +} + +std::string Stub::displayDictionaries() const{ + if(actif) { + std::string output = "{"; + + for(int k = 0; k < this->dictionaries.size(); k++) { + output += 
this->dictionaries[k]->serialize(); + if(k != this->dictionaries.size() -1) + output += ", "; + } + output += "}"; + + std::cout << output << std::endl; + return output; + } + return ""; +} +std::string Stub::displayOutputFile() const{ + if(actif) { + std::string output = outputFile->serialize(); + + std::cout << output << std::endl; + return output; + } + return ""; +} +std::string Stub::displayAll() const{ + if(actif) { + std::string output = this->displayOutputFile() + this->displayDictionaries(); + + std::cout << output << std::endl; + return output; + } + return ""; +} + +OutputFile *Stub::getOutputFile() const { + return outputFile; +} +const std::vector<Dictionary *> &Stub::getDictionaries() const { + return dictionaries; +} + +void Stub::setOutputFile(OutputFile *outputFile) { + Stub::outputFile = outputFile; +} +void Stub::addDictionary(Dictionary *dictionary) { + this->dictionaries.push_back(dictionary); +} + + diff --git a/gateway-main/librairies/cppGate/Stub.h b/gateway-main/librairies/cppGate/Stub.h new file mode 100644 index 0000000000000000000000000000000000000000..604f275e0ffc7d9e3a868c63626c2eae315b3d38 --- /dev/null +++ b/gateway-main/librairies/cppGate/Stub.h @@ -0,0 +1,41 @@ +// +// Created by tlabrosse on july 2022 +// licence : GNU lgpl +// you can contact me at : theo.labt@gmail.com +// + +#ifndef CPPGATE_STUB_H +#define CPPGATE_STUB_H + +#include <vector> +#include "OutputFile.h" +#include "Dictionary.h" + + +class Stub { +protected: + OutputFile* outputFile; + std::vector<Dictionary*> dictionaries; + bool actif; + +public: + Stub(OutputFile* outputFile = nullptr); + + Argument* findArgumentWithName(std::string name); + Dictionary* findDictionaryWithName(std::string name); + Argument* getArgument(std::string name); + + OutputFile *getOutputFile() const; + const std::vector<Dictionary *> &getDictionaries() const; + + void setOutputFile(OutputFile *outputFile); + void addDictionary(Dictionary* dictionary); + + std::string displayDictionaries() const; + std::string displayOutputFile() const; + + virtual std::string displayAll() const; +}; + + +#endif //CPPGATE_STUB_H diff --git a/gateway-main/librairies/cppGate/lib/json.hpp b/gateway-main/librairies/cppGate/lib/json.hpp new file mode 100644 index 0000000000000000000000000000000000000000..2837e74b9e5aa3685f1a6b8fd30a0c1637c3889f --- /dev/null +++ b/gateway-main/librairies/cppGate/lib/json.hpp @@ -0,0 +1,23635 @@ +/* + __ _____ _____ _____ + __| | __| | | | JSON for Modern C++ +| | |__ | | | | | | version 3.10.5 +|_____|_____|_____|_|___| https://github.com/nlohmann/json + +Licensed under the MIT License <http://opensource.org/licenses/MIT>. +SPDX-License-Identifier: MIT +Copyright (c) 2013-2022 Niels Lohmann <http://nlohmann.me>. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ + +/****************************************************************************\ + * Note on documentation: The source files contain links to the online * + * documentation of the public API at https://json.nlohmann.me. This URL * + * contains the most recent documentation and should also be applicable to * + * previous versions; documentation for deprecated functions is not * + * removed, but marked deprecated. See "Generate documentation" section in * + * file docs/README.md. * +\****************************************************************************/ + +#ifndef INCLUDE_NLOHMANN_JSON_HPP_ +#define INCLUDE_NLOHMANN_JSON_HPP_ + +#ifndef JSON_SKIP_LIBRARY_VERSION_CHECK + #if defined(NLOHMANN_JSON_VERSION_MAJOR) && defined(NLOHMANN_JSON_VERSION_MINOR) && defined(NLOHMANN_JSON_VERSION_PATCH) + #if NLOHMANN_JSON_VERSION_MAJOR != 3 || NLOHMANN_JSON_VERSION_MINOR != 10 || NLOHMANN_JSON_VERSION_PATCH != 5 + #warning "Already included a different version of the library!" + #endif + #endif +#endif + +#define NLOHMANN_JSON_VERSION_MAJOR 3 // NOLINT(modernize-macro-to-enum) +#define NLOHMANN_JSON_VERSION_MINOR 10 // NOLINT(modernize-macro-to-enum) +#define NLOHMANN_JSON_VERSION_PATCH 5 // NOLINT(modernize-macro-to-enum) + +#include <algorithm> // all_of, find, for_each +#include <cstddef> // nullptr_t, ptrdiff_t, size_t +#include <functional> // hash, less +#include <initializer_list> // initializer_list +#ifndef JSON_NO_IO + #include <iosfwd> // istream, ostream +#endif // JSON_NO_IO +#include <iterator> // random_access_iterator_tag +#include <memory> // unique_ptr +#include <numeric> // accumulate +#include <string> // string, stoi, to_string +#include <utility> // declval, forward, move, pair, swap +#include <vector> // vector + +// #include <nlohmann/adl_serializer.hpp> + + +#include <type_traits> +#include <utility> + +// #include <nlohmann/detail/conversions/from_json.hpp> + + +#include <algorithm> // transform +#include <array> // array +#include <forward_list> // forward_list +#include <iterator> // inserter, front_inserter, end +#include <map> // map +#include <string> // string +#include <tuple> // tuple, make_tuple +#include <type_traits> // is_arithmetic, is_same, is_enum, underlying_type, is_convertible +#include <unordered_map> // unordered_map +#include <utility> // pair, declval +#include <valarray> // valarray + +// #include <nlohmann/detail/exceptions.hpp> + + +#include <cstddef> // nullptr_t +#include <exception> // exception +#include <stdexcept> // runtime_error +#include <string> // to_string +#include <vector> // vector + +// #include <nlohmann/detail/value_t.hpp> + + +#include <array> // array +#include <cstddef> // size_t +#include <cstdint> // uint8_t +#include <string> // string + +// #include <nlohmann/detail/macro_scope.hpp> + + +#include <utility> // declval, pair +// #include <nlohmann/thirdparty/hedley/hedley.hpp> + + +/* Hedley - https://nemequ.github.io/hedley + * Created by Evan Nemerson <evan@nemerson.com> + * + * To the extent possible under law, the author(s) have dedicated all + * copyright and related and neighboring rights to this software to + * the public domain worldwide. This software is distributed without + * any warranty. 
+ * + * For details, see <http://creativecommons.org/publicdomain/zero/1.0/>. + * SPDX-License-Identifier: CC0-1.0 + */ + +#if !defined(JSON_HEDLEY_VERSION) || (JSON_HEDLEY_VERSION < 15) +#if defined(JSON_HEDLEY_VERSION) + #undef JSON_HEDLEY_VERSION +#endif +#define JSON_HEDLEY_VERSION 15 + +#if defined(JSON_HEDLEY_STRINGIFY_EX) + #undef JSON_HEDLEY_STRINGIFY_EX +#endif +#define JSON_HEDLEY_STRINGIFY_EX(x) #x + +#if defined(JSON_HEDLEY_STRINGIFY) + #undef JSON_HEDLEY_STRINGIFY +#endif +#define JSON_HEDLEY_STRINGIFY(x) JSON_HEDLEY_STRINGIFY_EX(x) + +#if defined(JSON_HEDLEY_CONCAT_EX) + #undef JSON_HEDLEY_CONCAT_EX +#endif +#define JSON_HEDLEY_CONCAT_EX(a,b) a##b + +#if defined(JSON_HEDLEY_CONCAT) + #undef JSON_HEDLEY_CONCAT +#endif +#define JSON_HEDLEY_CONCAT(a,b) JSON_HEDLEY_CONCAT_EX(a,b) + +#if defined(JSON_HEDLEY_CONCAT3_EX) + #undef JSON_HEDLEY_CONCAT3_EX +#endif +#define JSON_HEDLEY_CONCAT3_EX(a,b,c) a##b##c + +#if defined(JSON_HEDLEY_CONCAT3) + #undef JSON_HEDLEY_CONCAT3 +#endif +#define JSON_HEDLEY_CONCAT3(a,b,c) JSON_HEDLEY_CONCAT3_EX(a,b,c) + +#if defined(JSON_HEDLEY_VERSION_ENCODE) + #undef JSON_HEDLEY_VERSION_ENCODE +#endif +#define JSON_HEDLEY_VERSION_ENCODE(major,minor,revision) (((major) * 1000000) + ((minor) * 1000) + (revision)) + +#if defined(JSON_HEDLEY_VERSION_DECODE_MAJOR) + #undef JSON_HEDLEY_VERSION_DECODE_MAJOR +#endif +#define JSON_HEDLEY_VERSION_DECODE_MAJOR(version) ((version) / 1000000) + +#if defined(JSON_HEDLEY_VERSION_DECODE_MINOR) + #undef JSON_HEDLEY_VERSION_DECODE_MINOR +#endif +#define JSON_HEDLEY_VERSION_DECODE_MINOR(version) (((version) % 1000000) / 1000) + +#if defined(JSON_HEDLEY_VERSION_DECODE_REVISION) + #undef JSON_HEDLEY_VERSION_DECODE_REVISION +#endif +#define JSON_HEDLEY_VERSION_DECODE_REVISION(version) ((version) % 1000) + +#if defined(JSON_HEDLEY_GNUC_VERSION) + #undef JSON_HEDLEY_GNUC_VERSION +#endif +#if defined(__GNUC__) && defined(__GNUC_PATCHLEVEL__) + #define JSON_HEDLEY_GNUC_VERSION JSON_HEDLEY_VERSION_ENCODE(__GNUC__, __GNUC_MINOR__, __GNUC_PATCHLEVEL__) +#elif defined(__GNUC__) + #define JSON_HEDLEY_GNUC_VERSION JSON_HEDLEY_VERSION_ENCODE(__GNUC__, __GNUC_MINOR__, 0) +#endif + +#if defined(JSON_HEDLEY_GNUC_VERSION_CHECK) + #undef JSON_HEDLEY_GNUC_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_GNUC_VERSION) + #define JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_GNUC_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_MSVC_VERSION) + #undef JSON_HEDLEY_MSVC_VERSION +#endif +#if defined(_MSC_FULL_VER) && (_MSC_FULL_VER >= 140000000) && !defined(__ICL) + #define JSON_HEDLEY_MSVC_VERSION JSON_HEDLEY_VERSION_ENCODE(_MSC_FULL_VER / 10000000, (_MSC_FULL_VER % 10000000) / 100000, (_MSC_FULL_VER % 100000) / 100) +#elif defined(_MSC_FULL_VER) && !defined(__ICL) + #define JSON_HEDLEY_MSVC_VERSION JSON_HEDLEY_VERSION_ENCODE(_MSC_FULL_VER / 1000000, (_MSC_FULL_VER % 1000000) / 10000, (_MSC_FULL_VER % 10000) / 10) +#elif defined(_MSC_VER) && !defined(__ICL) + #define JSON_HEDLEY_MSVC_VERSION JSON_HEDLEY_VERSION_ENCODE(_MSC_VER / 100, _MSC_VER % 100, 0) +#endif + +#if defined(JSON_HEDLEY_MSVC_VERSION_CHECK) + #undef JSON_HEDLEY_MSVC_VERSION_CHECK +#endif +#if !defined(JSON_HEDLEY_MSVC_VERSION) + #define JSON_HEDLEY_MSVC_VERSION_CHECK(major,minor,patch) (0) +#elif defined(_MSC_VER) && (_MSC_VER >= 1400) + #define JSON_HEDLEY_MSVC_VERSION_CHECK(major,minor,patch) (_MSC_FULL_VER >= ((major * 10000000) + (minor * 100000) + 
(patch))) +#elif defined(_MSC_VER) && (_MSC_VER >= 1200) + #define JSON_HEDLEY_MSVC_VERSION_CHECK(major,minor,patch) (_MSC_FULL_VER >= ((major * 1000000) + (minor * 10000) + (patch))) +#else + #define JSON_HEDLEY_MSVC_VERSION_CHECK(major,minor,patch) (_MSC_VER >= ((major * 100) + (minor))) +#endif + +#if defined(JSON_HEDLEY_INTEL_VERSION) + #undef JSON_HEDLEY_INTEL_VERSION +#endif +#if defined(__INTEL_COMPILER) && defined(__INTEL_COMPILER_UPDATE) && !defined(__ICL) + #define JSON_HEDLEY_INTEL_VERSION JSON_HEDLEY_VERSION_ENCODE(__INTEL_COMPILER / 100, __INTEL_COMPILER % 100, __INTEL_COMPILER_UPDATE) +#elif defined(__INTEL_COMPILER) && !defined(__ICL) + #define JSON_HEDLEY_INTEL_VERSION JSON_HEDLEY_VERSION_ENCODE(__INTEL_COMPILER / 100, __INTEL_COMPILER % 100, 0) +#endif + +#if defined(JSON_HEDLEY_INTEL_VERSION_CHECK) + #undef JSON_HEDLEY_INTEL_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_INTEL_VERSION) + #define JSON_HEDLEY_INTEL_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_INTEL_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_INTEL_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_INTEL_CL_VERSION) + #undef JSON_HEDLEY_INTEL_CL_VERSION +#endif +#if defined(__INTEL_COMPILER) && defined(__INTEL_COMPILER_UPDATE) && defined(__ICL) + #define JSON_HEDLEY_INTEL_CL_VERSION JSON_HEDLEY_VERSION_ENCODE(__INTEL_COMPILER, __INTEL_COMPILER_UPDATE, 0) +#endif + +#if defined(JSON_HEDLEY_INTEL_CL_VERSION_CHECK) + #undef JSON_HEDLEY_INTEL_CL_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_INTEL_CL_VERSION) + #define JSON_HEDLEY_INTEL_CL_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_INTEL_CL_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_INTEL_CL_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_PGI_VERSION) + #undef JSON_HEDLEY_PGI_VERSION +#endif +#if defined(__PGI) && defined(__PGIC__) && defined(__PGIC_MINOR__) && defined(__PGIC_PATCHLEVEL__) + #define JSON_HEDLEY_PGI_VERSION JSON_HEDLEY_VERSION_ENCODE(__PGIC__, __PGIC_MINOR__, __PGIC_PATCHLEVEL__) +#endif + +#if defined(JSON_HEDLEY_PGI_VERSION_CHECK) + #undef JSON_HEDLEY_PGI_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_PGI_VERSION) + #define JSON_HEDLEY_PGI_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_PGI_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_PGI_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_SUNPRO_VERSION) + #undef JSON_HEDLEY_SUNPRO_VERSION +#endif +#if defined(__SUNPRO_C) && (__SUNPRO_C > 0x1000) + #define JSON_HEDLEY_SUNPRO_VERSION JSON_HEDLEY_VERSION_ENCODE((((__SUNPRO_C >> 16) & 0xf) * 10) + ((__SUNPRO_C >> 12) & 0xf), (((__SUNPRO_C >> 8) & 0xf) * 10) + ((__SUNPRO_C >> 4) & 0xf), (__SUNPRO_C & 0xf) * 10) +#elif defined(__SUNPRO_C) + #define JSON_HEDLEY_SUNPRO_VERSION JSON_HEDLEY_VERSION_ENCODE((__SUNPRO_C >> 8) & 0xf, (__SUNPRO_C >> 4) & 0xf, (__SUNPRO_C) & 0xf) +#elif defined(__SUNPRO_CC) && (__SUNPRO_CC > 0x1000) + #define JSON_HEDLEY_SUNPRO_VERSION JSON_HEDLEY_VERSION_ENCODE((((__SUNPRO_CC >> 16) & 0xf) * 10) + ((__SUNPRO_CC >> 12) & 0xf), (((__SUNPRO_CC >> 8) & 0xf) * 10) + ((__SUNPRO_CC >> 4) & 0xf), (__SUNPRO_CC & 0xf) * 10) +#elif defined(__SUNPRO_CC) + #define JSON_HEDLEY_SUNPRO_VERSION JSON_HEDLEY_VERSION_ENCODE((__SUNPRO_CC >> 8) & 0xf, (__SUNPRO_CC >> 4) & 0xf, (__SUNPRO_CC) & 0xf) +#endif + +#if defined(JSON_HEDLEY_SUNPRO_VERSION_CHECK) + #undef JSON_HEDLEY_SUNPRO_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_SUNPRO_VERSION) + 
#define JSON_HEDLEY_SUNPRO_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_SUNPRO_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_SUNPRO_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_EMSCRIPTEN_VERSION) + #undef JSON_HEDLEY_EMSCRIPTEN_VERSION +#endif +#if defined(__EMSCRIPTEN__) + #define JSON_HEDLEY_EMSCRIPTEN_VERSION JSON_HEDLEY_VERSION_ENCODE(__EMSCRIPTEN_major__, __EMSCRIPTEN_minor__, __EMSCRIPTEN_tiny__) +#endif + +#if defined(JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK) + #undef JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_EMSCRIPTEN_VERSION) + #define JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_EMSCRIPTEN_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_ARM_VERSION) + #undef JSON_HEDLEY_ARM_VERSION +#endif +#if defined(__CC_ARM) && defined(__ARMCOMPILER_VERSION) + #define JSON_HEDLEY_ARM_VERSION JSON_HEDLEY_VERSION_ENCODE(__ARMCOMPILER_VERSION / 1000000, (__ARMCOMPILER_VERSION % 1000000) / 10000, (__ARMCOMPILER_VERSION % 10000) / 100) +#elif defined(__CC_ARM) && defined(__ARMCC_VERSION) + #define JSON_HEDLEY_ARM_VERSION JSON_HEDLEY_VERSION_ENCODE(__ARMCC_VERSION / 1000000, (__ARMCC_VERSION % 1000000) / 10000, (__ARMCC_VERSION % 10000) / 100) +#endif + +#if defined(JSON_HEDLEY_ARM_VERSION_CHECK) + #undef JSON_HEDLEY_ARM_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_ARM_VERSION) + #define JSON_HEDLEY_ARM_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_ARM_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_ARM_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_IBM_VERSION) + #undef JSON_HEDLEY_IBM_VERSION +#endif +#if defined(__ibmxl__) + #define JSON_HEDLEY_IBM_VERSION JSON_HEDLEY_VERSION_ENCODE(__ibmxl_version__, __ibmxl_release__, __ibmxl_modification__) +#elif defined(__xlC__) && defined(__xlC_ver__) + #define JSON_HEDLEY_IBM_VERSION JSON_HEDLEY_VERSION_ENCODE(__xlC__ >> 8, __xlC__ & 0xff, (__xlC_ver__ >> 8) & 0xff) +#elif defined(__xlC__) + #define JSON_HEDLEY_IBM_VERSION JSON_HEDLEY_VERSION_ENCODE(__xlC__ >> 8, __xlC__ & 0xff, 0) +#endif + +#if defined(JSON_HEDLEY_IBM_VERSION_CHECK) + #undef JSON_HEDLEY_IBM_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_IBM_VERSION) + #define JSON_HEDLEY_IBM_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_IBM_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_IBM_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_VERSION) + #undef JSON_HEDLEY_TI_VERSION +#endif +#if \ + defined(__TI_COMPILER_VERSION__) && \ + ( \ + defined(__TMS470__) || defined(__TI_ARM__) || \ + defined(__MSP430__) || \ + defined(__TMS320C2000__) \ + ) +#if (__TI_COMPILER_VERSION__ >= 16000000) + #define JSON_HEDLEY_TI_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif +#endif + +#if defined(JSON_HEDLEY_TI_VERSION_CHECK) + #undef JSON_HEDLEY_TI_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_VERSION) + #define JSON_HEDLEY_TI_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_CL2000_VERSION) + #undef JSON_HEDLEY_TI_CL2000_VERSION +#endif +#if 
defined(__TI_COMPILER_VERSION__) && defined(__TMS320C2000__) + #define JSON_HEDLEY_TI_CL2000_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif + +#if defined(JSON_HEDLEY_TI_CL2000_VERSION_CHECK) + #undef JSON_HEDLEY_TI_CL2000_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_CL2000_VERSION) + #define JSON_HEDLEY_TI_CL2000_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CL2000_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_CL2000_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_CL430_VERSION) + #undef JSON_HEDLEY_TI_CL430_VERSION +#endif +#if defined(__TI_COMPILER_VERSION__) && defined(__MSP430__) + #define JSON_HEDLEY_TI_CL430_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif + +#if defined(JSON_HEDLEY_TI_CL430_VERSION_CHECK) + #undef JSON_HEDLEY_TI_CL430_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_CL430_VERSION) + #define JSON_HEDLEY_TI_CL430_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CL430_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_CL430_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_ARMCL_VERSION) + #undef JSON_HEDLEY_TI_ARMCL_VERSION +#endif +#if defined(__TI_COMPILER_VERSION__) && (defined(__TMS470__) || defined(__TI_ARM__)) + #define JSON_HEDLEY_TI_ARMCL_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif + +#if defined(JSON_HEDLEY_TI_ARMCL_VERSION_CHECK) + #undef JSON_HEDLEY_TI_ARMCL_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_ARMCL_VERSION) + #define JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_ARMCL_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_CL6X_VERSION) + #undef JSON_HEDLEY_TI_CL6X_VERSION +#endif +#if defined(__TI_COMPILER_VERSION__) && defined(__TMS320C6X__) + #define JSON_HEDLEY_TI_CL6X_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif + +#if defined(JSON_HEDLEY_TI_CL6X_VERSION_CHECK) + #undef JSON_HEDLEY_TI_CL6X_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_CL6X_VERSION) + #define JSON_HEDLEY_TI_CL6X_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CL6X_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_CL6X_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_CL7X_VERSION) + #undef JSON_HEDLEY_TI_CL7X_VERSION +#endif +#if defined(__TI_COMPILER_VERSION__) && defined(__C7000__) + #define JSON_HEDLEY_TI_CL7X_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif + +#if defined(JSON_HEDLEY_TI_CL7X_VERSION_CHECK) + #undef JSON_HEDLEY_TI_CL7X_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_CL7X_VERSION) + #define JSON_HEDLEY_TI_CL7X_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CL7X_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_CL7X_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_CLPRU_VERSION) + #undef 
JSON_HEDLEY_TI_CLPRU_VERSION +#endif +#if defined(__TI_COMPILER_VERSION__) && defined(__PRU__) + #define JSON_HEDLEY_TI_CLPRU_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif + +#if defined(JSON_HEDLEY_TI_CLPRU_VERSION_CHECK) + #undef JSON_HEDLEY_TI_CLPRU_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_CLPRU_VERSION) + #define JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CLPRU_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_CRAY_VERSION) + #undef JSON_HEDLEY_CRAY_VERSION +#endif +#if defined(_CRAYC) + #if defined(_RELEASE_PATCHLEVEL) + #define JSON_HEDLEY_CRAY_VERSION JSON_HEDLEY_VERSION_ENCODE(_RELEASE_MAJOR, _RELEASE_MINOR, _RELEASE_PATCHLEVEL) + #else + #define JSON_HEDLEY_CRAY_VERSION JSON_HEDLEY_VERSION_ENCODE(_RELEASE_MAJOR, _RELEASE_MINOR, 0) + #endif +#endif + +#if defined(JSON_HEDLEY_CRAY_VERSION_CHECK) + #undef JSON_HEDLEY_CRAY_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_CRAY_VERSION) + #define JSON_HEDLEY_CRAY_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_CRAY_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_CRAY_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_IAR_VERSION) + #undef JSON_HEDLEY_IAR_VERSION +#endif +#if defined(__IAR_SYSTEMS_ICC__) + #if __VER__ > 1000 + #define JSON_HEDLEY_IAR_VERSION JSON_HEDLEY_VERSION_ENCODE((__VER__ / 1000000), ((__VER__ / 1000) % 1000), (__VER__ % 1000)) + #else + #define JSON_HEDLEY_IAR_VERSION JSON_HEDLEY_VERSION_ENCODE(__VER__ / 100, __VER__ % 100, 0) + #endif +#endif + +#if defined(JSON_HEDLEY_IAR_VERSION_CHECK) + #undef JSON_HEDLEY_IAR_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_IAR_VERSION) + #define JSON_HEDLEY_IAR_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_IAR_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_IAR_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TINYC_VERSION) + #undef JSON_HEDLEY_TINYC_VERSION +#endif +#if defined(__TINYC__) + #define JSON_HEDLEY_TINYC_VERSION JSON_HEDLEY_VERSION_ENCODE(__TINYC__ / 1000, (__TINYC__ / 100) % 10, __TINYC__ % 100) +#endif + +#if defined(JSON_HEDLEY_TINYC_VERSION_CHECK) + #undef JSON_HEDLEY_TINYC_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TINYC_VERSION) + #define JSON_HEDLEY_TINYC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TINYC_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TINYC_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_DMC_VERSION) + #undef JSON_HEDLEY_DMC_VERSION +#endif +#if defined(__DMC__) + #define JSON_HEDLEY_DMC_VERSION JSON_HEDLEY_VERSION_ENCODE(__DMC__ >> 8, (__DMC__ >> 4) & 0xf, __DMC__ & 0xf) +#endif + +#if defined(JSON_HEDLEY_DMC_VERSION_CHECK) + #undef JSON_HEDLEY_DMC_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_DMC_VERSION) + #define JSON_HEDLEY_DMC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_DMC_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_DMC_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_COMPCERT_VERSION) + #undef JSON_HEDLEY_COMPCERT_VERSION +#endif +#if defined(__COMPCERT_VERSION__) + #define JSON_HEDLEY_COMPCERT_VERSION JSON_HEDLEY_VERSION_ENCODE(__COMPCERT_VERSION__ / 10000, (__COMPCERT_VERSION__ / 100) % 100, __COMPCERT_VERSION__ 
% 100) +#endif + +#if defined(JSON_HEDLEY_COMPCERT_VERSION_CHECK) + #undef JSON_HEDLEY_COMPCERT_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_COMPCERT_VERSION) + #define JSON_HEDLEY_COMPCERT_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_COMPCERT_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_COMPCERT_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_PELLES_VERSION) + #undef JSON_HEDLEY_PELLES_VERSION +#endif +#if defined(__POCC__) + #define JSON_HEDLEY_PELLES_VERSION JSON_HEDLEY_VERSION_ENCODE(__POCC__ / 100, __POCC__ % 100, 0) +#endif + +#if defined(JSON_HEDLEY_PELLES_VERSION_CHECK) + #undef JSON_HEDLEY_PELLES_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_PELLES_VERSION) + #define JSON_HEDLEY_PELLES_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_PELLES_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_PELLES_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_MCST_LCC_VERSION) + #undef JSON_HEDLEY_MCST_LCC_VERSION +#endif +#if defined(__LCC__) && defined(__LCC_MINOR__) + #define JSON_HEDLEY_MCST_LCC_VERSION JSON_HEDLEY_VERSION_ENCODE(__LCC__ / 100, __LCC__ % 100, __LCC_MINOR__) +#endif + +#if defined(JSON_HEDLEY_MCST_LCC_VERSION_CHECK) + #undef JSON_HEDLEY_MCST_LCC_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_MCST_LCC_VERSION) + #define JSON_HEDLEY_MCST_LCC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_MCST_LCC_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_MCST_LCC_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_GCC_VERSION) + #undef JSON_HEDLEY_GCC_VERSION +#endif +#if \ + defined(JSON_HEDLEY_GNUC_VERSION) && \ + !defined(__clang__) && \ + !defined(JSON_HEDLEY_INTEL_VERSION) && \ + !defined(JSON_HEDLEY_PGI_VERSION) && \ + !defined(JSON_HEDLEY_ARM_VERSION) && \ + !defined(JSON_HEDLEY_CRAY_VERSION) && \ + !defined(JSON_HEDLEY_TI_VERSION) && \ + !defined(JSON_HEDLEY_TI_ARMCL_VERSION) && \ + !defined(JSON_HEDLEY_TI_CL430_VERSION) && \ + !defined(JSON_HEDLEY_TI_CL2000_VERSION) && \ + !defined(JSON_HEDLEY_TI_CL6X_VERSION) && \ + !defined(JSON_HEDLEY_TI_CL7X_VERSION) && \ + !defined(JSON_HEDLEY_TI_CLPRU_VERSION) && \ + !defined(__COMPCERT__) && \ + !defined(JSON_HEDLEY_MCST_LCC_VERSION) + #define JSON_HEDLEY_GCC_VERSION JSON_HEDLEY_GNUC_VERSION +#endif + +#if defined(JSON_HEDLEY_GCC_VERSION_CHECK) + #undef JSON_HEDLEY_GCC_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_GCC_VERSION) + #define JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_GCC_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_HAS_ATTRIBUTE) + #undef JSON_HEDLEY_HAS_ATTRIBUTE +#endif +#if \ + defined(__has_attribute) && \ + ( \ + (!defined(JSON_HEDLEY_IAR_VERSION) || JSON_HEDLEY_IAR_VERSION_CHECK(8,5,9)) \ + ) +# define JSON_HEDLEY_HAS_ATTRIBUTE(attribute) __has_attribute(attribute) +#else +# define JSON_HEDLEY_HAS_ATTRIBUTE(attribute) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_ATTRIBUTE) + #undef JSON_HEDLEY_GNUC_HAS_ATTRIBUTE +#endif +#if defined(__has_attribute) + #define JSON_HEDLEY_GNUC_HAS_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_HAS_ATTRIBUTE(attribute) +#else + #define JSON_HEDLEY_GNUC_HAS_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_ATTRIBUTE) + #undef JSON_HEDLEY_GCC_HAS_ATTRIBUTE +#endif +#if 
defined(__has_attribute) + #define JSON_HEDLEY_GCC_HAS_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_HAS_ATTRIBUTE(attribute) +#else + #define JSON_HEDLEY_GCC_HAS_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_CPP_ATTRIBUTE) + #undef JSON_HEDLEY_HAS_CPP_ATTRIBUTE +#endif +#if \ + defined(__has_cpp_attribute) && \ + defined(__cplusplus) && \ + (!defined(JSON_HEDLEY_SUNPRO_VERSION) || JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,15,0)) + #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE(attribute) __has_cpp_attribute(attribute) +#else + #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE(attribute) (0) +#endif + +#if defined(JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS) + #undef JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS +#endif +#if !defined(__cplusplus) || !defined(__has_cpp_attribute) + #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(ns,attribute) (0) +#elif \ + !defined(JSON_HEDLEY_PGI_VERSION) && \ + !defined(JSON_HEDLEY_IAR_VERSION) && \ + (!defined(JSON_HEDLEY_SUNPRO_VERSION) || JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,15,0)) && \ + (!defined(JSON_HEDLEY_MSVC_VERSION) || JSON_HEDLEY_MSVC_VERSION_CHECK(19,20,0)) + #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(ns,attribute) JSON_HEDLEY_HAS_CPP_ATTRIBUTE(ns::attribute) +#else + #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(ns,attribute) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE) + #undef JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE +#endif +#if defined(__has_cpp_attribute) && defined(__cplusplus) + #define JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE(attribute,major,minor,patch) __has_cpp_attribute(attribute) +#else + #define JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE) + #undef JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE +#endif +#if defined(__has_cpp_attribute) && defined(__cplusplus) + #define JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE(attribute,major,minor,patch) __has_cpp_attribute(attribute) +#else + #define JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_BUILTIN) + #undef JSON_HEDLEY_HAS_BUILTIN +#endif +#if defined(__has_builtin) + #define JSON_HEDLEY_HAS_BUILTIN(builtin) __has_builtin(builtin) +#else + #define JSON_HEDLEY_HAS_BUILTIN(builtin) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_BUILTIN) + #undef JSON_HEDLEY_GNUC_HAS_BUILTIN +#endif +#if defined(__has_builtin) + #define JSON_HEDLEY_GNUC_HAS_BUILTIN(builtin,major,minor,patch) __has_builtin(builtin) +#else + #define JSON_HEDLEY_GNUC_HAS_BUILTIN(builtin,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_BUILTIN) + #undef JSON_HEDLEY_GCC_HAS_BUILTIN +#endif +#if defined(__has_builtin) + #define JSON_HEDLEY_GCC_HAS_BUILTIN(builtin,major,minor,patch) __has_builtin(builtin) +#else + #define JSON_HEDLEY_GCC_HAS_BUILTIN(builtin,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_FEATURE) + #undef JSON_HEDLEY_HAS_FEATURE +#endif +#if defined(__has_feature) + #define JSON_HEDLEY_HAS_FEATURE(feature) __has_feature(feature) +#else + #define JSON_HEDLEY_HAS_FEATURE(feature) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_FEATURE) + #undef JSON_HEDLEY_GNUC_HAS_FEATURE +#endif +#if defined(__has_feature) + #define JSON_HEDLEY_GNUC_HAS_FEATURE(feature,major,minor,patch) __has_feature(feature) +#else + #define 
JSON_HEDLEY_GNUC_HAS_FEATURE(feature,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_FEATURE) + #undef JSON_HEDLEY_GCC_HAS_FEATURE +#endif +#if defined(__has_feature) + #define JSON_HEDLEY_GCC_HAS_FEATURE(feature,major,minor,patch) __has_feature(feature) +#else + #define JSON_HEDLEY_GCC_HAS_FEATURE(feature,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_EXTENSION) + #undef JSON_HEDLEY_HAS_EXTENSION +#endif +#if defined(__has_extension) + #define JSON_HEDLEY_HAS_EXTENSION(extension) __has_extension(extension) +#else + #define JSON_HEDLEY_HAS_EXTENSION(extension) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_EXTENSION) + #undef JSON_HEDLEY_GNUC_HAS_EXTENSION +#endif +#if defined(__has_extension) + #define JSON_HEDLEY_GNUC_HAS_EXTENSION(extension,major,minor,patch) __has_extension(extension) +#else + #define JSON_HEDLEY_GNUC_HAS_EXTENSION(extension,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_EXTENSION) + #undef JSON_HEDLEY_GCC_HAS_EXTENSION +#endif +#if defined(__has_extension) + #define JSON_HEDLEY_GCC_HAS_EXTENSION(extension,major,minor,patch) __has_extension(extension) +#else + #define JSON_HEDLEY_GCC_HAS_EXTENSION(extension,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE) + #undef JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE +#endif +#if defined(__has_declspec_attribute) + #define JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE(attribute) __has_declspec_attribute(attribute) +#else + #define JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE(attribute) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE) + #undef JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE +#endif +#if defined(__has_declspec_attribute) + #define JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE(attribute,major,minor,patch) __has_declspec_attribute(attribute) +#else + #define JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE) + #undef JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE +#endif +#if defined(__has_declspec_attribute) + #define JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE(attribute,major,minor,patch) __has_declspec_attribute(attribute) +#else + #define JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_WARNING) + #undef JSON_HEDLEY_HAS_WARNING +#endif +#if defined(__has_warning) + #define JSON_HEDLEY_HAS_WARNING(warning) __has_warning(warning) +#else + #define JSON_HEDLEY_HAS_WARNING(warning) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_WARNING) + #undef JSON_HEDLEY_GNUC_HAS_WARNING +#endif +#if defined(__has_warning) + #define JSON_HEDLEY_GNUC_HAS_WARNING(warning,major,minor,patch) __has_warning(warning) +#else + #define JSON_HEDLEY_GNUC_HAS_WARNING(warning,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_WARNING) + #undef JSON_HEDLEY_GCC_HAS_WARNING +#endif +#if defined(__has_warning) + #define JSON_HEDLEY_GCC_HAS_WARNING(warning,major,minor,patch) __has_warning(warning) +#else + #define JSON_HEDLEY_GCC_HAS_WARNING(warning,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if \ + (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)) || \ + defined(__clang__) || \ + 
JSON_HEDLEY_GCC_VERSION_CHECK(3,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(18,4,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,7,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(2,0,1) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,1,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,0,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_CRAY_VERSION_CHECK(5,0,0) || \ + JSON_HEDLEY_TINYC_VERSION_CHECK(0,9,17) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(8,0,0) || \ + (JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) && defined(__C99_PRAGMA_OPERATOR)) + #define JSON_HEDLEY_PRAGMA(value) _Pragma(#value) +#elif JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0) + #define JSON_HEDLEY_PRAGMA(value) __pragma(value) +#else + #define JSON_HEDLEY_PRAGMA(value) +#endif + +#if defined(JSON_HEDLEY_DIAGNOSTIC_PUSH) + #undef JSON_HEDLEY_DIAGNOSTIC_PUSH +#endif +#if defined(JSON_HEDLEY_DIAGNOSTIC_POP) + #undef JSON_HEDLEY_DIAGNOSTIC_POP +#endif +#if defined(__clang__) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("clang diagnostic push") + #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("clang diagnostic pop") +#elif JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("warning(push)") + #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("warning(pop)") +#elif JSON_HEDLEY_GCC_VERSION_CHECK(4,6,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("GCC diagnostic push") + #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("GCC diagnostic pop") +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH __pragma(warning(push)) + #define JSON_HEDLEY_DIAGNOSTIC_POP __pragma(warning(pop)) +#elif JSON_HEDLEY_ARM_VERSION_CHECK(5,6,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("push") + #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("pop") +#elif \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,4,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,1,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("diag_push") + #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("diag_pop") +#elif JSON_HEDLEY_PELLES_VERSION_CHECK(2,90,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("warning(push)") + #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("warning(pop)") +#else + #define JSON_HEDLEY_DIAGNOSTIC_PUSH + #define JSON_HEDLEY_DIAGNOSTIC_POP +#endif + +/* JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_ is for + HEDLEY INTERNAL USE ONLY. API subject to change without notice. 
*/ +#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_) + #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_ +#endif +#if defined(__cplusplus) +# if JSON_HEDLEY_HAS_WARNING("-Wc++98-compat") +# if JSON_HEDLEY_HAS_WARNING("-Wc++17-extensions") +# if JSON_HEDLEY_HAS_WARNING("-Wc++1z-extensions") +# define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(xpr) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("clang diagnostic ignored \"-Wc++98-compat\"") \ + _Pragma("clang diagnostic ignored \"-Wc++17-extensions\"") \ + _Pragma("clang diagnostic ignored \"-Wc++1z-extensions\"") \ + xpr \ + JSON_HEDLEY_DIAGNOSTIC_POP +# else +# define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(xpr) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("clang diagnostic ignored \"-Wc++98-compat\"") \ + _Pragma("clang diagnostic ignored \"-Wc++17-extensions\"") \ + xpr \ + JSON_HEDLEY_DIAGNOSTIC_POP +# endif +# else +# define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(xpr) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("clang diagnostic ignored \"-Wc++98-compat\"") \ + xpr \ + JSON_HEDLEY_DIAGNOSTIC_POP +# endif +# endif +#endif +#if !defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(x) x +#endif + +#if defined(JSON_HEDLEY_CONST_CAST) + #undef JSON_HEDLEY_CONST_CAST +#endif +#if defined(__cplusplus) +# define JSON_HEDLEY_CONST_CAST(T, expr) (const_cast<T>(expr)) +#elif \ + JSON_HEDLEY_HAS_WARNING("-Wcast-qual") || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,6,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) +# define JSON_HEDLEY_CONST_CAST(T, expr) (__extension__ ({ \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL \ + ((T) (expr)); \ + JSON_HEDLEY_DIAGNOSTIC_POP \ + })) +#else +# define JSON_HEDLEY_CONST_CAST(T, expr) ((T) (expr)) +#endif + +#if defined(JSON_HEDLEY_REINTERPRET_CAST) + #undef JSON_HEDLEY_REINTERPRET_CAST +#endif +#if defined(__cplusplus) + #define JSON_HEDLEY_REINTERPRET_CAST(T, expr) (reinterpret_cast<T>(expr)) +#else + #define JSON_HEDLEY_REINTERPRET_CAST(T, expr) ((T) (expr)) +#endif + +#if defined(JSON_HEDLEY_STATIC_CAST) + #undef JSON_HEDLEY_STATIC_CAST +#endif +#if defined(__cplusplus) + #define JSON_HEDLEY_STATIC_CAST(T, expr) (static_cast<T>(expr)) +#else + #define JSON_HEDLEY_STATIC_CAST(T, expr) ((T) (expr)) +#endif + +#if defined(JSON_HEDLEY_CPP_CAST) + #undef JSON_HEDLEY_CPP_CAST +#endif +#if defined(__cplusplus) +# if JSON_HEDLEY_HAS_WARNING("-Wold-style-cast") +# define JSON_HEDLEY_CPP_CAST(T, expr) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("clang diagnostic ignored \"-Wold-style-cast\"") \ + ((T) (expr)) \ + JSON_HEDLEY_DIAGNOSTIC_POP +# elif JSON_HEDLEY_IAR_VERSION_CHECK(8,3,0) +# define JSON_HEDLEY_CPP_CAST(T, expr) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("diag_suppress=Pe137") \ + JSON_HEDLEY_DIAGNOSTIC_POP +# else +# define JSON_HEDLEY_CPP_CAST(T, expr) ((T) (expr)) +# endif +#else +# define JSON_HEDLEY_CPP_CAST(T, expr) (expr) +#endif + +#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED) + #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wdeprecated-declarations") + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("clang diagnostic ignored \"-Wdeprecated-declarations\"") +#elif JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("warning(disable:1478 1786)") +#elif JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED 
__pragma(warning(disable:1478 1786)) +#elif JSON_HEDLEY_PGI_VERSION_CHECK(20,7,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("diag_suppress 1215,1216,1444,1445") +#elif JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("diag_suppress 1215,1444") +#elif JSON_HEDLEY_GCC_VERSION_CHECK(4,3,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"") +#elif JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED __pragma(warning(disable:4996)) +#elif JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("diag_suppress 1215,1444") +#elif \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("diag_suppress 1291,1718") +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,13,0) && !defined(__cplusplus) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("error_messages(off,E_DEPRECATED_ATT,E_DEPRECATED_ATT_MESS)") +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,13,0) && defined(__cplusplus) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("error_messages(off,symdeprecated,symdeprecated2)") +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("diag_suppress=Pe1444,Pe1215") +#elif JSON_HEDLEY_PELLES_VERSION_CHECK(2,90,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("warn(disable:2241)") +#else + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED +#endif + +#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS) + #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wunknown-pragmas") + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("clang diagnostic ignored \"-Wunknown-pragmas\"") +#elif JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("warning(disable:161)") +#elif JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS __pragma(warning(disable:161)) +#elif JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress 1675") +#elif JSON_HEDLEY_GCC_VERSION_CHECK(4,3,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("GCC diagnostic ignored \"-Wunknown-pragmas\"") +#elif JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS __pragma(warning(disable:4068)) +#elif \ + JSON_HEDLEY_TI_VERSION_CHECK(16,9,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,0,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,3,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress 163") +#elif 
JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress 163") +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress=Pe161") +#elif JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress 161") +#else + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS +#endif + +#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES) + #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wunknown-attributes") + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("clang diagnostic ignored \"-Wunknown-attributes\"") +#elif JSON_HEDLEY_GCC_VERSION_CHECK(4,6,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"") +#elif JSON_HEDLEY_INTEL_VERSION_CHECK(17,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("warning(disable:1292)") +#elif JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES __pragma(warning(disable:1292)) +#elif JSON_HEDLEY_MSVC_VERSION_CHECK(19,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES __pragma(warning(disable:5030)) +#elif JSON_HEDLEY_PGI_VERSION_CHECK(20,7,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress 1097,1098") +#elif JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress 1097") +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,14,0) && defined(__cplusplus) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("error_messages(off,attrskipunsup)") +#elif \ + JSON_HEDLEY_TI_VERSION_CHECK(18,1,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,3,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress 1173") +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress=Pe1097") +#elif JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress 1097") +#else + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES +#endif + +#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL) + #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wcast-qual") + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL _Pragma("clang diagnostic ignored \"-Wcast-qual\"") +#elif JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL _Pragma("warning(disable:2203 2331)") +#elif JSON_HEDLEY_GCC_VERSION_CHECK(3,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL _Pragma("GCC diagnostic ignored \"-Wcast-qual\"") +#else + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL +#endif + +#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION) + #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wunused-function") + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION _Pragma("clang diagnostic ignored \"-Wunused-function\"") +#elif JSON_HEDLEY_GCC_VERSION_CHECK(3,4,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION _Pragma("GCC diagnostic ignored \"-Wunused-function\"") +#elif 
JSON_HEDLEY_MSVC_VERSION_CHECK(1,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION __pragma(warning(disable:4505)) +#elif JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION _Pragma("diag_suppress 3142") +#else + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION +#endif + +#if defined(JSON_HEDLEY_DEPRECATED) + #undef JSON_HEDLEY_DEPRECATED +#endif +#if defined(JSON_HEDLEY_DEPRECATED_FOR) + #undef JSON_HEDLEY_DEPRECATED_FOR +#endif +#if \ + JSON_HEDLEY_MSVC_VERSION_CHECK(14,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_DEPRECATED(since) __declspec(deprecated("Since " # since)) + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) __declspec(deprecated("Since " #since "; use " #replacement)) +#elif \ + (JSON_HEDLEY_HAS_EXTENSION(attribute_deprecated_with_message) && !defined(JSON_HEDLEY_IAR_VERSION)) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,5,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(5,6,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,13,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(18,1,0) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(18,1,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,3,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,3,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_DEPRECATED(since) __attribute__((__deprecated__("Since " #since))) + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) __attribute__((__deprecated__("Since " #since "; use " #replacement))) +#elif defined(__cplusplus) && (__cplusplus >= 201402L) + #define JSON_HEDLEY_DEPRECATED(since) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[deprecated("Since " #since)]]) + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[deprecated("Since " #since "; use " #replacement)]]) +#elif \ + JSON_HEDLEY_HAS_ATTRIBUTE(deprecated) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) || \ + JSON_HEDLEY_IAR_VERSION_CHECK(8,10,0) + #define JSON_HEDLEY_DEPRECATED(since) __attribute__((__deprecated__)) + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) __attribute__((__deprecated__)) +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(13,10,0) || \ + JSON_HEDLEY_PELLES_VERSION_CHECK(6,50,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_DEPRECATED(since) __declspec(deprecated) + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) __declspec(deprecated) +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_DEPRECATED(since) _Pragma("deprecated") + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) 
_Pragma("deprecated") +#else + #define JSON_HEDLEY_DEPRECATED(since) + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) +#endif + +#if defined(JSON_HEDLEY_UNAVAILABLE) + #undef JSON_HEDLEY_UNAVAILABLE +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(warning) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,3,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_UNAVAILABLE(available_since) __attribute__((__warning__("Not available until " #available_since))) +#else + #define JSON_HEDLEY_UNAVAILABLE(available_since) +#endif + +#if defined(JSON_HEDLEY_WARN_UNUSED_RESULT) + #undef JSON_HEDLEY_WARN_UNUSED_RESULT +#endif +#if defined(JSON_HEDLEY_WARN_UNUSED_RESULT_MSG) + #undef JSON_HEDLEY_WARN_UNUSED_RESULT_MSG +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(warn_unused_result) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,4,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + (JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,15,0) && defined(__cplusplus)) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_WARN_UNUSED_RESULT __attribute__((__warn_unused_result__)) + #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) __attribute__((__warn_unused_result__)) +#elif (JSON_HEDLEY_HAS_CPP_ATTRIBUTE(nodiscard) >= 201907L) + #define JSON_HEDLEY_WARN_UNUSED_RESULT JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[nodiscard]]) + #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[nodiscard(msg)]]) +#elif JSON_HEDLEY_HAS_CPP_ATTRIBUTE(nodiscard) + #define JSON_HEDLEY_WARN_UNUSED_RESULT JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[nodiscard]]) + #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[nodiscard]]) +#elif defined(_Check_return_) /* SAL */ + #define JSON_HEDLEY_WARN_UNUSED_RESULT _Check_return_ + #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) _Check_return_ +#else + #define JSON_HEDLEY_WARN_UNUSED_RESULT + #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) +#endif + +#if defined(JSON_HEDLEY_SENTINEL) + #undef JSON_HEDLEY_SENTINEL +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(sentinel) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(5,4,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_SENTINEL(position) __attribute__((__sentinel__(position))) +#else + #define JSON_HEDLEY_SENTINEL(position) +#endif + +#if defined(JSON_HEDLEY_NO_RETURN) + #undef JSON_HEDLEY_NO_RETURN +#endif +#if JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_NO_RETURN __noreturn +#elif \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_NO_RETURN 
__attribute__((__noreturn__)) +#elif defined(__STDC_VERSION__) && __STDC_VERSION__ >= 201112L + #define JSON_HEDLEY_NO_RETURN _Noreturn +#elif defined(__cplusplus) && (__cplusplus >= 201103L) + #define JSON_HEDLEY_NO_RETURN JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[noreturn]]) +#elif \ + JSON_HEDLEY_HAS_ATTRIBUTE(noreturn) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,2,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_IAR_VERSION_CHECK(8,10,0) + #define JSON_HEDLEY_NO_RETURN __attribute__((__noreturn__)) +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) + #define JSON_HEDLEY_NO_RETURN _Pragma("does_not_return") +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(13,10,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_NO_RETURN __declspec(noreturn) +#elif JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,0,0) && defined(__cplusplus) + #define JSON_HEDLEY_NO_RETURN _Pragma("FUNC_NEVER_RETURNS;") +#elif JSON_HEDLEY_COMPCERT_VERSION_CHECK(3,2,0) + #define JSON_HEDLEY_NO_RETURN __attribute((noreturn)) +#elif JSON_HEDLEY_PELLES_VERSION_CHECK(9,0,0) + #define JSON_HEDLEY_NO_RETURN __declspec(noreturn) +#else + #define JSON_HEDLEY_NO_RETURN +#endif + +#if defined(JSON_HEDLEY_NO_ESCAPE) + #undef JSON_HEDLEY_NO_ESCAPE +#endif +#if JSON_HEDLEY_HAS_ATTRIBUTE(noescape) + #define JSON_HEDLEY_NO_ESCAPE __attribute__((__noescape__)) +#else + #define JSON_HEDLEY_NO_ESCAPE +#endif + +#if defined(JSON_HEDLEY_UNREACHABLE) + #undef JSON_HEDLEY_UNREACHABLE +#endif +#if defined(JSON_HEDLEY_UNREACHABLE_RETURN) + #undef JSON_HEDLEY_UNREACHABLE_RETURN +#endif +#if defined(JSON_HEDLEY_ASSUME) + #undef JSON_HEDLEY_ASSUME +#endif +#if \ + JSON_HEDLEY_MSVC_VERSION_CHECK(13,10,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_ASSUME(expr) __assume(expr) +#elif JSON_HEDLEY_HAS_BUILTIN(__builtin_assume) + #define JSON_HEDLEY_ASSUME(expr) __builtin_assume(expr) +#elif \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,2,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(4,0,0) + #if defined(__cplusplus) + #define JSON_HEDLEY_ASSUME(expr) std::_nassert(expr) + #else + #define JSON_HEDLEY_ASSUME(expr) _nassert(expr) + #endif +#endif +#if \ + (JSON_HEDLEY_HAS_BUILTIN(__builtin_unreachable) && (!defined(JSON_HEDLEY_ARM_VERSION))) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,5,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(18,10,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(13,1,5) || \ + JSON_HEDLEY_CRAY_VERSION_CHECK(10,0,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_UNREACHABLE() __builtin_unreachable() +#elif defined(JSON_HEDLEY_ASSUME) + #define JSON_HEDLEY_UNREACHABLE() JSON_HEDLEY_ASSUME(0) +#endif +#if 
!defined(JSON_HEDLEY_ASSUME) + #if defined(JSON_HEDLEY_UNREACHABLE) + #define JSON_HEDLEY_ASSUME(expr) JSON_HEDLEY_STATIC_CAST(void, ((expr) ? 1 : (JSON_HEDLEY_UNREACHABLE(), 1))) + #else + #define JSON_HEDLEY_ASSUME(expr) JSON_HEDLEY_STATIC_CAST(void, expr) + #endif +#endif +#if defined(JSON_HEDLEY_UNREACHABLE) + #if \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,2,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(4,0,0) + #define JSON_HEDLEY_UNREACHABLE_RETURN(value) return (JSON_HEDLEY_STATIC_CAST(void, JSON_HEDLEY_ASSUME(0)), (value)) + #else + #define JSON_HEDLEY_UNREACHABLE_RETURN(value) JSON_HEDLEY_UNREACHABLE() + #endif +#else + #define JSON_HEDLEY_UNREACHABLE_RETURN(value) return (value) +#endif +#if !defined(JSON_HEDLEY_UNREACHABLE) + #define JSON_HEDLEY_UNREACHABLE() JSON_HEDLEY_ASSUME(0) +#endif + +JSON_HEDLEY_DIAGNOSTIC_PUSH +#if JSON_HEDLEY_HAS_WARNING("-Wpedantic") + #pragma clang diagnostic ignored "-Wpedantic" +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wc++98-compat-pedantic") && defined(__cplusplus) + #pragma clang diagnostic ignored "-Wc++98-compat-pedantic" +#endif +#if JSON_HEDLEY_GCC_HAS_WARNING("-Wvariadic-macros",4,0,0) + #if defined(__clang__) + #pragma clang diagnostic ignored "-Wvariadic-macros" + #elif defined(JSON_HEDLEY_GCC_VERSION) + #pragma GCC diagnostic ignored "-Wvariadic-macros" + #endif +#endif +#if defined(JSON_HEDLEY_NON_NULL) + #undef JSON_HEDLEY_NON_NULL +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(nonnull) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,3,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) + #define JSON_HEDLEY_NON_NULL(...) __attribute__((__nonnull__(__VA_ARGS__))) +#else + #define JSON_HEDLEY_NON_NULL(...) +#endif +JSON_HEDLEY_DIAGNOSTIC_POP + +#if defined(JSON_HEDLEY_PRINTF_FORMAT) + #undef JSON_HEDLEY_PRINTF_FORMAT +#endif +#if defined(__MINGW32__) && JSON_HEDLEY_GCC_HAS_ATTRIBUTE(format,4,4,0) && !defined(__USE_MINGW_ANSI_STDIO) + #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) __attribute__((__format__(ms_printf, string_idx, first_to_check))) +#elif defined(__MINGW32__) && JSON_HEDLEY_GCC_HAS_ATTRIBUTE(format,4,4,0) && defined(__USE_MINGW_ANSI_STDIO) + #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) __attribute__((__format__(gnu_printf, string_idx, first_to_check))) +#elif \ + JSON_HEDLEY_HAS_ATTRIBUTE(format) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(5,6,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) __attribute__((__format__(__printf__, string_idx, first_to_check))) +#elif JSON_HEDLEY_PELLES_VERSION_CHECK(6,0,0) + #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) 
__declspec(vaformat(printf,string_idx,first_to_check)) +#else + #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) +#endif + +#if defined(JSON_HEDLEY_CONSTEXPR) + #undef JSON_HEDLEY_CONSTEXPR +#endif +#if defined(__cplusplus) + #if __cplusplus >= 201103L + #define JSON_HEDLEY_CONSTEXPR JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(constexpr) + #endif +#endif +#if !defined(JSON_HEDLEY_CONSTEXPR) + #define JSON_HEDLEY_CONSTEXPR +#endif + +#if defined(JSON_HEDLEY_PREDICT) + #undef JSON_HEDLEY_PREDICT +#endif +#if defined(JSON_HEDLEY_LIKELY) + #undef JSON_HEDLEY_LIKELY +#endif +#if defined(JSON_HEDLEY_UNLIKELY) + #undef JSON_HEDLEY_UNLIKELY +#endif +#if defined(JSON_HEDLEY_UNPREDICTABLE) + #undef JSON_HEDLEY_UNPREDICTABLE +#endif +#if JSON_HEDLEY_HAS_BUILTIN(__builtin_unpredictable) + #define JSON_HEDLEY_UNPREDICTABLE(expr) __builtin_unpredictable((expr)) +#endif +#if \ + (JSON_HEDLEY_HAS_BUILTIN(__builtin_expect_with_probability) && !defined(JSON_HEDLEY_PGI_VERSION)) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(9,0,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) +# define JSON_HEDLEY_PREDICT(expr, value, probability) __builtin_expect_with_probability( (expr), (value), (probability)) +# define JSON_HEDLEY_PREDICT_TRUE(expr, probability) __builtin_expect_with_probability(!!(expr), 1 , (probability)) +# define JSON_HEDLEY_PREDICT_FALSE(expr, probability) __builtin_expect_with_probability(!!(expr), 0 , (probability)) +# define JSON_HEDLEY_LIKELY(expr) __builtin_expect (!!(expr), 1 ) +# define JSON_HEDLEY_UNLIKELY(expr) __builtin_expect (!!(expr), 0 ) +#elif \ + (JSON_HEDLEY_HAS_BUILTIN(__builtin_expect) && !defined(JSON_HEDLEY_INTEL_CL_VERSION)) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + (JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,15,0) && defined(__cplusplus)) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,7,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,1,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,1,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_TINYC_VERSION_CHECK(0,9,27) || \ + JSON_HEDLEY_CRAY_VERSION_CHECK(8,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) +# define JSON_HEDLEY_PREDICT(expr, expected, probability) \ + (((probability) >= 0.9) ? __builtin_expect((expr), (expected)) : (JSON_HEDLEY_STATIC_CAST(void, expected), (expr))) +# define JSON_HEDLEY_PREDICT_TRUE(expr, probability) \ + (__extension__ ({ \ + double hedley_probability_ = (probability); \ + ((hedley_probability_ >= 0.9) ? __builtin_expect(!!(expr), 1) : ((hedley_probability_ <= 0.1) ? __builtin_expect(!!(expr), 0) : !!(expr))); \ + })) +# define JSON_HEDLEY_PREDICT_FALSE(expr, probability) \ + (__extension__ ({ \ + double hedley_probability_ = (probability); \ + ((hedley_probability_ >= 0.9) ? __builtin_expect(!!(expr), 0) : ((hedley_probability_ <= 0.1) ? 
__builtin_expect(!!(expr), 1) : !!(expr))); \ + })) +# define JSON_HEDLEY_LIKELY(expr) __builtin_expect(!!(expr), 1) +# define JSON_HEDLEY_UNLIKELY(expr) __builtin_expect(!!(expr), 0) +#else +# define JSON_HEDLEY_PREDICT(expr, expected, probability) (JSON_HEDLEY_STATIC_CAST(void, expected), (expr)) +# define JSON_HEDLEY_PREDICT_TRUE(expr, probability) (!!(expr)) +# define JSON_HEDLEY_PREDICT_FALSE(expr, probability) (!!(expr)) +# define JSON_HEDLEY_LIKELY(expr) (!!(expr)) +# define JSON_HEDLEY_UNLIKELY(expr) (!!(expr)) +#endif +#if !defined(JSON_HEDLEY_UNPREDICTABLE) + #define JSON_HEDLEY_UNPREDICTABLE(expr) JSON_HEDLEY_PREDICT(expr, 1, 0.5) +#endif + +#if defined(JSON_HEDLEY_MALLOC) + #undef JSON_HEDLEY_MALLOC +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(malloc) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(12,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_MALLOC __attribute__((__malloc__)) +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) + #define JSON_HEDLEY_MALLOC _Pragma("returns_new_memory") +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(14,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_MALLOC __declspec(restrict) +#else + #define JSON_HEDLEY_MALLOC +#endif + +#if defined(JSON_HEDLEY_PURE) + #undef JSON_HEDLEY_PURE +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(pure) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(2,96,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) +# define JSON_HEDLEY_PURE __attribute__((__pure__)) +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) +# define JSON_HEDLEY_PURE _Pragma("does_not_write_global_data") +#elif defined(__cplusplus) && \ + ( \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(2,0,1) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(4,0,0) 
|| \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) \ + ) +# define JSON_HEDLEY_PURE _Pragma("FUNC_IS_PURE;") +#else +# define JSON_HEDLEY_PURE +#endif + +#if defined(JSON_HEDLEY_CONST) + #undef JSON_HEDLEY_CONST +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(const) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(2,5,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_CONST __attribute__((__const__)) +#elif \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) + #define JSON_HEDLEY_CONST _Pragma("no_side_effect") +#else + #define JSON_HEDLEY_CONST JSON_HEDLEY_PURE +#endif + +#if defined(JSON_HEDLEY_RESTRICT) + #undef JSON_HEDLEY_RESTRICT +#endif +#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) && !defined(__cplusplus) + #define JSON_HEDLEY_RESTRICT restrict +#elif \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_MSVC_VERSION_CHECK(14,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,2,4) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,1,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + (JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,14,0) && defined(__cplusplus)) || \ + JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) || \ + defined(__clang__) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_RESTRICT __restrict +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,3,0) && !defined(__cplusplus) + #define JSON_HEDLEY_RESTRICT _Restrict +#else + #define JSON_HEDLEY_RESTRICT +#endif + +#if defined(JSON_HEDLEY_INLINE) + #undef JSON_HEDLEY_INLINE +#endif +#if \ + (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)) || \ + (defined(__cplusplus) && (__cplusplus >= 199711L)) + #define JSON_HEDLEY_INLINE inline +#elif \ + defined(JSON_HEDLEY_GCC_VERSION) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(6,2,0) + #define JSON_HEDLEY_INLINE __inline__ +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(12,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,1,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,2,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,0,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_INLINE __inline +#else + #define 
JSON_HEDLEY_INLINE +#endif + +#if defined(JSON_HEDLEY_ALWAYS_INLINE) + #undef JSON_HEDLEY_ALWAYS_INLINE +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(always_inline) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) || \ + JSON_HEDLEY_IAR_VERSION_CHECK(8,10,0) +# define JSON_HEDLEY_ALWAYS_INLINE __attribute__((__always_inline__)) JSON_HEDLEY_INLINE +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(12,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) +# define JSON_HEDLEY_ALWAYS_INLINE __forceinline +#elif defined(__cplusplus) && \ + ( \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,1,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) \ + ) +# define JSON_HEDLEY_ALWAYS_INLINE _Pragma("FUNC_ALWAYS_INLINE;") +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) +# define JSON_HEDLEY_ALWAYS_INLINE _Pragma("inline=forced") +#else +# define JSON_HEDLEY_ALWAYS_INLINE JSON_HEDLEY_INLINE +#endif + +#if defined(JSON_HEDLEY_NEVER_INLINE) + #undef JSON_HEDLEY_NEVER_INLINE +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(noinline) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) || \ + JSON_HEDLEY_IAR_VERSION_CHECK(8,10,0) + #define JSON_HEDLEY_NEVER_INLINE __attribute__((__noinline__)) +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(13,10,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_NEVER_INLINE __declspec(noinline) +#elif JSON_HEDLEY_PGI_VERSION_CHECK(10,2,0) + #define JSON_HEDLEY_NEVER_INLINE _Pragma("noinline") +#elif JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,0,0) && 
defined(__cplusplus) + #define JSON_HEDLEY_NEVER_INLINE _Pragma("FUNC_CANNOT_INLINE;") +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_NEVER_INLINE _Pragma("inline=never") +#elif JSON_HEDLEY_COMPCERT_VERSION_CHECK(3,2,0) + #define JSON_HEDLEY_NEVER_INLINE __attribute((noinline)) +#elif JSON_HEDLEY_PELLES_VERSION_CHECK(9,0,0) + #define JSON_HEDLEY_NEVER_INLINE __declspec(noinline) +#else + #define JSON_HEDLEY_NEVER_INLINE +#endif + +#if defined(JSON_HEDLEY_PRIVATE) + #undef JSON_HEDLEY_PRIVATE +#endif +#if defined(JSON_HEDLEY_PUBLIC) + #undef JSON_HEDLEY_PUBLIC +#endif +#if defined(JSON_HEDLEY_IMPORT) + #undef JSON_HEDLEY_IMPORT +#endif +#if defined(_WIN32) || defined(__CYGWIN__) +# define JSON_HEDLEY_PRIVATE +# define JSON_HEDLEY_PUBLIC __declspec(dllexport) +# define JSON_HEDLEY_IMPORT __declspec(dllimport) +#else +# if \ + JSON_HEDLEY_HAS_ATTRIBUTE(visibility) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,3,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(13,1,0) || \ + ( \ + defined(__TI_EABI__) && \ + ( \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) \ + ) \ + ) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) +# define JSON_HEDLEY_PRIVATE __attribute__((__visibility__("hidden"))) +# define JSON_HEDLEY_PUBLIC __attribute__((__visibility__("default"))) +# else +# define JSON_HEDLEY_PRIVATE +# define JSON_HEDLEY_PUBLIC +# endif +# define JSON_HEDLEY_IMPORT extern +#endif + +#if defined(JSON_HEDLEY_NO_THROW) + #undef JSON_HEDLEY_NO_THROW +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(nothrow) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,3,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_NO_THROW __attribute__((__nothrow__)) +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(13,1,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) + #define JSON_HEDLEY_NO_THROW __declspec(nothrow) +#else + #define JSON_HEDLEY_NO_THROW +#endif + +#if defined(JSON_HEDLEY_FALL_THROUGH) + #undef JSON_HEDLEY_FALL_THROUGH +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(fallthrough) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(7,0,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_FALL_THROUGH __attribute__((__fallthrough__)) +#elif JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(clang,fallthrough) + #define JSON_HEDLEY_FALL_THROUGH JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[clang::fallthrough]]) +#elif JSON_HEDLEY_HAS_CPP_ATTRIBUTE(fallthrough) + #define JSON_HEDLEY_FALL_THROUGH JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[fallthrough]]) +#elif defined(__fallthrough) /* SAL */ + #define JSON_HEDLEY_FALL_THROUGH __fallthrough +#else + #define JSON_HEDLEY_FALL_THROUGH +#endif + +#if defined(JSON_HEDLEY_RETURNS_NON_NULL) + #undef JSON_HEDLEY_RETURNS_NON_NULL +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(returns_nonnull) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,9,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_RETURNS_NON_NULL __attribute__((__returns_nonnull__)) +#elif defined(_Ret_notnull_) /* SAL */ + #define JSON_HEDLEY_RETURNS_NON_NULL _Ret_notnull_ +#else + #define JSON_HEDLEY_RETURNS_NON_NULL +#endif + +#if defined(JSON_HEDLEY_ARRAY_PARAM) + #undef JSON_HEDLEY_ARRAY_PARAM +#endif +#if \ + defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) 
&& \ + !defined(__STDC_NO_VLA__) && \ + !defined(__cplusplus) && \ + !defined(JSON_HEDLEY_PGI_VERSION) && \ + !defined(JSON_HEDLEY_TINYC_VERSION) + #define JSON_HEDLEY_ARRAY_PARAM(name) (name) +#else + #define JSON_HEDLEY_ARRAY_PARAM(name) +#endif + +#if defined(JSON_HEDLEY_IS_CONSTANT) + #undef JSON_HEDLEY_IS_CONSTANT +#endif +#if defined(JSON_HEDLEY_REQUIRE_CONSTEXPR) + #undef JSON_HEDLEY_REQUIRE_CONSTEXPR +#endif +/* JSON_HEDLEY_IS_CONSTEXPR_ is for + HEDLEY INTERNAL USE ONLY. API subject to change without notice. */ +#if defined(JSON_HEDLEY_IS_CONSTEXPR_) + #undef JSON_HEDLEY_IS_CONSTEXPR_ +#endif +#if \ + JSON_HEDLEY_HAS_BUILTIN(__builtin_constant_p) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,4,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_TINYC_VERSION_CHECK(0,9,19) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(13,1,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,1,0) || \ + (JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) && !defined(__cplusplus)) || \ + JSON_HEDLEY_CRAY_VERSION_CHECK(8,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_IS_CONSTANT(expr) __builtin_constant_p(expr) +#endif +#if !defined(__cplusplus) +# if \ + JSON_HEDLEY_HAS_BUILTIN(__builtin_types_compatible_p) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,4,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(13,1,0) || \ + JSON_HEDLEY_CRAY_VERSION_CHECK(8,1,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(5,4,0) || \ + JSON_HEDLEY_TINYC_VERSION_CHECK(0,9,24) +#if defined(__INTPTR_TYPE__) + #define JSON_HEDLEY_IS_CONSTEXPR_(expr) __builtin_types_compatible_p(__typeof__((1 ? (void*) ((__INTPTR_TYPE__) ((expr) * 0)) : (int*) 0)), int*) +#else + #include <stdint.h> + #define JSON_HEDLEY_IS_CONSTEXPR_(expr) __builtin_types_compatible_p(__typeof__((1 ? (void*) ((intptr_t) ((expr) * 0)) : (int*) 0)), int*) +#endif +# elif \ + ( \ + defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L) && \ + !defined(JSON_HEDLEY_SUNPRO_VERSION) && \ + !defined(JSON_HEDLEY_PGI_VERSION) && \ + !defined(JSON_HEDLEY_IAR_VERSION)) || \ + (JSON_HEDLEY_HAS_EXTENSION(c_generic_selections) && !defined(JSON_HEDLEY_IAR_VERSION)) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,9,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(17,0,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(12,1,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(5,3,0) +#if defined(__INTPTR_TYPE__) + #define JSON_HEDLEY_IS_CONSTEXPR_(expr) _Generic((1 ? (void*) ((__INTPTR_TYPE__) ((expr) * 0)) : (int*) 0), int*: 1, void*: 0) +#else + #include <stdint.h> + #define JSON_HEDLEY_IS_CONSTEXPR_(expr) _Generic((1 ? (void*) ((intptr_t) * 0) : (int*) 0), int*: 1, void*: 0) +#endif +# elif \ + defined(JSON_HEDLEY_GCC_VERSION) || \ + defined(JSON_HEDLEY_INTEL_VERSION) || \ + defined(JSON_HEDLEY_TINYC_VERSION) || \ + defined(JSON_HEDLEY_TI_ARMCL_VERSION) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(18,12,0) || \ + defined(JSON_HEDLEY_TI_CL2000_VERSION) || \ + defined(JSON_HEDLEY_TI_CL6X_VERSION) || \ + defined(JSON_HEDLEY_TI_CL7X_VERSION) || \ + defined(JSON_HEDLEY_TI_CLPRU_VERSION) || \ + defined(__clang__) +# define JSON_HEDLEY_IS_CONSTEXPR_(expr) ( \ + sizeof(void) != \ + sizeof(*( \ + 1 ? 
\ + ((void*) ((expr) * 0L) ) : \ +((struct { char v[sizeof(void) * 2]; } *) 1) \ + ) \ + ) \ + ) +# endif +#endif +#if defined(JSON_HEDLEY_IS_CONSTEXPR_) + #if !defined(JSON_HEDLEY_IS_CONSTANT) + #define JSON_HEDLEY_IS_CONSTANT(expr) JSON_HEDLEY_IS_CONSTEXPR_(expr) + #endif + #define JSON_HEDLEY_REQUIRE_CONSTEXPR(expr) (JSON_HEDLEY_IS_CONSTEXPR_(expr) ? (expr) : (-1)) +#else + #if !defined(JSON_HEDLEY_IS_CONSTANT) + #define JSON_HEDLEY_IS_CONSTANT(expr) (0) + #endif + #define JSON_HEDLEY_REQUIRE_CONSTEXPR(expr) (expr) +#endif + +#if defined(JSON_HEDLEY_BEGIN_C_DECLS) + #undef JSON_HEDLEY_BEGIN_C_DECLS +#endif +#if defined(JSON_HEDLEY_END_C_DECLS) + #undef JSON_HEDLEY_END_C_DECLS +#endif +#if defined(JSON_HEDLEY_C_DECL) + #undef JSON_HEDLEY_C_DECL +#endif +#if defined(__cplusplus) + #define JSON_HEDLEY_BEGIN_C_DECLS extern "C" { + #define JSON_HEDLEY_END_C_DECLS } + #define JSON_HEDLEY_C_DECL extern "C" +#else + #define JSON_HEDLEY_BEGIN_C_DECLS + #define JSON_HEDLEY_END_C_DECLS + #define JSON_HEDLEY_C_DECL +#endif + +#if defined(JSON_HEDLEY_STATIC_ASSERT) + #undef JSON_HEDLEY_STATIC_ASSERT +#endif +#if \ + !defined(__cplusplus) && ( \ + (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)) || \ + (JSON_HEDLEY_HAS_FEATURE(c_static_assert) && !defined(JSON_HEDLEY_INTEL_CL_VERSION)) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(6,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + defined(_Static_assert) \ + ) +# define JSON_HEDLEY_STATIC_ASSERT(expr, message) _Static_assert(expr, message) +#elif \ + (defined(__cplusplus) && (__cplusplus >= 201103L)) || \ + JSON_HEDLEY_MSVC_VERSION_CHECK(16,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) +# define JSON_HEDLEY_STATIC_ASSERT(expr, message) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(static_assert(expr, message)) +#else +# define JSON_HEDLEY_STATIC_ASSERT(expr, message) +#endif + +#if defined(JSON_HEDLEY_NULL) + #undef JSON_HEDLEY_NULL +#endif +#if defined(__cplusplus) + #if __cplusplus >= 201103L + #define JSON_HEDLEY_NULL JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(nullptr) + #elif defined(NULL) + #define JSON_HEDLEY_NULL NULL + #else + #define JSON_HEDLEY_NULL JSON_HEDLEY_STATIC_CAST(void*, 0) + #endif +#elif defined(NULL) + #define JSON_HEDLEY_NULL NULL +#else + #define JSON_HEDLEY_NULL ((void*) 0) +#endif + +#if defined(JSON_HEDLEY_MESSAGE) + #undef JSON_HEDLEY_MESSAGE +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wunknown-pragmas") +# define JSON_HEDLEY_MESSAGE(msg) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS \ + JSON_HEDLEY_PRAGMA(message msg) \ + JSON_HEDLEY_DIAGNOSTIC_POP +#elif \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,4,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) +# define JSON_HEDLEY_MESSAGE(msg) JSON_HEDLEY_PRAGMA(message msg) +#elif JSON_HEDLEY_CRAY_VERSION_CHECK(5,0,0) +# define JSON_HEDLEY_MESSAGE(msg) JSON_HEDLEY_PRAGMA(_CRI message msg) +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) +# define JSON_HEDLEY_MESSAGE(msg) JSON_HEDLEY_PRAGMA(message(msg)) +#elif JSON_HEDLEY_PELLES_VERSION_CHECK(2,0,0) +# define JSON_HEDLEY_MESSAGE(msg) JSON_HEDLEY_PRAGMA(message(msg)) +#else +# define JSON_HEDLEY_MESSAGE(msg) +#endif + +#if defined(JSON_HEDLEY_WARNING) + #undef JSON_HEDLEY_WARNING +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wunknown-pragmas") +# define JSON_HEDLEY_WARNING(msg) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS \ + JSON_HEDLEY_PRAGMA(clang warning msg) \ + JSON_HEDLEY_DIAGNOSTIC_POP +#elif \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,8,0) || \ + 
JSON_HEDLEY_PGI_VERSION_CHECK(18,4,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) +# define JSON_HEDLEY_WARNING(msg) JSON_HEDLEY_PRAGMA(GCC warning msg) +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) +# define JSON_HEDLEY_WARNING(msg) JSON_HEDLEY_PRAGMA(message(msg)) +#else +# define JSON_HEDLEY_WARNING(msg) JSON_HEDLEY_MESSAGE(msg) +#endif + +#if defined(JSON_HEDLEY_REQUIRE) + #undef JSON_HEDLEY_REQUIRE +#endif +#if defined(JSON_HEDLEY_REQUIRE_MSG) + #undef JSON_HEDLEY_REQUIRE_MSG +#endif +#if JSON_HEDLEY_HAS_ATTRIBUTE(diagnose_if) +# if JSON_HEDLEY_HAS_WARNING("-Wgcc-compat") +# define JSON_HEDLEY_REQUIRE(expr) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("clang diagnostic ignored \"-Wgcc-compat\"") \ + __attribute__((diagnose_if(!(expr), #expr, "error"))) \ + JSON_HEDLEY_DIAGNOSTIC_POP +# define JSON_HEDLEY_REQUIRE_MSG(expr,msg) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("clang diagnostic ignored \"-Wgcc-compat\"") \ + __attribute__((diagnose_if(!(expr), msg, "error"))) \ + JSON_HEDLEY_DIAGNOSTIC_POP +# else +# define JSON_HEDLEY_REQUIRE(expr) __attribute__((diagnose_if(!(expr), #expr, "error"))) +# define JSON_HEDLEY_REQUIRE_MSG(expr,msg) __attribute__((diagnose_if(!(expr), msg, "error"))) +# endif +#else +# define JSON_HEDLEY_REQUIRE(expr) +# define JSON_HEDLEY_REQUIRE_MSG(expr,msg) +#endif + +#if defined(JSON_HEDLEY_FLAGS) + #undef JSON_HEDLEY_FLAGS +#endif +#if JSON_HEDLEY_HAS_ATTRIBUTE(flag_enum) && (!defined(__cplusplus) || JSON_HEDLEY_HAS_WARNING("-Wbitfield-enum-conversion")) + #define JSON_HEDLEY_FLAGS __attribute__((__flag_enum__)) +#else + #define JSON_HEDLEY_FLAGS +#endif + +#if defined(JSON_HEDLEY_FLAGS_CAST) + #undef JSON_HEDLEY_FLAGS_CAST +#endif +#if JSON_HEDLEY_INTEL_VERSION_CHECK(19,0,0) +# define JSON_HEDLEY_FLAGS_CAST(T, expr) (__extension__ ({ \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("warning(disable:188)") \ + ((T) (expr)); \ + JSON_HEDLEY_DIAGNOSTIC_POP \ + })) +#else +# define JSON_HEDLEY_FLAGS_CAST(T, expr) JSON_HEDLEY_STATIC_CAST(T, expr) +#endif + +#if defined(JSON_HEDLEY_EMPTY_BASES) + #undef JSON_HEDLEY_EMPTY_BASES +#endif +#if \ + (JSON_HEDLEY_MSVC_VERSION_CHECK(19,0,23918) && !JSON_HEDLEY_MSVC_VERSION_CHECK(20,0,0)) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_EMPTY_BASES __declspec(empty_bases) +#else + #define JSON_HEDLEY_EMPTY_BASES +#endif + +/* Remaining macros are deprecated. 
*/ + +#if defined(JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK) + #undef JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK +#endif +#if defined(__clang__) + #define JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK(major,minor,patch) (0) +#else + #define JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK(major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_CLANG_HAS_ATTRIBUTE) + #undef JSON_HEDLEY_CLANG_HAS_ATTRIBUTE +#endif +#define JSON_HEDLEY_CLANG_HAS_ATTRIBUTE(attribute) JSON_HEDLEY_HAS_ATTRIBUTE(attribute) + +#if defined(JSON_HEDLEY_CLANG_HAS_CPP_ATTRIBUTE) + #undef JSON_HEDLEY_CLANG_HAS_CPP_ATTRIBUTE +#endif +#define JSON_HEDLEY_CLANG_HAS_CPP_ATTRIBUTE(attribute) JSON_HEDLEY_HAS_CPP_ATTRIBUTE(attribute) + +#if defined(JSON_HEDLEY_CLANG_HAS_BUILTIN) + #undef JSON_HEDLEY_CLANG_HAS_BUILTIN +#endif +#define JSON_HEDLEY_CLANG_HAS_BUILTIN(builtin) JSON_HEDLEY_HAS_BUILTIN(builtin) + +#if defined(JSON_HEDLEY_CLANG_HAS_FEATURE) + #undef JSON_HEDLEY_CLANG_HAS_FEATURE +#endif +#define JSON_HEDLEY_CLANG_HAS_FEATURE(feature) JSON_HEDLEY_HAS_FEATURE(feature) + +#if defined(JSON_HEDLEY_CLANG_HAS_EXTENSION) + #undef JSON_HEDLEY_CLANG_HAS_EXTENSION +#endif +#define JSON_HEDLEY_CLANG_HAS_EXTENSION(extension) JSON_HEDLEY_HAS_EXTENSION(extension) + +#if defined(JSON_HEDLEY_CLANG_HAS_DECLSPEC_DECLSPEC_ATTRIBUTE) + #undef JSON_HEDLEY_CLANG_HAS_DECLSPEC_DECLSPEC_ATTRIBUTE +#endif +#define JSON_HEDLEY_CLANG_HAS_DECLSPEC_ATTRIBUTE(attribute) JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE(attribute) + +#if defined(JSON_HEDLEY_CLANG_HAS_WARNING) + #undef JSON_HEDLEY_CLANG_HAS_WARNING +#endif +#define JSON_HEDLEY_CLANG_HAS_WARNING(warning) JSON_HEDLEY_HAS_WARNING(warning) + +#endif /* !defined(JSON_HEDLEY_VERSION) || (JSON_HEDLEY_VERSION < X) */ + +// #include <nlohmann/detail/meta/detected.hpp> + + +#include <type_traits> + +// #include <nlohmann/detail/meta/void_t.hpp> + + +namespace nlohmann +{ +namespace detail +{ +template<typename ...Ts> struct make_void +{ + using type = void; +}; +template<typename ...Ts> using void_t = typename make_void<Ts...>::type; +} // namespace detail +} // namespace nlohmann + + +// https://en.cppreference.com/w/cpp/experimental/is_detected +namespace nlohmann +{ +namespace detail +{ +struct nonesuch +{ + nonesuch() = delete; + ~nonesuch() = delete; + nonesuch(nonesuch const&) = delete; + nonesuch(nonesuch const&&) = delete; + void operator=(nonesuch const&) = delete; + void operator=(nonesuch&&) = delete; +}; + +template<class Default, + class AlwaysVoid, + template<class...> class Op, + class... Args> +struct detector +{ + using value_t = std::false_type; + using type = Default; +}; + +template<class Default, template<class...> class Op, class... Args> +struct detector<Default, void_t<Op<Args...>>, Op, Args...> +{ + using value_t = std::true_type; + using type = Op<Args...>; +}; + +template<template<class...> class Op, class... Args> +using is_detected = typename detector<nonesuch, void, Op, Args...>::value_t; + +template<template<class...> class Op, class... Args> +struct is_detected_lazy : is_detected<Op, Args...> { }; + +template<template<class...> class Op, class... Args> +using detected_t = typename detector<nonesuch, void, Op, Args...>::type; + +template<class Default, template<class...> class Op, class... Args> +using detected_or = detector<Default, void, Op, Args...>; + +template<class Default, template<class...> class Op, class... 
Args> +using detected_or_t = typename detected_or<Default, Op, Args...>::type; + +template<class Expected, template<class...> class Op, class... Args> +using is_detected_exact = std::is_same<Expected, detected_t<Op, Args...>>; + +template<class To, template<class...> class Op, class... Args> +using is_detected_convertible = + std::is_convertible<detected_t<Op, Args...>, To>; +} // namespace detail +} // namespace nlohmann + + +// This file contains all internal macro definitions +// You MUST include macro_unscope.hpp at the end of json.hpp to undef all of them + +// exclude unsupported compilers +#if !defined(JSON_SKIP_UNSUPPORTED_COMPILER_CHECK) + #if defined(__clang__) + #if (__clang_major__ * 10000 + __clang_minor__ * 100 + __clang_patchlevel__) < 30400 + #error "unsupported Clang version - see https://github.com/nlohmann/json#supported-compilers" + #endif + #elif defined(__GNUC__) && !(defined(__ICC) || defined(__INTEL_COMPILER)) + #if (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) < 40800 + #error "unsupported GCC version - see https://github.com/nlohmann/json#supported-compilers" + #endif + #endif +#endif + +// C++ language standard detection +// if the user manually specified the used c++ version this is skipped +#if !defined(JSON_HAS_CPP_20) && !defined(JSON_HAS_CPP_17) && !defined(JSON_HAS_CPP_14) && !defined(JSON_HAS_CPP_11) + #if (defined(__cplusplus) && __cplusplus >= 202002L) || (defined(_MSVC_LANG) && _MSVC_LANG >= 202002L) + #define JSON_HAS_CPP_20 + #define JSON_HAS_CPP_17 + #define JSON_HAS_CPP_14 + #elif (defined(__cplusplus) && __cplusplus >= 201703L) || (defined(_HAS_CXX17) && _HAS_CXX17 == 1) // fix for issue #464 + #define JSON_HAS_CPP_17 + #define JSON_HAS_CPP_14 + #elif (defined(__cplusplus) && __cplusplus >= 201402L) || (defined(_HAS_CXX14) && _HAS_CXX14 == 1) + #define JSON_HAS_CPP_14 + #endif + // the cpp 11 flag is always specified because it is the minimal required version + #define JSON_HAS_CPP_11 +#endif + +#ifdef __has_include + #if __has_include(<version>) + #include <version> + #endif +#endif + +#if !defined(JSON_HAS_FILESYSTEM) && !defined(JSON_HAS_EXPERIMENTAL_FILESYSTEM) + #ifdef JSON_HAS_CPP_17 + #if defined(__cpp_lib_filesystem) + #define JSON_HAS_FILESYSTEM 1 + #elif defined(__cpp_lib_experimental_filesystem) + #define JSON_HAS_EXPERIMENTAL_FILESYSTEM 1 + #elif !defined(__has_include) + #define JSON_HAS_EXPERIMENTAL_FILESYSTEM 1 + #elif __has_include(<filesystem>) + #define JSON_HAS_FILESYSTEM 1 + #elif __has_include(<experimental/filesystem>) + #define JSON_HAS_EXPERIMENTAL_FILESYSTEM 1 + #endif + + // std::filesystem does not work on MinGW GCC 8: https://sourceforge.net/p/mingw-w64/bugs/737/ + #if defined(__MINGW32__) && defined(__GNUC__) && __GNUC__ == 8 + #undef JSON_HAS_FILESYSTEM + #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM + #endif + + // no filesystem support before GCC 8: https://en.cppreference.com/w/cpp/compiler_support + #if defined(__GNUC__) && !defined(__clang__) && __GNUC__ < 8 + #undef JSON_HAS_FILESYSTEM + #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM + #endif + + // no filesystem support before Clang 7: https://en.cppreference.com/w/cpp/compiler_support + #if defined(__clang_major__) && __clang_major__ < 7 + #undef JSON_HAS_FILESYSTEM + #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM + #endif + + // no filesystem support before MSVC 19.14: https://en.cppreference.com/w/cpp/compiler_support + #if defined(_MSC_VER) && _MSC_VER < 1914 + #undef JSON_HAS_FILESYSTEM + #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM + #endif + + // no filesystem 
support before iOS 13 + #if defined(__IPHONE_OS_VERSION_MIN_REQUIRED) && __IPHONE_OS_VERSION_MIN_REQUIRED < 130000 + #undef JSON_HAS_FILESYSTEM + #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM + #endif + + // no filesystem support before macOS Catalina + #if defined(__MAC_OS_X_VERSION_MIN_REQUIRED) && __MAC_OS_X_VERSION_MIN_REQUIRED < 101500 + #undef JSON_HAS_FILESYSTEM + #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM + #endif + #endif +#endif + +#ifndef JSON_HAS_EXPERIMENTAL_FILESYSTEM + #define JSON_HAS_EXPERIMENTAL_FILESYSTEM 0 +#endif + +#ifndef JSON_HAS_FILESYSTEM + #define JSON_HAS_FILESYSTEM 0 +#endif + +#ifndef JSON_HAS_THREE_WAY_COMPARISON + #if defined(__cpp_impl_three_way_comparison) && __cpp_impl_three_way_comparison >= 201907L \ + && defined(__cpp_lib_three_way_comparison) && __cpp_lib_three_way_comparison >= 201907L + #define JSON_HAS_THREE_WAY_COMPARISON 1 + #else + #define JSON_HAS_THREE_WAY_COMPARISON 0 + #endif +#endif + +#ifndef JSON_HAS_RANGES + // ranges header shipping in GCC 11.1.0 (released 2021-04-27) has syntax error + #if defined(__GLIBCXX__) && __GLIBCXX__ == 20210427 + #define JSON_HAS_RANGES 0 + #elif defined(__cpp_lib_ranges) + #define JSON_HAS_RANGES 1 + #else + #define JSON_HAS_RANGES 0 + #endif +#endif + +#ifdef JSON_HAS_CPP_17 + #define JSON_INLINE_VARIABLE inline +#else + #define JSON_INLINE_VARIABLE +#endif + +#if JSON_HEDLEY_HAS_ATTRIBUTE(no_unique_address) + #define JSON_NO_UNIQUE_ADDRESS [[no_unique_address]] +#else + #define JSON_NO_UNIQUE_ADDRESS +#endif + +// disable documentation warnings on clang +#if defined(__clang__) + #pragma clang diagnostic push + #pragma clang diagnostic ignored "-Wdocumentation" + #pragma clang diagnostic ignored "-Wdocumentation-unknown-command" +#endif + +// allow disabling exceptions +#if (defined(__cpp_exceptions) || defined(__EXCEPTIONS) || defined(_CPPUNWIND)) && !defined(JSON_NOEXCEPTION) + #define JSON_THROW(exception) throw exception + #define JSON_TRY try + #define JSON_CATCH(exception) catch(exception) + #define JSON_INTERNAL_CATCH(exception) catch(exception) +#else + #include <cstdlib> + #define JSON_THROW(exception) std::abort() + #define JSON_TRY if(true) + #define JSON_CATCH(exception) if(false) + #define JSON_INTERNAL_CATCH(exception) if(false) +#endif + +// override exception macros +#if defined(JSON_THROW_USER) + #undef JSON_THROW + #define JSON_THROW JSON_THROW_USER +#endif +#if defined(JSON_TRY_USER) + #undef JSON_TRY + #define JSON_TRY JSON_TRY_USER +#endif +#if defined(JSON_CATCH_USER) + #undef JSON_CATCH + #define JSON_CATCH JSON_CATCH_USER + #undef JSON_INTERNAL_CATCH + #define JSON_INTERNAL_CATCH JSON_CATCH_USER +#endif +#if defined(JSON_INTERNAL_CATCH_USER) + #undef JSON_INTERNAL_CATCH + #define JSON_INTERNAL_CATCH JSON_INTERNAL_CATCH_USER +#endif + +// allow overriding assert +#if !defined(JSON_ASSERT) + #include <cassert> // assert + #define JSON_ASSERT(x) assert(x) +#endif + +// allow to access some private functions (needed by the test suite) +#if defined(JSON_TESTS_PRIVATE) + #define JSON_PRIVATE_UNLESS_TESTED public +#else + #define JSON_PRIVATE_UNLESS_TESTED private +#endif + +/*! +@brief macro to briefly define a mapping between an enum and JSON +@def NLOHMANN_JSON_SERIALIZE_ENUM +@since version 3.4.0 +*/ +#define NLOHMANN_JSON_SERIALIZE_ENUM(ENUM_TYPE, ...) 
\ + template<typename BasicJsonType> \ + inline void to_json(BasicJsonType& j, const ENUM_TYPE& e) \ + { \ + static_assert(std::is_enum<ENUM_TYPE>::value, #ENUM_TYPE " must be an enum!"); \ + static const std::pair<ENUM_TYPE, BasicJsonType> m[] = __VA_ARGS__; \ + auto it = std::find_if(std::begin(m), std::end(m), \ + [e](const std::pair<ENUM_TYPE, BasicJsonType>& ej_pair) -> bool \ + { \ + return ej_pair.first == e; \ + }); \ + j = ((it != std::end(m)) ? it : std::begin(m))->second; \ + } \ + template<typename BasicJsonType> \ + inline void from_json(const BasicJsonType& j, ENUM_TYPE& e) \ + { \ + static_assert(std::is_enum<ENUM_TYPE>::value, #ENUM_TYPE " must be an enum!"); \ + static const std::pair<ENUM_TYPE, BasicJsonType> m[] = __VA_ARGS__; \ + auto it = std::find_if(std::begin(m), std::end(m), \ + [&j](const std::pair<ENUM_TYPE, BasicJsonType>& ej_pair) -> bool \ + { \ + return ej_pair.second == j; \ + }); \ + e = ((it != std::end(m)) ? it : std::begin(m))->first; \ + } + +// Ugly macros to avoid uglier copy-paste when specializing basic_json. They +// may be removed in the future once the class is split. + +#define NLOHMANN_BASIC_JSON_TPL_DECLARATION \ + template<template<typename, typename, typename...> class ObjectType, \ + template<typename, typename...> class ArrayType, \ + class StringType, class BooleanType, class NumberIntegerType, \ + class NumberUnsignedType, class NumberFloatType, \ + template<typename> class AllocatorType, \ + template<typename, typename = void> class JSONSerializer, \ + class BinaryType> + +#define NLOHMANN_BASIC_JSON_TPL \ + basic_json<ObjectType, ArrayType, StringType, BooleanType, \ + NumberIntegerType, NumberUnsignedType, NumberFloatType, \ + AllocatorType, JSONSerializer, BinaryType> + +// Macros to simplify conversion from/to types + +#define NLOHMANN_JSON_EXPAND( x ) x +#define NLOHMANN_JSON_GET_MACRO(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23, _24, _25, _26, _27, _28, _29, _30, _31, _32, _33, _34, _35, _36, _37, _38, _39, _40, _41, _42, _43, _44, _45, _46, _47, _48, _49, _50, _51, _52, _53, _54, _55, _56, _57, _58, _59, _60, _61, _62, _63, _64, NAME,...) NAME +#define NLOHMANN_JSON_PASTE(...) 
NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_GET_MACRO(__VA_ARGS__, \ + NLOHMANN_JSON_PASTE64, \ + NLOHMANN_JSON_PASTE63, \ + NLOHMANN_JSON_PASTE62, \ + NLOHMANN_JSON_PASTE61, \ + NLOHMANN_JSON_PASTE60, \ + NLOHMANN_JSON_PASTE59, \ + NLOHMANN_JSON_PASTE58, \ + NLOHMANN_JSON_PASTE57, \ + NLOHMANN_JSON_PASTE56, \ + NLOHMANN_JSON_PASTE55, \ + NLOHMANN_JSON_PASTE54, \ + NLOHMANN_JSON_PASTE53, \ + NLOHMANN_JSON_PASTE52, \ + NLOHMANN_JSON_PASTE51, \ + NLOHMANN_JSON_PASTE50, \ + NLOHMANN_JSON_PASTE49, \ + NLOHMANN_JSON_PASTE48, \ + NLOHMANN_JSON_PASTE47, \ + NLOHMANN_JSON_PASTE46, \ + NLOHMANN_JSON_PASTE45, \ + NLOHMANN_JSON_PASTE44, \ + NLOHMANN_JSON_PASTE43, \ + NLOHMANN_JSON_PASTE42, \ + NLOHMANN_JSON_PASTE41, \ + NLOHMANN_JSON_PASTE40, \ + NLOHMANN_JSON_PASTE39, \ + NLOHMANN_JSON_PASTE38, \ + NLOHMANN_JSON_PASTE37, \ + NLOHMANN_JSON_PASTE36, \ + NLOHMANN_JSON_PASTE35, \ + NLOHMANN_JSON_PASTE34, \ + NLOHMANN_JSON_PASTE33, \ + NLOHMANN_JSON_PASTE32, \ + NLOHMANN_JSON_PASTE31, \ + NLOHMANN_JSON_PASTE30, \ + NLOHMANN_JSON_PASTE29, \ + NLOHMANN_JSON_PASTE28, \ + NLOHMANN_JSON_PASTE27, \ + NLOHMANN_JSON_PASTE26, \ + NLOHMANN_JSON_PASTE25, \ + NLOHMANN_JSON_PASTE24, \ + NLOHMANN_JSON_PASTE23, \ + NLOHMANN_JSON_PASTE22, \ + NLOHMANN_JSON_PASTE21, \ + NLOHMANN_JSON_PASTE20, \ + NLOHMANN_JSON_PASTE19, \ + NLOHMANN_JSON_PASTE18, \ + NLOHMANN_JSON_PASTE17, \ + NLOHMANN_JSON_PASTE16, \ + NLOHMANN_JSON_PASTE15, \ + NLOHMANN_JSON_PASTE14, \ + NLOHMANN_JSON_PASTE13, \ + NLOHMANN_JSON_PASTE12, \ + NLOHMANN_JSON_PASTE11, \ + NLOHMANN_JSON_PASTE10, \ + NLOHMANN_JSON_PASTE9, \ + NLOHMANN_JSON_PASTE8, \ + NLOHMANN_JSON_PASTE7, \ + NLOHMANN_JSON_PASTE6, \ + NLOHMANN_JSON_PASTE5, \ + NLOHMANN_JSON_PASTE4, \ + NLOHMANN_JSON_PASTE3, \ + NLOHMANN_JSON_PASTE2, \ + NLOHMANN_JSON_PASTE1)(__VA_ARGS__)) +#define NLOHMANN_JSON_PASTE2(func, v1) func(v1) +#define NLOHMANN_JSON_PASTE3(func, v1, v2) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE2(func, v2) +#define NLOHMANN_JSON_PASTE4(func, v1, v2, v3) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE3(func, v2, v3) +#define NLOHMANN_JSON_PASTE5(func, v1, v2, v3, v4) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE4(func, v2, v3, v4) +#define NLOHMANN_JSON_PASTE6(func, v1, v2, v3, v4, v5) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE5(func, v2, v3, v4, v5) +#define NLOHMANN_JSON_PASTE7(func, v1, v2, v3, v4, v5, v6) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE6(func, v2, v3, v4, v5, v6) +#define NLOHMANN_JSON_PASTE8(func, v1, v2, v3, v4, v5, v6, v7) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE7(func, v2, v3, v4, v5, v6, v7) +#define NLOHMANN_JSON_PASTE9(func, v1, v2, v3, v4, v5, v6, v7, v8) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE8(func, v2, v3, v4, v5, v6, v7, v8) +#define NLOHMANN_JSON_PASTE10(func, v1, v2, v3, v4, v5, v6, v7, v8, v9) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE9(func, v2, v3, v4, v5, v6, v7, v8, v9) +#define NLOHMANN_JSON_PASTE11(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE10(func, v2, v3, v4, v5, v6, v7, v8, v9, v10) +#define NLOHMANN_JSON_PASTE12(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE11(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11) +#define NLOHMANN_JSON_PASTE13(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE12(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12) +#define NLOHMANN_JSON_PASTE14(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, 
v13) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE13(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13) +#define NLOHMANN_JSON_PASTE15(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE14(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14) +#define NLOHMANN_JSON_PASTE16(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE15(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15) +#define NLOHMANN_JSON_PASTE17(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE16(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16) +#define NLOHMANN_JSON_PASTE18(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE17(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17) +#define NLOHMANN_JSON_PASTE19(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE18(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18) +#define NLOHMANN_JSON_PASTE20(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE19(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19) +#define NLOHMANN_JSON_PASTE21(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE20(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20) +#define NLOHMANN_JSON_PASTE22(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE21(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21) +#define NLOHMANN_JSON_PASTE23(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE22(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22) +#define NLOHMANN_JSON_PASTE24(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE23(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23) +#define NLOHMANN_JSON_PASTE25(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE24(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24) +#define NLOHMANN_JSON_PASTE26(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE25(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25) +#define NLOHMANN_JSON_PASTE27(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26) NLOHMANN_JSON_PASTE2(func, v1) 
NLOHMANN_JSON_PASTE26(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26) +#define NLOHMANN_JSON_PASTE28(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE27(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27) +#define NLOHMANN_JSON_PASTE29(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE28(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28) +#define NLOHMANN_JSON_PASTE30(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE29(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29) +#define NLOHMANN_JSON_PASTE31(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE30(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30) +#define NLOHMANN_JSON_PASTE32(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE31(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31) +#define NLOHMANN_JSON_PASTE33(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE32(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32) +#define NLOHMANN_JSON_PASTE34(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE33(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33) +#define NLOHMANN_JSON_PASTE35(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE34(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34) +#define NLOHMANN_JSON_PASTE36(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE35(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, 
v26, v27, v28, v29, v30, v31, v32, v33, v34, v35) +#define NLOHMANN_JSON_PASTE37(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE36(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36) +#define NLOHMANN_JSON_PASTE38(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE37(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37) +#define NLOHMANN_JSON_PASTE39(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE38(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38) +#define NLOHMANN_JSON_PASTE40(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE39(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39) +#define NLOHMANN_JSON_PASTE41(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE40(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40) +#define NLOHMANN_JSON_PASTE42(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE41(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41) +#define NLOHMANN_JSON_PASTE43(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE42(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42) +#define NLOHMANN_JSON_PASTE44(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43) 
NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE43(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43) +#define NLOHMANN_JSON_PASTE45(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE44(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44) +#define NLOHMANN_JSON_PASTE46(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE45(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45) +#define NLOHMANN_JSON_PASTE47(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE46(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46) +#define NLOHMANN_JSON_PASTE48(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE47(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47) +#define NLOHMANN_JSON_PASTE49(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE48(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48) +#define NLOHMANN_JSON_PASTE50(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE49(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49) +#define NLOHMANN_JSON_PASTE51(func, v1, v2, v3, v4, v5, 
v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE50(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50) +#define NLOHMANN_JSON_PASTE52(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE51(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51) +#define NLOHMANN_JSON_PASTE53(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE52(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52) +#define NLOHMANN_JSON_PASTE54(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE53(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53) +#define NLOHMANN_JSON_PASTE55(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE54(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54) +#define NLOHMANN_JSON_PASTE56(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE55(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55) +#define 
NLOHMANN_JSON_PASTE57(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE56(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56) +#define NLOHMANN_JSON_PASTE58(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE57(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57) +#define NLOHMANN_JSON_PASTE59(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE58(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58) +#define NLOHMANN_JSON_PASTE60(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE59(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59) +#define NLOHMANN_JSON_PASTE61(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE60(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60) +#define NLOHMANN_JSON_PASTE62(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, 
v54, v55, v56, v57, v58, v59, v60, v61) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE61(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61) +#define NLOHMANN_JSON_PASTE63(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61, v62) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE62(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61, v62) +#define NLOHMANN_JSON_PASTE64(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61, v62, v63) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE63(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61, v62, v63) + +#define NLOHMANN_JSON_TO(v1) nlohmann_json_j[#v1] = nlohmann_json_t.v1; +#define NLOHMANN_JSON_FROM(v1) nlohmann_json_j.at(#v1).get_to(nlohmann_json_t.v1); +#define NLOHMANN_JSON_FROM_WITH_DEFAULT(v1) nlohmann_json_t.v1 = nlohmann_json_j.value(#v1, nlohmann_json_default_obj.v1); + +/*! +@brief macro +@def NLOHMANN_DEFINE_TYPE_INTRUSIVE +@since version 3.9.0 +*/ +#define NLOHMANN_DEFINE_TYPE_INTRUSIVE(Type, ...) \ + friend void to_json(nlohmann::json& nlohmann_json_j, const Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_TO, __VA_ARGS__)) } \ + friend void from_json(const nlohmann::json& nlohmann_json_j, Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_FROM, __VA_ARGS__)) } + +#define NLOHMANN_DEFINE_TYPE_INTRUSIVE_WITH_DEFAULT(Type, ...) \ + friend void to_json(nlohmann::json& nlohmann_json_j, const Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_TO, __VA_ARGS__)) } \ + friend void from_json(const nlohmann::json& nlohmann_json_j, Type& nlohmann_json_t) { Type nlohmann_json_default_obj; NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_FROM_WITH_DEFAULT, __VA_ARGS__)) } + +/*! +@brief macro +@def NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE +@since version 3.9.0 +*/ +#define NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE(Type, ...) \ + inline void to_json(nlohmann::json& nlohmann_json_j, const Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_TO, __VA_ARGS__)) } \ + inline void from_json(const nlohmann::json& nlohmann_json_j, Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_FROM, __VA_ARGS__)) } + +#define NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE_WITH_DEFAULT(Type, ...) 
\ + inline void to_json(nlohmann::json& nlohmann_json_j, const Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_TO, __VA_ARGS__)) } \ + inline void from_json(const nlohmann::json& nlohmann_json_j, Type& nlohmann_json_t) { Type nlohmann_json_default_obj; NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_FROM_WITH_DEFAULT, __VA_ARGS__)) } + + +// inspired from https://stackoverflow.com/a/26745591 +// allows to call any std function as if (e.g. with begin): +// using std::begin; begin(x); +// +// it allows using the detected idiom to retrieve the return type +// of such an expression +#define NLOHMANN_CAN_CALL_STD_FUNC_IMPL(std_name) \ + namespace detail { \ + using std::std_name; \ + \ + template<typename... T> \ + using result_of_##std_name = decltype(std_name(std::declval<T>()...)); \ + } \ + \ + namespace detail2 { \ + struct std_name##_tag \ + { \ + }; \ + \ + template<typename... T> \ + std_name##_tag std_name(T&&...); \ + \ + template<typename... T> \ + using result_of_##std_name = decltype(std_name(std::declval<T>()...)); \ + \ + template<typename... T> \ + struct would_call_std_##std_name \ + { \ + static constexpr auto const value = ::nlohmann::detail:: \ + is_detected_exact<std_name##_tag, result_of_##std_name, T...>::value; \ + }; \ + } /* namespace detail2 */ \ + \ + template<typename... T> \ + struct would_call_std_##std_name : detail2::would_call_std_##std_name<T...> \ + { \ + } + +#ifndef JSON_USE_IMPLICIT_CONVERSIONS + #define JSON_USE_IMPLICIT_CONVERSIONS 1 +#endif + +#if JSON_USE_IMPLICIT_CONVERSIONS + #define JSON_EXPLICIT +#else + #define JSON_EXPLICIT explicit +#endif + +#ifndef JSON_DIAGNOSTICS + #define JSON_DIAGNOSTICS 0 +#endif + +#ifndef JSON_USE_LEGACY_DISCARDED_VALUE_COMPARISON + #define JSON_USE_LEGACY_DISCARDED_VALUE_COMPARISON 0 +#endif + +#if JSON_HAS_THREE_WAY_COMPARISON + #include <compare> // partial_ordering +#endif + +namespace nlohmann +{ +namespace detail +{ +/////////////////////////// +// JSON type enumeration // +/////////////////////////// + +/*! +@brief the JSON type enumeration + +This enumeration collects the different JSON types. It is internally used to +distinguish the stored values, and the functions @ref basic_json::is_null(), +@ref basic_json::is_object(), @ref basic_json::is_array(), +@ref basic_json::is_string(), @ref basic_json::is_boolean(), +@ref basic_json::is_number() (with @ref basic_json::is_number_integer(), +@ref basic_json::is_number_unsigned(), and @ref basic_json::is_number_float()), +@ref basic_json::is_discarded(), @ref basic_json::is_primitive(), and +@ref basic_json::is_structured() rely on it. + +@note There are three enumeration entries (number_integer, number_unsigned, and +number_float), because the library distinguishes these three types for numbers: +@ref basic_json::number_unsigned_t is used for unsigned integers, +@ref basic_json::number_integer_t is used for signed integers, and +@ref basic_json::number_float_t is used for floating-point numbers or to +approximate integers which do not fit in the limits of their respective type. 
+ +@sa see @ref basic_json::basic_json(const value_t value_type) -- create a JSON +value with the default value for a given type + +@since version 1.0.0 +*/ +enum class value_t : std::uint8_t +{ + null, ///< null value + object, ///< object (unordered set of name/value pairs) + array, ///< array (ordered collection of values) + string, ///< string value + boolean, ///< boolean value + number_integer, ///< number value (signed integer) + number_unsigned, ///< number value (unsigned integer) + number_float, ///< number value (floating-point) + binary, ///< binary array (ordered collection of bytes) + discarded ///< discarded by the parser callback function +}; + +/*! +@brief comparison operator for JSON types + +Returns an ordering that is similar to Python: +- order: null < boolean < number < object < array < string < binary +- furthermore, each type is not smaller than itself +- discarded values are not comparable +- binary is represented as a b"" string in python and directly comparable to a + string; however, making a binary array directly comparable with a string would + be surprising behavior in a JSON file. + +@since version 1.0.0 +*/ +#if JSON_HAS_THREE_WAY_COMPARISON + inline std::partial_ordering operator<=>(const value_t lhs, const value_t rhs) noexcept // *NOPAD* +#else + inline bool operator<(const value_t lhs, const value_t rhs) noexcept +#endif +{ + static constexpr std::array<std::uint8_t, 9> order = {{ + 0 /* null */, 3 /* object */, 4 /* array */, 5 /* string */, + 1 /* boolean */, 2 /* integer */, 2 /* unsigned */, 2 /* float */, + 6 /* binary */ + } + }; + + const auto l_index = static_cast<std::size_t>(lhs); + const auto r_index = static_cast<std::size_t>(rhs); +#if JSON_HAS_THREE_WAY_COMPARISON + if (l_index < order.size() && r_index < order.size()) + { + return order[l_index] <=> order[r_index]; // *NOPAD* + } + return std::partial_ordering::unordered; +#else + return l_index < order.size() && r_index < order.size() && order[l_index] < order[r_index]; +#endif +} + +// GCC selects the built-in operator< over an operator rewritten from +// a user-defined spaceship operator +// Clang, MSVC, and ICC select the rewritten candidate +// (see GCC bug https://gcc.gnu.org/bugzilla/show_bug.cgi?id=105200) +#if JSON_HAS_THREE_WAY_COMPARISON && defined(__GNUC__) +inline bool operator<(const value_t lhs, const value_t rhs) noexcept +{ + return std::is_lt(lhs <=> rhs); // *NOPAD* +} +#endif +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/string_escape.hpp> + + +// #include <nlohmann/detail/macro_scope.hpp> + + +namespace nlohmann +{ +namespace detail +{ + +/*! +@brief replace all occurrences of a substring by another string + +@param[in,out] s the string to manipulate; changed so that all + occurrences of @a f are replaced with @a t +@param[in] f the substring to replace with @a t +@param[in] t the string to replace @a f + +@pre The search string @a f must not be empty. **This precondition is +enforced with an assertion.** + +@since version 2.0.0 +*/ +template<typename StringType> +inline void replace_substring(StringType& s, const StringType& f, + const StringType& t) +{ + JSON_ASSERT(!f.empty()); + for (auto pos = s.find(f); // find first occurrence of f + pos != StringType::npos; // make sure f was found + s.replace(pos, f.size(), t), // replace with t, and + pos = s.find(f, pos + t.size())) // find next occurrence of f + {} +} + +/*! + * @brief string escaping as described in RFC 6901 (Sect. 
4) + * @param[in] s string to escape + * @return escaped string + * + * Note the order of escaping "~" to "~0" and "/" to "~1" is important. + */ +template<typename StringType> +inline StringType escape(StringType s) +{ + replace_substring(s, StringType{"~"}, StringType{"~0"}); + replace_substring(s, StringType{"/"}, StringType{"~1"}); + return s; +} + +/*! + * @brief string unescaping as described in RFC 6901 (Sect. 4) + * @param[in] s string to unescape + * @return unescaped string + * + * Note the order of escaping "~1" to "/" and "~0" to "~" is important. + */ +template<typename StringType> +static void unescape(StringType& s) +{ + replace_substring(s, StringType{"~1"}, StringType{"/"}); + replace_substring(s, StringType{"~0"}, StringType{"~"}); +} + +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/input/position_t.hpp> + + +#include <cstddef> // size_t + +namespace nlohmann +{ +namespace detail +{ +/// struct to capture the start position of the current token +struct position_t +{ + /// the total number of characters read + std::size_t chars_read_total = 0; + /// the number of characters read in the current line + std::size_t chars_read_current_line = 0; + /// the number of lines read + std::size_t lines_read = 0; + + /// conversion to size_t to preserve SAX interface + constexpr operator size_t() const + { + return chars_read_total; + } +}; + +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/meta/cpp_future.hpp> + + +#include <cstddef> // size_t +#include <type_traits> // conditional, enable_if, false_type, integral_constant, is_constructible, is_integral, is_same, remove_cv, remove_reference, true_type +#include <utility> // index_sequence, make_index_sequence, index_sequence_for + +// #include <nlohmann/detail/macro_scope.hpp> + + +namespace nlohmann +{ +namespace detail +{ + +template<typename T> +using uncvref_t = typename std::remove_cv<typename std::remove_reference<T>::type>::type; + +#ifdef JSON_HAS_CPP_14 + +// the following utilities are natively available in C++14 +using std::enable_if_t; +using std::index_sequence; +using std::make_index_sequence; +using std::index_sequence_for; + +#else + +// alias templates to reduce boilerplate +template<bool B, typename T = void> +using enable_if_t = typename std::enable_if<B, T>::type; + +// The following code is taken from https://github.com/abseil/abseil-cpp/blob/10cb35e459f5ecca5b2ff107635da0bfa41011b4/absl/utility/utility.h +// which is part of Google Abseil (https://github.com/abseil/abseil-cpp), licensed under the Apache License 2.0. + +//// START OF CODE FROM GOOGLE ABSEIL + +// integer_sequence +// +// Class template representing a compile-time integer sequence. An instantiation +// of `integer_sequence<T, Ints...>` has a sequence of integers encoded in its +// type through its template arguments (which is a common need when +// working with C++11 variadic templates). `absl::integer_sequence` is designed +// to be a drop-in replacement for C++14's `std::integer_sequence`. +// +// Example: +// +// template< class T, T... Ints > +// void user_function(integer_sequence<T, Ints...>); +// +// int main() +// { +// // user_function's `T` will be deduced to `int` and `Ints...` +// // will be deduced to `0, 1, 2, 3, 4`. +// user_function(make_integer_sequence<int, 5>()); +// } +template <typename T, T... 
Ints> +struct integer_sequence +{ + using value_type = T; + static constexpr std::size_t size() noexcept + { + return sizeof...(Ints); + } +}; + +// index_sequence +// +// A helper template for an `integer_sequence` of `size_t`, +// `absl::index_sequence` is designed to be a drop-in replacement for C++14's +// `std::index_sequence`. +template <size_t... Ints> +using index_sequence = integer_sequence<size_t, Ints...>; + +namespace utility_internal +{ + +template <typename Seq, size_t SeqSize, size_t Rem> +struct Extend; + +// Note that SeqSize == sizeof...(Ints). It's passed explicitly for efficiency. +template <typename T, T... Ints, size_t SeqSize> +struct Extend<integer_sequence<T, Ints...>, SeqSize, 0> +{ + using type = integer_sequence < T, Ints..., (Ints + SeqSize)... >; +}; + +template <typename T, T... Ints, size_t SeqSize> +struct Extend<integer_sequence<T, Ints...>, SeqSize, 1> +{ + using type = integer_sequence < T, Ints..., (Ints + SeqSize)..., 2 * SeqSize >; +}; + +// Recursion helper for 'make_integer_sequence<T, N>'. +// 'Gen<T, N>::type' is an alias for 'integer_sequence<T, 0, 1, ... N-1>'. +template <typename T, size_t N> +struct Gen +{ + using type = + typename Extend < typename Gen < T, N / 2 >::type, N / 2, N % 2 >::type; +}; + +template <typename T> +struct Gen<T, 0> +{ + using type = integer_sequence<T>; +}; + +} // namespace utility_internal + +// Compile-time sequences of integers + +// make_integer_sequence +// +// This template alias is equivalent to +// `integer_sequence<int, 0, 1, ..., N-1>`, and is designed to be a drop-in +// replacement for C++14's `std::make_integer_sequence`. +template <typename T, T N> +using make_integer_sequence = typename utility_internal::Gen<T, N>::type; + +// make_index_sequence +// +// This template alias is equivalent to `index_sequence<0, 1, ..., N-1>`, +// and is designed to be a drop-in replacement for C++14's +// `std::make_index_sequence`. +template <size_t N> +using make_index_sequence = make_integer_sequence<size_t, N>; + +// index_sequence_for +// +// Converts a typename pack into an index sequence of the same length, and +// is designed to be a drop-in replacement for C++14's +// `std::index_sequence_for()` +template <typename... 
Ts> +using index_sequence_for = make_index_sequence<sizeof...(Ts)>; + +//// END OF CODE FROM GOOGLE ABSEIL + +#endif + +// dispatch utility (taken from ranges-v3) +template<unsigned N> struct priority_tag : priority_tag < N - 1 > {}; +template<> struct priority_tag<0> {}; + +// taken from ranges-v3 +template<typename T> +struct static_const +{ + static constexpr T value{}; +}; + +#ifndef JSON_HAS_CPP_17 + + template<typename T> + constexpr T static_const<T>::value; // NOLINT(readability-redundant-declaration) + +#endif + +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/meta/type_traits.hpp> + + +#include <limits> // numeric_limits +#include <type_traits> // false_type, is_constructible, is_integral, is_same, true_type +#include <utility> // declval +#include <tuple> // tuple + +// #include <nlohmann/detail/macro_scope.hpp> + + +// #include <nlohmann/detail/iterators/iterator_traits.hpp> + + +#include <iterator> // random_access_iterator_tag + +// #include <nlohmann/detail/meta/void_t.hpp> + +// #include <nlohmann/detail/meta/cpp_future.hpp> + + +namespace nlohmann +{ +namespace detail +{ +template<typename It, typename = void> +struct iterator_types {}; + +template<typename It> +struct iterator_types < + It, + void_t<typename It::difference_type, typename It::value_type, typename It::pointer, + typename It::reference, typename It::iterator_category >> +{ + using difference_type = typename It::difference_type; + using value_type = typename It::value_type; + using pointer = typename It::pointer; + using reference = typename It::reference; + using iterator_category = typename It::iterator_category; +}; + +// This is required as some compilers implement std::iterator_traits in a way that +// doesn't work with SFINAE. See https://github.com/nlohmann/json/issues/1341. +template<typename T, typename = void> +struct iterator_traits +{ +}; + +template<typename T> +struct iterator_traits < T, enable_if_t < !std::is_pointer<T>::value >> + : iterator_types<T> +{ +}; + +template<typename T> +struct iterator_traits<T*, enable_if_t<std::is_object<T>::value>> +{ + using iterator_category = std::random_access_iterator_tag; + using value_type = T; + using difference_type = ptrdiff_t; + using pointer = T*; + using reference = T&; +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/meta/call_std/begin.hpp> + + +// #include <nlohmann/detail/macro_scope.hpp> + + +namespace nlohmann +{ +NLOHMANN_CAN_CALL_STD_FUNC_IMPL(begin); +} // namespace nlohmann + +// #include <nlohmann/detail/meta/call_std/end.hpp> + + +// #include <nlohmann/detail/macro_scope.hpp> + + +namespace nlohmann +{ +NLOHMANN_CAN_CALL_STD_FUNC_IMPL(end); +} // namespace nlohmann + +// #include <nlohmann/detail/meta/cpp_future.hpp> + +// #include <nlohmann/detail/meta/detected.hpp> + +// #include <nlohmann/json_fwd.hpp> +#ifndef INCLUDE_NLOHMANN_JSON_FWD_HPP_ +#define INCLUDE_NLOHMANN_JSON_FWD_HPP_ + +#include <cstdint> // int64_t, uint64_t +#include <map> // map +#include <memory> // allocator +#include <string> // string +#include <vector> // vector + +/*! +@brief namespace for Niels Lohmann +@see https://github.com/nlohmann +@since version 1.0.0 +*/ +namespace nlohmann +{ +/*! +@brief default JSONSerializer template argument + +This serializer ignores the template arguments and uses ADL +([argument-dependent lookup](https://en.cppreference.com/w/cpp/language/adl)) +for serialization. 
+*/ +template<typename T = void, typename SFINAE = void> +struct adl_serializer; + +/// a class to store JSON values +/// @sa https://json.nlohmann.me/api/basic_json/ +template<template<typename U, typename V, typename... Args> class ObjectType = + std::map, + template<typename U, typename... Args> class ArrayType = std::vector, + class StringType = std::string, class BooleanType = bool, + class NumberIntegerType = std::int64_t, + class NumberUnsignedType = std::uint64_t, + class NumberFloatType = double, + template<typename U> class AllocatorType = std::allocator, + template<typename T, typename SFINAE = void> class JSONSerializer = + adl_serializer, + class BinaryType = std::vector<std::uint8_t>> +class basic_json; + +/// @brief JSON Pointer defines a string syntax for identifying a specific value within a JSON document +/// @sa https://json.nlohmann.me/api/json_pointer/ +template<typename BasicJsonType> +class json_pointer; + +/*! +@brief default specialization +@sa https://json.nlohmann.me/api/json/ +*/ +using json = basic_json<>; + +/// @brief a minimal map-like container that preserves insertion order +/// @sa https://json.nlohmann.me/api/ordered_map/ +template<class Key, class T, class IgnoredLess, class Allocator> +struct ordered_map; + +/// @brief specialization that maintains the insertion order of object keys +/// @sa https://json.nlohmann.me/api/ordered_json/ +using ordered_json = basic_json<nlohmann::ordered_map>; + +} // namespace nlohmann + +#endif // INCLUDE_NLOHMANN_JSON_FWD_HPP_ + + +namespace nlohmann +{ +/*! +@brief detail namespace with internal helper functions + +This namespace collects functions that should not be exposed, +implementations of some @ref basic_json methods, and meta-programming helpers. + +@since version 2.1.0 +*/ +namespace detail +{ +///////////// +// helpers // +///////////// + +// Note to maintainers: +// +// Every trait in this file expects a non CV-qualified type. +// The only exceptions are in the 'aliases for detected' section +// (i.e. those of the form: decltype(T::member_function(std::declval<T>()))) +// +// In this case, T has to be properly CV-qualified to constraint the function arguments +// (e.g. 
to_json(BasicJsonType&, const T&)) + +template<typename> struct is_basic_json : std::false_type {}; + +NLOHMANN_BASIC_JSON_TPL_DECLARATION +struct is_basic_json<NLOHMANN_BASIC_JSON_TPL> : std::true_type {}; + +// used by exceptions create() member functions +// true_type for pointer to possibly cv-qualified basic_json or std::nullptr_t +// false_type otherwise +template<typename BasicJsonContext> +struct is_basic_json_context : + std::integral_constant < bool, + is_basic_json<typename std::remove_cv<typename std::remove_pointer<BasicJsonContext>::type>::type>::value + || std::is_same<BasicJsonContext, std::nullptr_t>::value > +{}; + +////////////////////// +// json_ref helpers // +////////////////////// + +template<typename> +class json_ref; + +template<typename> +struct is_json_ref : std::false_type {}; + +template<typename T> +struct is_json_ref<json_ref<T>> : std::true_type {}; + +////////////////////////// +// aliases for detected // +////////////////////////// + +template<typename T> +using mapped_type_t = typename T::mapped_type; + +template<typename T> +using key_type_t = typename T::key_type; + +template<typename T> +using value_type_t = typename T::value_type; + +template<typename T> +using difference_type_t = typename T::difference_type; + +template<typename T> +using pointer_t = typename T::pointer; + +template<typename T> +using reference_t = typename T::reference; + +template<typename T> +using iterator_category_t = typename T::iterator_category; + +template<typename T, typename... Args> +using to_json_function = decltype(T::to_json(std::declval<Args>()...)); + +template<typename T, typename... Args> +using from_json_function = decltype(T::from_json(std::declval<Args>()...)); + +template<typename T, typename U> +using get_template_function = decltype(std::declval<T>().template get<U>()); + +// trait checking if JSONSerializer<T>::from_json(json const&, udt&) exists +template<typename BasicJsonType, typename T, typename = void> +struct has_from_json : std::false_type {}; + +// trait checking if j.get<T> is valid +// use this trait instead of std::is_constructible or std::is_convertible, +// both rely on, or make use of implicit conversions, and thus fail when T +// has several constructors/operator= (see https://github.com/nlohmann/json/issues/958) +template <typename BasicJsonType, typename T> +struct is_getable +{ + static constexpr bool value = is_detected<get_template_function, const BasicJsonType&, T>::value; +}; + +template<typename BasicJsonType, typename T> +struct has_from_json < BasicJsonType, T, enable_if_t < !is_basic_json<T>::value >> +{ + using serializer = typename BasicJsonType::template json_serializer<T, void>; + + static constexpr bool value = + is_detected_exact<void, from_json_function, serializer, + const BasicJsonType&, T&>::value; +}; + +// This trait checks if JSONSerializer<T>::from_json(json const&) exists +// this overload is used for non-default-constructible user-defined-types +template<typename BasicJsonType, typename T, typename = void> +struct has_non_default_from_json : std::false_type {}; + +template<typename BasicJsonType, typename T> +struct has_non_default_from_json < BasicJsonType, T, enable_if_t < !is_basic_json<T>::value >> +{ + using serializer = typename BasicJsonType::template json_serializer<T, void>; + + static constexpr bool value = + is_detected_exact<T, from_json_function, serializer, + const BasicJsonType&>::value; +}; + +// This trait checks if BasicJsonType::json_serializer<T>::to_json exists +// Do not evaluate the trait when 
T is a basic_json type, to avoid template instantiation infinite recursion. +template<typename BasicJsonType, typename T, typename = void> +struct has_to_json : std::false_type {}; + +template<typename BasicJsonType, typename T> +struct has_to_json < BasicJsonType, T, enable_if_t < !is_basic_json<T>::value >> +{ + using serializer = typename BasicJsonType::template json_serializer<T, void>; + + static constexpr bool value = + is_detected_exact<void, to_json_function, serializer, BasicJsonType&, + T>::value; +}; + +template<typename T> +using detect_key_compare = typename T::key_compare; + +template<typename T> +struct has_key_compare : std::integral_constant<bool, is_detected<detect_key_compare, T>::value> {}; + +// obtains the actual object key comparator +template<typename BasicJsonType> +struct actual_object_comparator +{ + using object_t = typename BasicJsonType::object_t; + using object_comparator_t = typename BasicJsonType::default_object_comparator_t; + using type = typename std::conditional < has_key_compare<object_t>::value, + typename object_t::key_compare, object_comparator_t>::type; +}; + +template<typename BasicJsonType> +using actual_object_comparator_t = typename actual_object_comparator<BasicJsonType>::type; + +/////////////////// +// is_ functions // +/////////////////// + +// https://en.cppreference.com/w/cpp/types/conjunction +template<class...> struct conjunction : std::true_type { }; +template<class B> struct conjunction<B> : B { }; +template<class B, class... Bn> +struct conjunction<B, Bn...> +: std::conditional<bool(B::value), conjunction<Bn...>, B>::type {}; + +// https://en.cppreference.com/w/cpp/types/negation +template<class B> struct negation : std::integral_constant < bool, !B::value > { }; + +// Reimplementation of is_constructible and is_default_constructible, due to them being broken for +// std::pair and std::tuple until LWG 2367 fix (see https://cplusplus.github.io/LWG/lwg-defects.html#2367). +// This causes compile errors in e.g. clang 3.5 or gcc 4.9. +template <typename T> +struct is_default_constructible : std::is_default_constructible<T> {}; + +template <typename T1, typename T2> +struct is_default_constructible<std::pair<T1, T2>> + : conjunction<is_default_constructible<T1>, is_default_constructible<T2>> {}; + +template <typename T1, typename T2> +struct is_default_constructible<const std::pair<T1, T2>> + : conjunction<is_default_constructible<T1>, is_default_constructible<T2>> {}; + +template <typename... Ts> +struct is_default_constructible<std::tuple<Ts...>> + : conjunction<is_default_constructible<Ts>...> {}; + +template <typename... Ts> +struct is_default_constructible<const std::tuple<Ts...>> + : conjunction<is_default_constructible<Ts>...> {}; + + +template <typename T, typename... Args> +struct is_constructible : std::is_constructible<T, Args...> {}; + +template <typename T1, typename T2> +struct is_constructible<std::pair<T1, T2>> : is_default_constructible<std::pair<T1, T2>> {}; + +template <typename T1, typename T2> +struct is_constructible<const std::pair<T1, T2>> : is_default_constructible<const std::pair<T1, T2>> {}; + +template <typename... Ts> +struct is_constructible<std::tuple<Ts...>> : is_default_constructible<std::tuple<Ts...>> {}; + +template <typename... 
Ts> +struct is_constructible<const std::tuple<Ts...>> : is_default_constructible<const std::tuple<Ts...>> {}; + + +template<typename T, typename = void> +struct is_iterator_traits : std::false_type {}; + +template<typename T> +struct is_iterator_traits<iterator_traits<T>> +{ + private: + using traits = iterator_traits<T>; + + public: + static constexpr auto value = + is_detected<value_type_t, traits>::value && + is_detected<difference_type_t, traits>::value && + is_detected<pointer_t, traits>::value && + is_detected<iterator_category_t, traits>::value && + is_detected<reference_t, traits>::value; +}; + +template<typename T> +struct is_range +{ + private: + using t_ref = typename std::add_lvalue_reference<T>::type; + + using iterator = detected_t<result_of_begin, t_ref>; + using sentinel = detected_t<result_of_end, t_ref>; + + // to be 100% correct, it should use https://en.cppreference.com/w/cpp/iterator/input_or_output_iterator + // and https://en.cppreference.com/w/cpp/iterator/sentinel_for + // but reimplementing these would be too much work, as a lot of other concepts are used underneath + static constexpr auto is_iterator_begin = + is_iterator_traits<iterator_traits<iterator>>::value; + + public: + static constexpr bool value = !std::is_same<iterator, nonesuch>::value && !std::is_same<sentinel, nonesuch>::value && is_iterator_begin; +}; + +template<typename R> +using iterator_t = enable_if_t<is_range<R>::value, result_of_begin<decltype(std::declval<R&>())>>; + +template<typename T> +using range_value_t = value_type_t<iterator_traits<iterator_t<T>>>; + +// The following implementation of is_complete_type is taken from +// https://blogs.msdn.microsoft.com/vcblog/2015/12/02/partial-support-for-expression-sfinae-in-vs-2015-update-1/ +// and is written by Xiang Fan who agreed to using it in this library. + +template<typename T, typename = void> +struct is_complete_type : std::false_type {}; + +template<typename T> +struct is_complete_type<T, decltype(void(sizeof(T)))> : std::true_type {}; + +template<typename BasicJsonType, typename CompatibleObjectType, + typename = void> +struct is_compatible_object_type_impl : std::false_type {}; + +template<typename BasicJsonType, typename CompatibleObjectType> +struct is_compatible_object_type_impl < + BasicJsonType, CompatibleObjectType, + enable_if_t < is_detected<mapped_type_t, CompatibleObjectType>::value&& + is_detected<key_type_t, CompatibleObjectType>::value >> +{ + using object_t = typename BasicJsonType::object_t; + + // macOS's is_constructible does not play well with nonesuch... 
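+    // Example (assuming the default basic_json specialization, whose object_t is
+    // std::map<std::string, basic_json>): std::map<std::string, int> satisfies the
+    // check below, because object_t::key_type is constructible from its key_type and
+    // object_t::mapped_type is constructible from its mapped_type, so
+    // is_compatible_object_type<json, std::map<std::string, int>>::value is true.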
+ static constexpr bool value = + is_constructible<typename object_t::key_type, + typename CompatibleObjectType::key_type>::value && + is_constructible<typename object_t::mapped_type, + typename CompatibleObjectType::mapped_type>::value; +}; + +template<typename BasicJsonType, typename CompatibleObjectType> +struct is_compatible_object_type + : is_compatible_object_type_impl<BasicJsonType, CompatibleObjectType> {}; + +template<typename BasicJsonType, typename ConstructibleObjectType, + typename = void> +struct is_constructible_object_type_impl : std::false_type {}; + +template<typename BasicJsonType, typename ConstructibleObjectType> +struct is_constructible_object_type_impl < + BasicJsonType, ConstructibleObjectType, + enable_if_t < is_detected<mapped_type_t, ConstructibleObjectType>::value&& + is_detected<key_type_t, ConstructibleObjectType>::value >> +{ + using object_t = typename BasicJsonType::object_t; + + static constexpr bool value = + (is_default_constructible<ConstructibleObjectType>::value && + (std::is_move_assignable<ConstructibleObjectType>::value || + std::is_copy_assignable<ConstructibleObjectType>::value) && + (is_constructible<typename ConstructibleObjectType::key_type, + typename object_t::key_type>::value && + std::is_same < + typename object_t::mapped_type, + typename ConstructibleObjectType::mapped_type >::value)) || + (has_from_json<BasicJsonType, + typename ConstructibleObjectType::mapped_type>::value || + has_non_default_from_json < + BasicJsonType, + typename ConstructibleObjectType::mapped_type >::value); +}; + +template<typename BasicJsonType, typename ConstructibleObjectType> +struct is_constructible_object_type + : is_constructible_object_type_impl<BasicJsonType, + ConstructibleObjectType> {}; + +template<typename BasicJsonType, typename CompatibleStringType> +struct is_compatible_string_type +{ + static constexpr auto value = + is_constructible<typename BasicJsonType::string_t, CompatibleStringType>::value; +}; + +template<typename BasicJsonType, typename ConstructibleStringType> +struct is_constructible_string_type +{ + // launder type through decltype() to fix compilation failure on ICPC +#ifdef __INTEL_COMPILER + using laundered_type = decltype(std::declval<ConstructibleStringType>()); +#else + using laundered_type = ConstructibleStringType; +#endif + + static constexpr auto value = + is_constructible<laundered_type, + typename BasicJsonType::string_t>::value; +}; + +template<typename BasicJsonType, typename CompatibleArrayType, typename = void> +struct is_compatible_array_type_impl : std::false_type {}; + +template<typename BasicJsonType, typename CompatibleArrayType> +struct is_compatible_array_type_impl < + BasicJsonType, CompatibleArrayType, + enable_if_t < + is_detected<iterator_t, CompatibleArrayType>::value&& + is_iterator_traits<iterator_traits<detected_t<iterator_t, CompatibleArrayType>>>::value&& +// special case for types like std::filesystem::path whose iterator's value_type are themselves +// c.f. 
https://github.com/nlohmann/json/pull/3073 + !std::is_same<CompatibleArrayType, detected_t<range_value_t, CompatibleArrayType>>::value >> +{ + static constexpr bool value = + is_constructible<BasicJsonType, + range_value_t<CompatibleArrayType>>::value; +}; + +template<typename BasicJsonType, typename CompatibleArrayType> +struct is_compatible_array_type + : is_compatible_array_type_impl<BasicJsonType, CompatibleArrayType> {}; + +template<typename BasicJsonType, typename ConstructibleArrayType, typename = void> +struct is_constructible_array_type_impl : std::false_type {}; + +template<typename BasicJsonType, typename ConstructibleArrayType> +struct is_constructible_array_type_impl < + BasicJsonType, ConstructibleArrayType, + enable_if_t<std::is_same<ConstructibleArrayType, + typename BasicJsonType::value_type>::value >> + : std::true_type {}; + +template<typename BasicJsonType, typename ConstructibleArrayType> +struct is_constructible_array_type_impl < + BasicJsonType, ConstructibleArrayType, + enable_if_t < !std::is_same<ConstructibleArrayType, + typename BasicJsonType::value_type>::value&& + !is_compatible_string_type<BasicJsonType, ConstructibleArrayType>::value&& + is_default_constructible<ConstructibleArrayType>::value&& +(std::is_move_assignable<ConstructibleArrayType>::value || + std::is_copy_assignable<ConstructibleArrayType>::value)&& +is_detected<iterator_t, ConstructibleArrayType>::value&& +is_iterator_traits<iterator_traits<detected_t<iterator_t, ConstructibleArrayType>>>::value&& +is_detected<range_value_t, ConstructibleArrayType>::value&& +// special case for types like std::filesystem::path whose iterator's value_type are themselves +// c.f. https://github.com/nlohmann/json/pull/3073 +!std::is_same<ConstructibleArrayType, detected_t<range_value_t, ConstructibleArrayType>>::value&& + is_complete_type < + detected_t<range_value_t, ConstructibleArrayType >>::value >> +{ + using value_type = range_value_t<ConstructibleArrayType>; + + static constexpr bool value = + std::is_same<value_type, + typename BasicJsonType::array_t::value_type>::value || + has_from_json<BasicJsonType, + value_type>::value || + has_non_default_from_json < + BasicJsonType, + value_type >::value; +}; + +template<typename BasicJsonType, typename ConstructibleArrayType> +struct is_constructible_array_type + : is_constructible_array_type_impl<BasicJsonType, ConstructibleArrayType> {}; + +template<typename RealIntegerType, typename CompatibleNumberIntegerType, + typename = void> +struct is_compatible_integer_type_impl : std::false_type {}; + +template<typename RealIntegerType, typename CompatibleNumberIntegerType> +struct is_compatible_integer_type_impl < + RealIntegerType, CompatibleNumberIntegerType, + enable_if_t < std::is_integral<RealIntegerType>::value&& + std::is_integral<CompatibleNumberIntegerType>::value&& + !std::is_same<bool, CompatibleNumberIntegerType>::value >> +{ + // is there an assert somewhere on overflows? 
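+    // Example (for illustration, assuming RealIntegerType = std::int64_t): a
+    // CompatibleNumberIntegerType of short gives value == true (both are signed
+    // integer types), while unsigned int gives value == false because
+    // RealLimits::is_signed and CompatibleLimits::is_signed differ.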
+ using RealLimits = std::numeric_limits<RealIntegerType>; + using CompatibleLimits = std::numeric_limits<CompatibleNumberIntegerType>; + + static constexpr auto value = + is_constructible<RealIntegerType, + CompatibleNumberIntegerType>::value && + CompatibleLimits::is_integer && + RealLimits::is_signed == CompatibleLimits::is_signed; +}; + +template<typename RealIntegerType, typename CompatibleNumberIntegerType> +struct is_compatible_integer_type + : is_compatible_integer_type_impl<RealIntegerType, + CompatibleNumberIntegerType> {}; + +template<typename BasicJsonType, typename CompatibleType, typename = void> +struct is_compatible_type_impl: std::false_type {}; + +template<typename BasicJsonType, typename CompatibleType> +struct is_compatible_type_impl < + BasicJsonType, CompatibleType, + enable_if_t<is_complete_type<CompatibleType>::value >> +{ + static constexpr bool value = + has_to_json<BasicJsonType, CompatibleType>::value; +}; + +template<typename BasicJsonType, typename CompatibleType> +struct is_compatible_type + : is_compatible_type_impl<BasicJsonType, CompatibleType> {}; + +template<typename T1, typename T2> +struct is_constructible_tuple : std::false_type {}; + +template<typename T1, typename... Args> +struct is_constructible_tuple<T1, std::tuple<Args...>> : conjunction<is_constructible<T1, Args>...> {}; + +template<typename BasicJsonType, typename T> +struct is_json_iterator_of : std::false_type {}; + +template<typename BasicJsonType> +struct is_json_iterator_of<BasicJsonType, typename BasicJsonType::iterator> : std::true_type {}; + +template<typename BasicJsonType> +struct is_json_iterator_of<BasicJsonType, typename BasicJsonType::const_iterator> : std::true_type +{}; + +// checks if a given type T is a template specialization of Primary +template<template <typename...> class Primary, typename T> +struct is_specialization_of : std::false_type {}; + +template<template <typename...> class Primary, typename... 
Args> +struct is_specialization_of<Primary, Primary<Args...>> : std::true_type {}; + +template<typename T> +using is_json_pointer = is_specialization_of<::nlohmann::json_pointer, uncvref_t<T>>; + +// checks if A and B are comparable using Compare functor +template<typename Compare, typename A, typename B, typename = void> +struct is_comparable : std::false_type {}; + +template<typename Compare, typename A, typename B> +struct is_comparable<Compare, A, B, void_t< +decltype(std::declval<Compare>()(std::declval<A>(), std::declval<B>())), +decltype(std::declval<Compare>()(std::declval<B>(), std::declval<A>())) +>> : std::true_type {}; + +// checks if BasicJsonType::object_t::key_type and KeyType are comparable using Compare functor +template<typename BasicJsonType, typename KeyType> +using is_key_type_comparable = typename is_comparable < + typename BasicJsonType::object_comparator_t, + const key_type_t<typename BasicJsonType::object_t>&, + KeyType >::type; + +template<typename T> +using detect_is_transparent = typename T::is_transparent; + +// type trait to check if KeyType can be used as object key +// true if: +// - KeyType is comparable with BasicJsonType::object_t::key_type +// - if ExcludeObjectKeyType is true, KeyType is not BasicJsonType::object_t::key_type +// - the comparator is transparent or RequireTransparentComparator is false +// - KeyType is not a JSON iterator or json_pointer +template<typename BasicJsonType, typename KeyTypeCVRef, bool RequireTransparentComparator = true, + bool ExcludeObjectKeyType = RequireTransparentComparator, typename KeyType = uncvref_t<KeyTypeCVRef>> +using is_usable_as_key_type = typename std::conditional < + is_key_type_comparable<BasicJsonType, KeyTypeCVRef>::value + && !(ExcludeObjectKeyType && std::is_same<KeyType, + typename BasicJsonType::object_t::key_type>::value) + && (!RequireTransparentComparator || is_detected < + detect_is_transparent, + typename BasicJsonType::object_comparator_t >::value) + && !is_json_iterator_of<BasicJsonType, KeyType>::value + && !is_json_pointer<KeyType>::value, + std::true_type, + std::false_type >::type; + +template<typename ObjectType, typename KeyType> +using detect_erase_with_key_type = decltype(std::declval<ObjectType&>().erase(std::declval<KeyType>())); + +// type trait to check if object_t has an erase() member functions accepting KeyType +template<typename BasicJsonType, typename KeyType> +using has_erase_with_key_type = typename std::conditional < + is_detected < + detect_erase_with_key_type, + typename BasicJsonType::object_t, KeyType >::value, + std::true_type, + std::false_type >::type; + +// a naive helper to check if a type is an ordered_map (exploits the fact that +// ordered_map inherits capacity() from std::vector) +template <typename T> +struct is_ordered_map +{ + using one = char; + + struct two + { + char x[2]; // NOLINT(cppcoreguidelines-avoid-c-arrays,hicpp-avoid-c-arrays,modernize-avoid-c-arrays) + }; + + template <typename C> static one test( decltype(&C::capacity) ) ; + template <typename C> static two test(...); + + enum { value = sizeof(test<T>(nullptr)) == sizeof(char) }; // NOLINT(cppcoreguidelines-pro-type-vararg,hicpp-vararg) +}; + +// to avoid useless casts (see https://github.com/nlohmann/json/issues/2893#issuecomment-889152324) +template < typename T, typename U, enable_if_t < !std::is_same<T, U>::value, int > = 0 > +T conditional_static_cast(U value) +{ + return static_cast<T>(value); +} + +template<typename T, typename U, enable_if_t<std::is_same<T, U>::value, int> = 0> +T 
conditional_static_cast(U value) +{ + return value; +} + +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/string_concat.hpp> + + +#include <cstring> // strlen +#include <string> // string +#include <utility> // forward + +// #include <nlohmann/detail/meta/cpp_future.hpp> + +// #include <nlohmann/detail/meta/detected.hpp> + + +namespace nlohmann +{ +namespace detail +{ + +inline std::size_t concat_length() +{ + return 0; +} + +template<typename... Args> +inline std::size_t concat_length(const char* cstr, Args&& ... rest); + +template<typename StringType, typename... Args> +inline std::size_t concat_length(const StringType& str, Args&& ... rest); + +template<typename... Args> +inline std::size_t concat_length(const char /*c*/, Args&& ... rest) +{ + return 1 + concat_length(std::forward<Args>(rest)...); +} + +template<typename... Args> +inline std::size_t concat_length(const char* cstr, Args&& ... rest) +{ + // cppcheck-suppress ignoredReturnValue + return ::strlen(cstr) + concat_length(std::forward<Args>(rest)...); +} + +template<typename StringType, typename... Args> +inline std::size_t concat_length(const StringType& str, Args&& ... rest) +{ + return str.size() + concat_length(std::forward<Args>(rest)...); +} + +template<typename OutStringType> +inline void concat_into(OutStringType& /*out*/) +{} + +template<typename StringType, typename Arg> +using string_can_append = decltype(std::declval<StringType&>().append(std::declval < Arg && > ())); + +template<typename StringType, typename Arg> +using detect_string_can_append = is_detected<string_can_append, StringType, Arg>; + +template<typename StringType, typename Arg> +using string_can_append_op = decltype(std::declval<StringType&>() += std::declval < Arg && > ()); + +template<typename StringType, typename Arg> +using detect_string_can_append_op = is_detected<string_can_append_op, StringType, Arg>; + +template<typename StringType, typename Arg> +using string_can_append_iter = decltype(std::declval<StringType&>().append(std::declval<const Arg&>().begin(), std::declval<const Arg&>().end())); + +template<typename StringType, typename Arg> +using detect_string_can_append_iter = is_detected<string_can_append_iter, StringType, Arg>; + +template<typename StringType, typename Arg> +using string_can_append_data = decltype(std::declval<StringType&>().append(std::declval<const Arg&>().data(), std::declval<const Arg&>().size())); + +template<typename StringType, typename Arg> +using detect_string_can_append_data = is_detected<string_can_append_data, StringType, Arg>; + +template < typename OutStringType, typename Arg, typename... Args, + enable_if_t < !detect_string_can_append<OutStringType, Arg>::value + && detect_string_can_append_op<OutStringType, Arg>::value, int > = 0 > +inline void concat_into(OutStringType& out, Arg && arg, Args && ... rest); + +template < typename OutStringType, typename Arg, typename... Args, + enable_if_t < !detect_string_can_append<OutStringType, Arg>::value + && !detect_string_can_append_op<OutStringType, Arg>::value + && detect_string_can_append_iter<OutStringType, Arg>::value, int > = 0 > +inline void concat_into(OutStringType& out, const Arg& arg, Args && ... rest); + +template < typename OutStringType, typename Arg, typename... 
Args, + enable_if_t < !detect_string_can_append<OutStringType, Arg>::value + && !detect_string_can_append_op<OutStringType, Arg>::value + && !detect_string_can_append_iter<OutStringType, Arg>::value + && detect_string_can_append_data<OutStringType, Arg>::value, int > = 0 > +inline void concat_into(OutStringType& out, const Arg& arg, Args && ... rest); + +template<typename OutStringType, typename Arg, typename... Args, + enable_if_t<detect_string_can_append<OutStringType, Arg>::value, int> = 0> +inline void concat_into(OutStringType& out, Arg && arg, Args && ... rest) +{ + out.append(std::forward<Arg>(arg)); + concat_into(out, std::forward<Args>(rest)...); +} + +template < typename OutStringType, typename Arg, typename... Args, + enable_if_t < !detect_string_can_append<OutStringType, Arg>::value + && detect_string_can_append_op<OutStringType, Arg>::value, int > > +inline void concat_into(OutStringType& out, Arg&& arg, Args&& ... rest) +{ + out += std::forward<Arg>(arg); + concat_into(out, std::forward<Args>(rest)...); +} + +template < typename OutStringType, typename Arg, typename... Args, + enable_if_t < !detect_string_can_append<OutStringType, Arg>::value + && !detect_string_can_append_op<OutStringType, Arg>::value + && detect_string_can_append_iter<OutStringType, Arg>::value, int > > +inline void concat_into(OutStringType& out, const Arg& arg, Args&& ... rest) +{ + out.append(arg.begin(), arg.end()); + concat_into(out, std::forward<Args>(rest)...); +} + +template < typename OutStringType, typename Arg, typename... Args, + enable_if_t < !detect_string_can_append<OutStringType, Arg>::value + && !detect_string_can_append_op<OutStringType, Arg>::value + && !detect_string_can_append_iter<OutStringType, Arg>::value + && detect_string_can_append_data<OutStringType, Arg>::value, int > > +inline void concat_into(OutStringType& out, const Arg& arg, Args&& ... rest) +{ + out.append(arg.data(), arg.size()); + concat_into(out, std::forward<Args>(rest)...); +} + +template<typename OutStringType = std::string, typename... Args> +inline OutStringType concat(Args && ... 
args) +{ + OutStringType str; + str.reserve(concat_length(std::forward<Args>(args)...)); + concat_into(str, std::forward<Args>(args)...); + return str; +} + +} // namespace detail +} // namespace nlohmann + + + +namespace nlohmann +{ +namespace detail +{ +//////////////// +// exceptions // +//////////////// + +/// @brief general exception of the @ref basic_json class +/// @sa https://json.nlohmann.me/api/basic_json/exception/ +class exception : public std::exception +{ + public: + /// returns the explanatory string + const char* what() const noexcept override + { + return m.what(); + } + + /// the id of the exception + const int id; // NOLINT(cppcoreguidelines-non-private-member-variables-in-classes) + + protected: + JSON_HEDLEY_NON_NULL(3) + exception(int id_, const char* what_arg) : id(id_), m(what_arg) {} // NOLINT(bugprone-throw-keyword-missing) + + static std::string name(const std::string& ename, int id_) + { + return concat("[json.exception.", ename, '.', std::to_string(id_), "] "); + } + + static std::string diagnostics(std::nullptr_t /*leaf_element*/) + { + return ""; + } + + template<typename BasicJsonType> + static std::string diagnostics(const BasicJsonType* leaf_element) + { +#if JSON_DIAGNOSTICS + std::vector<std::string> tokens; + for (const auto* current = leaf_element; current != nullptr && current->m_parent != nullptr; current = current->m_parent) + { + switch (current->m_parent->type()) + { + case value_t::array: + { + for (std::size_t i = 0; i < current->m_parent->m_value.array->size(); ++i) + { + if (&current->m_parent->m_value.array->operator[](i) == current) + { + tokens.emplace_back(std::to_string(i)); + break; + } + } + break; + } + + case value_t::object: + { + for (const auto& element : *current->m_parent->m_value.object) + { + if (&element.second == current) + { + tokens.emplace_back(element.first.c_str()); + break; + } + } + break; + } + + case value_t::null: // LCOV_EXCL_LINE + case value_t::string: // LCOV_EXCL_LINE + case value_t::boolean: // LCOV_EXCL_LINE + case value_t::number_integer: // LCOV_EXCL_LINE + case value_t::number_unsigned: // LCOV_EXCL_LINE + case value_t::number_float: // LCOV_EXCL_LINE + case value_t::binary: // LCOV_EXCL_LINE + case value_t::discarded: // LCOV_EXCL_LINE + default: // LCOV_EXCL_LINE + break; // LCOV_EXCL_LINE + } + } + + if (tokens.empty()) + { + return ""; + } + + auto str = std::accumulate(tokens.rbegin(), tokens.rend(), std::string{}, + [](const std::string & a, const std::string & b) + { + return concat(a, '/', detail::escape(b)); + }); + return concat('(', str, ") "); +#else + static_cast<void>(leaf_element); + return ""; +#endif + } + + private: + /// an exception object as storage for error messages + std::runtime_error m; +}; + +/// @brief exception indicating a parse error +/// @sa https://json.nlohmann.me/api/basic_json/parse_error/ +class parse_error : public exception +{ + public: + /*!
+ @brief create a parse error exception + @param[in] id_ the id of the exception + @param[in] pos the position where the error occurred (or with + chars_read_total=0 if the position cannot be + determined) + @param[in] what_arg the explanatory string + @return parse_error object + */ + template<typename BasicJsonContext, enable_if_t<is_basic_json_context<BasicJsonContext>::value, int> = 0> + static parse_error create(int id_, const position_t& pos, const std::string& what_arg, BasicJsonContext context) + { + std::string w = concat(exception::name("parse_error", id_), "parse error", + position_string(pos), ": ", exception::diagnostics(context), what_arg); + return {id_, pos.chars_read_total, w.c_str()}; + } + + template<typename BasicJsonContext, enable_if_t<is_basic_json_context<BasicJsonContext>::value, int> = 0> + static parse_error create(int id_, std::size_t byte_, const std::string& what_arg, BasicJsonContext context) + { + std::string w = concat(exception::name("parse_error", id_), "parse error", + (byte_ != 0 ? (concat(" at byte ", std::to_string(byte_))) : ""), + ": ", exception::diagnostics(context), what_arg); + return {id_, byte_, w.c_str()}; + } + + /*! + @brief byte index of the parse error + + The byte index of the last read character in the input file. + + @note For an input with n bytes, 1 is the index of the first character and + n+1 is the index of the terminating null byte or the end of file. + This also holds true when reading a byte vector (CBOR or MessagePack). + */ + const std::size_t byte; + + private: + parse_error(int id_, std::size_t byte_, const char* what_arg) + : exception(id_, what_arg), byte(byte_) {} + + static std::string position_string(const position_t& pos) + { + return concat(" at line ", std::to_string(pos.lines_read + 1), + ", column ", std::to_string(pos.chars_read_current_line)); + } +}; + +/// @brief exception indicating errors with iterators +/// @sa https://json.nlohmann.me/api/basic_json/invalid_iterator/ +class invalid_iterator : public exception +{ + public: + template<typename BasicJsonContext, enable_if_t<is_basic_json_context<BasicJsonContext>::value, int> = 0> + static invalid_iterator create(int id_, const std::string& what_arg, BasicJsonContext context) + { + std::string w = concat(exception::name("invalid_iterator", id_), exception::diagnostics(context), what_arg); + return {id_, w.c_str()}; + } + + private: + JSON_HEDLEY_NON_NULL(3) + invalid_iterator(int id_, const char* what_arg) + : exception(id_, what_arg) {} +}; + +/// @brief exception indicating executing a member function with a wrong type +/// @sa https://json.nlohmann.me/api/basic_json/type_error/ +class type_error : public exception +{ + public: + template<typename BasicJsonContext, enable_if_t<is_basic_json_context<BasicJsonContext>::value, int> = 0> + static type_error create(int id_, const std::string& what_arg, BasicJsonContext context) + { + std::string w = concat(exception::name("type_error", id_), exception::diagnostics(context), what_arg); + return {id_, w.c_str()}; + } + + private: + JSON_HEDLEY_NON_NULL(3) + type_error(int id_, const char* what_arg) : exception(id_, what_arg) {} +}; + +/// @brief exception indicating access out of the defined range +/// @sa https://json.nlohmann.me/api/basic_json/out_of_range/ +class out_of_range : public exception +{ + public: + template<typename BasicJsonContext, enable_if_t<is_basic_json_context<BasicJsonContext>::value, int> = 0> + static out_of_range create(int id_, const std::string& what_arg, BasicJsonContext context) + { + 
std::string w = concat(exception::name("out_of_range", id_), exception::diagnostics(context), what_arg); + return {id_, w.c_str()}; + } + + private: + JSON_HEDLEY_NON_NULL(3) + out_of_range(int id_, const char* what_arg) : exception(id_, what_arg) {} +}; + +/// @brief exception indicating other library errors +/// @sa https://json.nlohmann.me/api/basic_json/other_error/ +class other_error : public exception +{ + public: + template<typename BasicJsonContext, enable_if_t<is_basic_json_context<BasicJsonContext>::value, int> = 0> + static other_error create(int id_, const std::string& what_arg, BasicJsonContext context) + { + std::string w = concat(exception::name("other_error", id_), exception::diagnostics(context), what_arg); + return {id_, w.c_str()}; + } + + private: + JSON_HEDLEY_NON_NULL(3) + other_error(int id_, const char* what_arg) : exception(id_, what_arg) {} +}; + +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/meta/cpp_future.hpp> + +// #include <nlohmann/detail/meta/identity_tag.hpp> + + +namespace nlohmann +{ +namespace detail +{ +// dispatching helper struct +template <class T> struct identity_tag {}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/meta/type_traits.hpp> + +// #include <nlohmann/detail/string_concat.hpp> + +// #include <nlohmann/detail/value_t.hpp> + + +#if JSON_HAS_EXPERIMENTAL_FILESYSTEM +#include <experimental/filesystem> +namespace nlohmann::detail +{ +namespace std_fs = std::experimental::filesystem; +} // namespace nlohmann::detail +#elif JSON_HAS_FILESYSTEM +#include <filesystem> +namespace nlohmann::detail +{ +namespace std_fs = std::filesystem; +} // namespace nlohmann::detail +#endif + +namespace nlohmann +{ +namespace detail +{ +template<typename BasicJsonType> +inline void from_json(const BasicJsonType& j, typename std::nullptr_t& n) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_null())) + { + JSON_THROW(type_error::create(302, concat("type must be null, but is ", j.type_name()), &j)); + } + n = nullptr; +} + +// overloads for basic_json template parameters +template < typename BasicJsonType, typename ArithmeticType, + enable_if_t < std::is_arithmetic<ArithmeticType>::value&& + !std::is_same<ArithmeticType, typename BasicJsonType::boolean_t>::value, + int > = 0 > +void get_arithmetic_value(const BasicJsonType& j, ArithmeticType& val) +{ + switch (static_cast<value_t>(j)) + { + case value_t::number_unsigned: + { + val = static_cast<ArithmeticType>(*j.template get_ptr<const typename BasicJsonType::number_unsigned_t*>()); + break; + } + case value_t::number_integer: + { + val = static_cast<ArithmeticType>(*j.template get_ptr<const typename BasicJsonType::number_integer_t*>()); + break; + } + case value_t::number_float: + { + val = static_cast<ArithmeticType>(*j.template get_ptr<const typename BasicJsonType::number_float_t*>()); + break; + } + + case value_t::null: + case value_t::object: + case value_t::array: + case value_t::string: + case value_t::boolean: + case value_t::binary: + case value_t::discarded: + default: + JSON_THROW(type_error::create(302, concat("type must be number, but is ", j.type_name()), &j)); + } +} + +template<typename BasicJsonType> +inline void from_json(const BasicJsonType& j, typename BasicJsonType::boolean_t& b) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_boolean())) + { + JSON_THROW(type_error::create(302, concat("type must be boolean, but is ", j.type_name()), &j)); + } + b = *j.template get_ptr<const typename 
BasicJsonType::boolean_t*>(); +} + +template<typename BasicJsonType> +inline void from_json(const BasicJsonType& j, typename BasicJsonType::string_t& s) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_string())) + { + JSON_THROW(type_error::create(302, concat("type must be string, but is ", j.type_name()), &j)); + } + s = *j.template get_ptr<const typename BasicJsonType::string_t*>(); +} + +template < + typename BasicJsonType, typename StringType, + enable_if_t < + std::is_assignable<StringType&, const typename BasicJsonType::string_t>::value + && !std::is_same<typename BasicJsonType::string_t, StringType>::value + && !is_json_ref<StringType>::value, int > = 0 > +inline void from_json(const BasicJsonType& j, StringType& s) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_string())) + { + JSON_THROW(type_error::create(302, concat("type must be string, but is ", j.type_name()), &j)); + } + + s = *j.template get_ptr<const typename BasicJsonType::string_t*>(); +} + +template<typename BasicJsonType> +inline void from_json(const BasicJsonType& j, typename BasicJsonType::number_float_t& val) +{ + get_arithmetic_value(j, val); +} + +template<typename BasicJsonType> +inline void from_json(const BasicJsonType& j, typename BasicJsonType::number_unsigned_t& val) +{ + get_arithmetic_value(j, val); +} + +template<typename BasicJsonType> +inline void from_json(const BasicJsonType& j, typename BasicJsonType::number_integer_t& val) +{ + get_arithmetic_value(j, val); +} + +template<typename BasicJsonType, typename EnumType, + enable_if_t<std::is_enum<EnumType>::value, int> = 0> +inline void from_json(const BasicJsonType& j, EnumType& e) +{ + typename std::underlying_type<EnumType>::type val; + get_arithmetic_value(j, val); + e = static_cast<EnumType>(val); +} + +// forward_list doesn't have an insert method +template<typename BasicJsonType, typename T, typename Allocator, + enable_if_t<is_getable<BasicJsonType, T>::value, int> = 0> +inline void from_json(const BasicJsonType& j, std::forward_list<T, Allocator>& l) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_array())) + { + JSON_THROW(type_error::create(302, concat("type must be array, but is ", j.type_name()), &j)); + } + l.clear(); + std::transform(j.rbegin(), j.rend(), + std::front_inserter(l), [](const BasicJsonType & i) + { + return i.template get<T>(); + }); +} + +// valarray doesn't have an insert method +template<typename BasicJsonType, typename T, + enable_if_t<is_getable<BasicJsonType, T>::value, int> = 0> +inline void from_json(const BasicJsonType& j, std::valarray<T>& l) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_array())) + { + JSON_THROW(type_error::create(302, concat("type must be array, but is ", j.type_name()), &j)); + } + l.resize(j.size()); + std::transform(j.begin(), j.end(), std::begin(l), + [](const BasicJsonType & elem) + { + return elem.template get<T>(); + }); +} + +template<typename BasicJsonType, typename T, std::size_t N> +auto from_json(const BasicJsonType& j, T (&arr)[N]) // NOLINT(cppcoreguidelines-avoid-c-arrays,hicpp-avoid-c-arrays,modernize-avoid-c-arrays) +-> decltype(j.template get<T>(), void()) +{ + for (std::size_t i = 0; i < N; ++i) + { + arr[i] = j.at(i).template get<T>(); + } +} + +template<typename BasicJsonType> +inline void from_json_array_impl(const BasicJsonType& j, typename BasicJsonType::array_t& arr, priority_tag<3> /*unused*/) +{ + arr = *j.template get_ptr<const typename BasicJsonType::array_t*>(); +} + +template<typename BasicJsonType, typename T, std::size_t N> +auto from_json_array_impl(const BasicJsonType& j, std::array<T, N>& arr, + 
priority_tag<2> /*unused*/) +-> decltype(j.template get<T>(), void()) +{ + for (std::size_t i = 0; i < N; ++i) + { + arr[i] = j.at(i).template get<T>(); + } +} + +template<typename BasicJsonType, typename ConstructibleArrayType, + enable_if_t< + std::is_assignable<ConstructibleArrayType&, ConstructibleArrayType>::value, + int> = 0> +auto from_json_array_impl(const BasicJsonType& j, ConstructibleArrayType& arr, priority_tag<1> /*unused*/) +-> decltype( + arr.reserve(std::declval<typename ConstructibleArrayType::size_type>()), + j.template get<typename ConstructibleArrayType::value_type>(), + void()) +{ + using std::end; + + ConstructibleArrayType ret; + ret.reserve(j.size()); + std::transform(j.begin(), j.end(), + std::inserter(ret, end(ret)), [](const BasicJsonType & i) + { + // get<BasicJsonType>() returns *this, this won't call a from_json + // method when value_type is BasicJsonType + return i.template get<typename ConstructibleArrayType::value_type>(); + }); + arr = std::move(ret); +} + +template<typename BasicJsonType, typename ConstructibleArrayType, + enable_if_t< + std::is_assignable<ConstructibleArrayType&, ConstructibleArrayType>::value, + int> = 0> +inline void from_json_array_impl(const BasicJsonType& j, ConstructibleArrayType& arr, + priority_tag<0> /*unused*/) +{ + using std::end; + + ConstructibleArrayType ret; + std::transform( + j.begin(), j.end(), std::inserter(ret, end(ret)), + [](const BasicJsonType & i) + { + // get<BasicJsonType>() returns *this, this won't call a from_json + // method when value_type is BasicJsonType + return i.template get<typename ConstructibleArrayType::value_type>(); + }); + arr = std::move(ret); +} + +template < typename BasicJsonType, typename ConstructibleArrayType, + enable_if_t < + is_constructible_array_type<BasicJsonType, ConstructibleArrayType>::value&& + !is_constructible_object_type<BasicJsonType, ConstructibleArrayType>::value&& + !is_constructible_string_type<BasicJsonType, ConstructibleArrayType>::value&& + !std::is_same<ConstructibleArrayType, typename BasicJsonType::binary_t>::value&& + !is_basic_json<ConstructibleArrayType>::value, + int > = 0 > +auto from_json(const BasicJsonType& j, ConstructibleArrayType& arr) +-> decltype(from_json_array_impl(j, arr, priority_tag<3> {}), +j.template get<typename ConstructibleArrayType::value_type>(), +void()) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_array())) + { + JSON_THROW(type_error::create(302, concat("type must be array, but is ", j.type_name()), &j)); + } + + from_json_array_impl(j, arr, priority_tag<3> {}); +} + +template < typename BasicJsonType, typename T, std::size_t... Idx > +std::array<T, sizeof...(Idx)> from_json_inplace_array_impl(BasicJsonType&& j, + identity_tag<std::array<T, sizeof...(Idx)>> /*unused*/, index_sequence<Idx...> /*unused*/) +{ + return { { std::forward<BasicJsonType>(j).at(Idx).template get<T>()... 
} }; +} + +template < typename BasicJsonType, typename T, std::size_t N > +auto from_json(BasicJsonType&& j, identity_tag<std::array<T, N>> tag) +-> decltype(from_json_inplace_array_impl(std::forward<BasicJsonType>(j), tag, make_index_sequence<N> {})) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_array())) + { + JSON_THROW(type_error::create(302, concat("type must be array, but is ", j.type_name()), &j)); + } + + return from_json_inplace_array_impl(std::forward<BasicJsonType>(j), tag, make_index_sequence<N> {}); +} + +template<typename BasicJsonType> +inline void from_json(const BasicJsonType& j, typename BasicJsonType::binary_t& bin) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_binary())) + { + JSON_THROW(type_error::create(302, concat("type must be binary, but is ", j.type_name()), &j)); + } + + bin = *j.template get_ptr<const typename BasicJsonType::binary_t*>(); +} + +template<typename BasicJsonType, typename ConstructibleObjectType, + enable_if_t<is_constructible_object_type<BasicJsonType, ConstructibleObjectType>::value, int> = 0> +inline void from_json(const BasicJsonType& j, ConstructibleObjectType& obj) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_object())) + { + JSON_THROW(type_error::create(302, concat("type must be object, but is ", j.type_name()), &j)); + } + + ConstructibleObjectType ret; + const auto* inner_object = j.template get_ptr<const typename BasicJsonType::object_t*>(); + using value_type = typename ConstructibleObjectType::value_type; + std::transform( + inner_object->begin(), inner_object->end(), + std::inserter(ret, ret.begin()), + [](typename BasicJsonType::object_t::value_type const & p) + { + return value_type(p.first, p.second.template get<typename ConstructibleObjectType::mapped_type>()); + }); + obj = std::move(ret); +} + +// overload for arithmetic types, not chosen for basic_json template arguments +// (BooleanType, etc..); note: Is it really necessary to provide explicit +// overloads for boolean_t etc. in case of a custom BooleanType which is not +// an arithmetic type? +template < typename BasicJsonType, typename ArithmeticType, + enable_if_t < + std::is_arithmetic<ArithmeticType>::value&& + !std::is_same<ArithmeticType, typename BasicJsonType::number_unsigned_t>::value&& + !std::is_same<ArithmeticType, typename BasicJsonType::number_integer_t>::value&& + !std::is_same<ArithmeticType, typename BasicJsonType::number_float_t>::value&& + !std::is_same<ArithmeticType, typename BasicJsonType::boolean_t>::value, + int > = 0 > +inline void from_json(const BasicJsonType& j, ArithmeticType& val) +{ + switch (static_cast<value_t>(j)) + { + case value_t::number_unsigned: + { + val = static_cast<ArithmeticType>(*j.template get_ptr<const typename BasicJsonType::number_unsigned_t*>()); + break; + } + case value_t::number_integer: + { + val = static_cast<ArithmeticType>(*j.template get_ptr<const typename BasicJsonType::number_integer_t*>()); + break; + } + case value_t::number_float: + { + val = static_cast<ArithmeticType>(*j.template get_ptr<const typename BasicJsonType::number_float_t*>()); + break; + } + case value_t::boolean: + { + val = static_cast<ArithmeticType>(*j.template get_ptr<const typename BasicJsonType::boolean_t*>()); + break; + } + + case value_t::null: + case value_t::object: + case value_t::array: + case value_t::string: + case value_t::binary: + case value_t::discarded: + default: + JSON_THROW(type_error::create(302, concat("type must be number, but is ", j.type_name()), &j)); + } +} + +template<typename BasicJsonType, typename... Args, std::size_t... 
Idx> +std::tuple<Args...> from_json_tuple_impl_base(BasicJsonType&& j, index_sequence<Idx...> /*unused*/) +{ + return std::make_tuple(std::forward<BasicJsonType>(j).at(Idx).template get<Args>()...); +} + +template < typename BasicJsonType, class A1, class A2 > +std::pair<A1, A2> from_json_tuple_impl(BasicJsonType&& j, identity_tag<std::pair<A1, A2>> /*unused*/, priority_tag<0> /*unused*/) +{ + return {std::forward<BasicJsonType>(j).at(0).template get<A1>(), + std::forward<BasicJsonType>(j).at(1).template get<A2>()}; +} + +template<typename BasicJsonType, typename A1, typename A2> +inline void from_json_tuple_impl(BasicJsonType&& j, std::pair<A1, A2>& p, priority_tag<1> /*unused*/) +{ + p = from_json_tuple_impl(std::forward<BasicJsonType>(j), identity_tag<std::pair<A1, A2>> {}, priority_tag<0> {}); +} + +template<typename BasicJsonType, typename... Args> +std::tuple<Args...> from_json_tuple_impl(BasicJsonType&& j, identity_tag<std::tuple<Args...>> /*unused*/, priority_tag<2> /*unused*/) +{ + return from_json_tuple_impl_base<BasicJsonType, Args...>(std::forward<BasicJsonType>(j), index_sequence_for<Args...> {}); +} + +template<typename BasicJsonType, typename... Args> +inline void from_json_tuple_impl(BasicJsonType&& j, std::tuple<Args...>& t, priority_tag<3> /*unused*/) +{ + t = from_json_tuple_impl_base<BasicJsonType, Args...>(std::forward<BasicJsonType>(j), index_sequence_for<Args...> {}); +} + +template<typename BasicJsonType, typename TupleRelated> +auto from_json(BasicJsonType&& j, TupleRelated&& t) +-> decltype(from_json_tuple_impl(std::forward<BasicJsonType>(j), std::forward<TupleRelated>(t), priority_tag<3> {})) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_array())) + { + JSON_THROW(type_error::create(302, concat("type must be array, but is ", j.type_name()), &j)); + } + + return from_json_tuple_impl(std::forward<BasicJsonType>(j), std::forward<TupleRelated>(t), priority_tag<3> {}); +} + +template < typename BasicJsonType, typename Key, typename Value, typename Compare, typename Allocator, + typename = enable_if_t < !std::is_constructible < + typename BasicJsonType::string_t, Key >::value >> +inline void from_json(const BasicJsonType& j, std::map<Key, Value, Compare, Allocator>& m) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_array())) + { + JSON_THROW(type_error::create(302, concat("type must be array, but is ", j.type_name()), &j)); + } + m.clear(); + for (const auto& p : j) + { + if (JSON_HEDLEY_UNLIKELY(!p.is_array())) + { + JSON_THROW(type_error::create(302, concat("type must be array, but is ", p.type_name()), &j)); + } + m.emplace(p.at(0).template get<Key>(), p.at(1).template get<Value>()); + } +} + +template < typename BasicJsonType, typename Key, typename Value, typename Hash, typename KeyEqual, typename Allocator, + typename = enable_if_t < !std::is_constructible < + typename BasicJsonType::string_t, Key >::value >> +inline void from_json(const BasicJsonType& j, std::unordered_map<Key, Value, Hash, KeyEqual, Allocator>& m) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_array())) + { + JSON_THROW(type_error::create(302, concat("type must be array, but is ", j.type_name()), &j)); + } + m.clear(); + for (const auto& p : j) + { + if (JSON_HEDLEY_UNLIKELY(!p.is_array())) + { + JSON_THROW(type_error::create(302, concat("type must be array, but is ", p.type_name()), &j)); + } + m.emplace(p.at(0).template get<Key>(), p.at(1).template get<Value>()); + } +} + +#if JSON_HAS_FILESYSTEM || JSON_HAS_EXPERIMENTAL_FILESYSTEM +template<typename BasicJsonType> +inline void from_json(const BasicJsonType& j, 
std_fs::path& p) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_string())) + { + JSON_THROW(type_error::create(302, concat("type must be string, but is ", j.type_name()), &j)); + } + p = *j.template get_ptr<const typename BasicJsonType::string_t*>(); +} +#endif + +struct from_json_fn +{ + template<typename BasicJsonType, typename T> + auto operator()(const BasicJsonType& j, T&& val) const + noexcept(noexcept(from_json(j, std::forward<T>(val)))) + -> decltype(from_json(j, std::forward<T>(val))) + { + return from_json(j, std::forward<T>(val)); + } +}; +} // namespace detail + +#ifndef JSON_HAS_CPP_17 +/// namespace to hold default `from_json` function +/// to see why this is required: +/// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2015/n4381.html +namespace // NOLINT(cert-dcl59-cpp,fuchsia-header-anon-namespaces,google-build-namespaces) +{ +#endif +JSON_INLINE_VARIABLE constexpr const auto& from_json = // NOLINT(misc-definitions-in-headers) + detail::static_const<detail::from_json_fn>::value; +#ifndef JSON_HAS_CPP_17 +} // namespace +#endif +} // namespace nlohmann + +// #include <nlohmann/detail/conversions/to_json.hpp> + + +#include <algorithm> // copy +#include <iterator> // begin, end +#include <string> // string +#include <tuple> // tuple, get +#include <type_traits> // is_same, is_constructible, is_floating_point, is_enum, underlying_type +#include <utility> // move, forward, declval, pair +#include <valarray> // valarray +#include <vector> // vector + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/iterators/iteration_proxy.hpp> + + +#include <cstddef> // size_t +#include <iterator> // input_iterator_tag +#include <string> // string, to_string +#include <tuple> // tuple_size, get, tuple_element +#include <utility> // move + +#if JSON_HAS_RANGES + #include <ranges> // enable_borrowed_range +#endif + +// #include <nlohmann/detail/meta/type_traits.hpp> + +// #include <nlohmann/detail/value_t.hpp> + + +namespace nlohmann +{ +namespace detail +{ +template<typename string_type> +void int_to_string( string_type& target, std::size_t value ) +{ + // For ADL + using std::to_string; + target = to_string(value); +} +template<typename IteratorType> class iteration_proxy_value +{ + public: + using difference_type = std::ptrdiff_t; + using value_type = iteration_proxy_value; + using pointer = value_type *; + using reference = value_type &; + using iterator_category = std::input_iterator_tag; + using string_type = typename std::remove_cv< typename std::remove_reference<decltype( std::declval<IteratorType>().key() ) >::type >::type; + + private: + /// the iterator + IteratorType anchor{}; + /// an index for arrays (used to create key names) + std::size_t array_index = 0; + /// last stringified array index + mutable std::size_t array_index_last = 0; + /// a string representation of the array index + mutable string_type array_index_str = "0"; + /// an empty string (to return a reference for primitive values) + string_type empty_str{}; + + public: + explicit iteration_proxy_value() = default; + explicit iteration_proxy_value(IteratorType it, std::size_t array_index_ = 0) + noexcept(std::is_nothrow_move_constructible<IteratorType>::value + && std::is_nothrow_default_constructible<string_type>::value) + : anchor(std::move(it)) + , array_index(array_index_) + {} + + iteration_proxy_value(iteration_proxy_value const&) = default; + iteration_proxy_value& operator=(iteration_proxy_value const&) = default; + // older GCCs are a bit fussy and require explicit noexcept specifiers on 
defaulted functions + iteration_proxy_value(iteration_proxy_value&&) + noexcept(std::is_nothrow_move_constructible<IteratorType>::value + && std::is_nothrow_move_constructible<string_type>::value) = default; + iteration_proxy_value& operator=(iteration_proxy_value&&) + noexcept(std::is_nothrow_move_assignable<IteratorType>::value + && std::is_nothrow_move_assignable<string_type>::value) = default; + ~iteration_proxy_value() = default; + + /// dereference operator (needed for range-based for) + const iteration_proxy_value& operator*() const + { + return *this; + } + + /// increment operator (needed for range-based for) + iteration_proxy_value& operator++() + { + ++anchor; + ++array_index; + + return *this; + } + + iteration_proxy_value operator++(int)& // NOLINT(cert-dcl21-cpp) + { + auto tmp = iteration_proxy_value(anchor, array_index); + ++anchor; + ++array_index; + return tmp; + } + + /// equality operator (needed for InputIterator) + bool operator==(const iteration_proxy_value& o) const + { + return anchor == o.anchor; + } + + /// inequality operator (needed for range-based for) + bool operator!=(const iteration_proxy_value& o) const + { + return anchor != o.anchor; + } + + /// return key of the iterator + const string_type& key() const + { + JSON_ASSERT(anchor.m_object != nullptr); + + switch (anchor.m_object->type()) + { + // use integer array index as key + case value_t::array: + { + if (array_index != array_index_last) + { + int_to_string( array_index_str, array_index ); + array_index_last = array_index; + } + return array_index_str; + } + + // use key from the object + case value_t::object: + return anchor.key(); + + // use an empty key for all primitive types + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + return empty_str; + } + } + + /// return value of the iterator + typename IteratorType::reference value() const + { + return anchor.value(); + } +}; + +/// proxy class for the items() function +template<typename IteratorType> class iteration_proxy +{ + private: + /// the container to iterate + typename IteratorType::pointer container = nullptr; + + public: + explicit iteration_proxy() = default; + + /// construct iteration proxy from a container + explicit iteration_proxy(typename IteratorType::reference cont) noexcept + : container(&cont) {} + + iteration_proxy(iteration_proxy const&) = default; + iteration_proxy& operator=(iteration_proxy const&) = default; + iteration_proxy(iteration_proxy&&) noexcept = default; + iteration_proxy& operator=(iteration_proxy&&) noexcept = default; + ~iteration_proxy() = default; + + /// return iterator begin (needed for range-based for) + iteration_proxy_value<IteratorType> begin() const noexcept + { + return iteration_proxy_value<IteratorType>(container->begin()); + } + + /// return iterator end (needed for range-based for) + iteration_proxy_value<IteratorType> end() const noexcept + { + return iteration_proxy_value<IteratorType>(container->end()); + } +}; + +// Structured Bindings Support +// For further reference see https://blog.tartanllama.xyz/structured-bindings/ +// And see https://github.com/nlohmann/json/pull/1391 +template<std::size_t N, typename IteratorType, enable_if_t<N == 0, int> = 0> +auto get(const nlohmann::detail::iteration_proxy_value<IteratorType>& i) -> decltype(i.key()) +{ + return i.key(); +} +// Structured Bindings Support +// For further 
reference see https://blog.tartanllama.xyz/structured-bindings/ +// And see https://github.com/nlohmann/json/pull/1391 +template<std::size_t N, typename IteratorType, enable_if_t<N == 1, int> = 0> +auto get(const nlohmann::detail::iteration_proxy_value<IteratorType>& i) -> decltype(i.value()) +{ + return i.value(); +} +} // namespace detail +} // namespace nlohmann + +// The Addition to the STD Namespace is required to add +// Structured Bindings Support to the iteration_proxy_value class +// For further reference see https://blog.tartanllama.xyz/structured-bindings/ +// And see https://github.com/nlohmann/json/pull/1391 +namespace std +{ +#if defined(__clang__) + // Fix: https://github.com/nlohmann/json/issues/1401 + #pragma clang diagnostic push + #pragma clang diagnostic ignored "-Wmismatched-tags" +#endif +template<typename IteratorType> +class tuple_size<::nlohmann::detail::iteration_proxy_value<IteratorType>> + : public std::integral_constant<std::size_t, 2> {}; + +template<std::size_t N, typename IteratorType> +class tuple_element<N, ::nlohmann::detail::iteration_proxy_value<IteratorType >> +{ + public: + using type = decltype( + get<N>(std::declval < + ::nlohmann::detail::iteration_proxy_value<IteratorType >> ())); +}; +#if defined(__clang__) + #pragma clang diagnostic pop +#endif +} // namespace std + +#if JSON_HAS_RANGES + template <typename IteratorType> + inline constexpr bool ::std::ranges::enable_borrowed_range<::nlohmann::detail::iteration_proxy<IteratorType>> = true; +#endif + +// #include <nlohmann/detail/meta/cpp_future.hpp> + +// #include <nlohmann/detail/meta/type_traits.hpp> + +// #include <nlohmann/detail/value_t.hpp> + + +#if JSON_HAS_EXPERIMENTAL_FILESYSTEM +#include <experimental/filesystem> +namespace nlohmann::detail +{ +namespace std_fs = std::experimental::filesystem; +} // namespace nlohmann::detail +#elif JSON_HAS_FILESYSTEM +#include <filesystem> +namespace nlohmann::detail +{ +namespace std_fs = std::filesystem; +} // namespace nlohmann::detail +#endif + +namespace nlohmann +{ +namespace detail +{ +////////////////// +// constructors // +////////////////// + +/* + * Note all external_constructor<>::construct functions need to call + * j.m_value.destroy(j.m_type) to avoid a memory leak in case j contains an + * allocated value (e.g., a string). See bug issue + * https://github.com/nlohmann/json/issues/2865 for more information. 
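The iteration_proxy / iteration_proxy_value machinery above, together with the std::tuple_size and std::tuple_element specializations, is what backs the public items() member and its structured-bindings support. A minimal usage sketch, assuming the vendored single header is reachable as <nlohmann/json.hpp> (the actual include path in this repository may differ) and a C++17 compiler:

```cpp
#include <iostream>
#include <nlohmann/json.hpp>

using nlohmann::json;

int main()
{
    json obj = {{"name", "gauge_42"}, {"area_km2", 12.5}};
    json arr = {10, 20, 30};

    // items() returns an iteration_proxy; each element is an
    // iteration_proxy_value exposing key() and value().
    for (const auto& item : obj.items())
    {
        std::cout << item.key() << " = " << item.value() << '\n';
    }

    // The tuple_size / tuple_element specializations enable structured
    // bindings; for arrays, key() is the stringified index ("0", "1", ...).
    for (const auto& [key, value] : arr.items())
    {
        std::cout << key << " -> " << value << '\n';
    }
}
```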
+ */ + +template<value_t> struct external_constructor; + +template<> +struct external_constructor<value_t::boolean> +{ + template<typename BasicJsonType> + static void construct(BasicJsonType& j, typename BasicJsonType::boolean_t b) noexcept + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::boolean; + j.m_value = b; + j.assert_invariant(); + } +}; + +template<> +struct external_constructor<value_t::string> +{ + template<typename BasicJsonType> + static void construct(BasicJsonType& j, const typename BasicJsonType::string_t& s) + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::string; + j.m_value = s; + j.assert_invariant(); + } + + template<typename BasicJsonType> + static void construct(BasicJsonType& j, typename BasicJsonType::string_t&& s) + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::string; + j.m_value = std::move(s); + j.assert_invariant(); + } + + template < typename BasicJsonType, typename CompatibleStringType, + enable_if_t < !std::is_same<CompatibleStringType, typename BasicJsonType::string_t>::value, + int > = 0 > + static void construct(BasicJsonType& j, const CompatibleStringType& str) + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::string; + j.m_value.string = j.template create<typename BasicJsonType::string_t>(str); + j.assert_invariant(); + } +}; + +template<> +struct external_constructor<value_t::binary> +{ + template<typename BasicJsonType> + static void construct(BasicJsonType& j, const typename BasicJsonType::binary_t& b) + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::binary; + j.m_value = typename BasicJsonType::binary_t(b); + j.assert_invariant(); + } + + template<typename BasicJsonType> + static void construct(BasicJsonType& j, typename BasicJsonType::binary_t&& b) + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::binary; + j.m_value = typename BasicJsonType::binary_t(std::move(b)); + j.assert_invariant(); + } +}; + +template<> +struct external_constructor<value_t::number_float> +{ + template<typename BasicJsonType> + static void construct(BasicJsonType& j, typename BasicJsonType::number_float_t val) noexcept + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::number_float; + j.m_value = val; + j.assert_invariant(); + } +}; + +template<> +struct external_constructor<value_t::number_unsigned> +{ + template<typename BasicJsonType> + static void construct(BasicJsonType& j, typename BasicJsonType::number_unsigned_t val) noexcept + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::number_unsigned; + j.m_value = val; + j.assert_invariant(); + } +}; + +template<> +struct external_constructor<value_t::number_integer> +{ + template<typename BasicJsonType> + static void construct(BasicJsonType& j, typename BasicJsonType::number_integer_t val) noexcept + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::number_integer; + j.m_value = val; + j.assert_invariant(); + } +}; + +template<> +struct external_constructor<value_t::array> +{ + template<typename BasicJsonType> + static void construct(BasicJsonType& j, const typename BasicJsonType::array_t& arr) + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::array; + j.m_value = arr; + j.set_parents(); + j.assert_invariant(); + } + + template<typename BasicJsonType> + static void construct(BasicJsonType& j, typename BasicJsonType::array_t&& arr) + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::array; + j.m_value = std::move(arr); + j.set_parents(); + j.assert_invariant(); + } + + template < typename BasicJsonType, typename CompatibleArrayType, + enable_if_t < 
!std::is_same<CompatibleArrayType, typename BasicJsonType::array_t>::value, + int > = 0 > + static void construct(BasicJsonType& j, const CompatibleArrayType& arr) + { + using std::begin; + using std::end; + + j.m_value.destroy(j.m_type); + j.m_type = value_t::array; + j.m_value.array = j.template create<typename BasicJsonType::array_t>(begin(arr), end(arr)); + j.set_parents(); + j.assert_invariant(); + } + + template<typename BasicJsonType> + static void construct(BasicJsonType& j, const std::vector<bool>& arr) + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::array; + j.m_value = value_t::array; + j.m_value.array->reserve(arr.size()); + for (const bool x : arr) + { + j.m_value.array->push_back(x); + j.set_parent(j.m_value.array->back()); + } + j.assert_invariant(); + } + + template<typename BasicJsonType, typename T, + enable_if_t<std::is_convertible<T, BasicJsonType>::value, int> = 0> + static void construct(BasicJsonType& j, const std::valarray<T>& arr) + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::array; + j.m_value = value_t::array; + j.m_value.array->resize(arr.size()); + if (arr.size() > 0) + { + std::copy(std::begin(arr), std::end(arr), j.m_value.array->begin()); + } + j.set_parents(); + j.assert_invariant(); + } +}; + +template<> +struct external_constructor<value_t::object> +{ + template<typename BasicJsonType> + static void construct(BasicJsonType& j, const typename BasicJsonType::object_t& obj) + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::object; + j.m_value = obj; + j.set_parents(); + j.assert_invariant(); + } + + template<typename BasicJsonType> + static void construct(BasicJsonType& j, typename BasicJsonType::object_t&& obj) + { + j.m_value.destroy(j.m_type); + j.m_type = value_t::object; + j.m_value = std::move(obj); + j.set_parents(); + j.assert_invariant(); + } + + template < typename BasicJsonType, typename CompatibleObjectType, + enable_if_t < !std::is_same<CompatibleObjectType, typename BasicJsonType::object_t>::value, int > = 0 > + static void construct(BasicJsonType& j, const CompatibleObjectType& obj) + { + using std::begin; + using std::end; + + j.m_value.destroy(j.m_type); + j.m_type = value_t::object; + j.m_value.object = j.template create<typename BasicJsonType::object_t>(begin(obj), end(obj)); + j.set_parents(); + j.assert_invariant(); + } +}; + +///////////// +// to_json // +///////////// + +template<typename BasicJsonType, typename T, + enable_if_t<std::is_same<T, typename BasicJsonType::boolean_t>::value, int> = 0> +inline void to_json(BasicJsonType& j, T b) noexcept +{ + external_constructor<value_t::boolean>::construct(j, b); +} + +template<typename BasicJsonType, typename CompatibleString, + enable_if_t<std::is_constructible<typename BasicJsonType::string_t, CompatibleString>::value, int> = 0> +inline void to_json(BasicJsonType& j, const CompatibleString& s) +{ + external_constructor<value_t::string>::construct(j, s); +} + +template<typename BasicJsonType> +inline void to_json(BasicJsonType& j, typename BasicJsonType::string_t&& s) +{ + external_constructor<value_t::string>::construct(j, std::move(s)); +} + +template<typename BasicJsonType, typename FloatType, + enable_if_t<std::is_floating_point<FloatType>::value, int> = 0> +inline void to_json(BasicJsonType& j, FloatType val) noexcept +{ + external_constructor<value_t::number_float>::construct(j, static_cast<typename BasicJsonType::number_float_t>(val)); +} + +template<typename BasicJsonType, typename CompatibleNumberUnsignedType, + 
enable_if_t<is_compatible_integer_type<typename BasicJsonType::number_unsigned_t, CompatibleNumberUnsignedType>::value, int> = 0> +inline void to_json(BasicJsonType& j, CompatibleNumberUnsignedType val) noexcept +{ + external_constructor<value_t::number_unsigned>::construct(j, static_cast<typename BasicJsonType::number_unsigned_t>(val)); +} + +template<typename BasicJsonType, typename CompatibleNumberIntegerType, + enable_if_t<is_compatible_integer_type<typename BasicJsonType::number_integer_t, CompatibleNumberIntegerType>::value, int> = 0> +inline void to_json(BasicJsonType& j, CompatibleNumberIntegerType val) noexcept +{ + external_constructor<value_t::number_integer>::construct(j, static_cast<typename BasicJsonType::number_integer_t>(val)); +} + +template<typename BasicJsonType, typename EnumType, + enable_if_t<std::is_enum<EnumType>::value, int> = 0> +inline void to_json(BasicJsonType& j, EnumType e) noexcept +{ + using underlying_type = typename std::underlying_type<EnumType>::type; + external_constructor<value_t::number_integer>::construct(j, static_cast<underlying_type>(e)); +} + +template<typename BasicJsonType> +inline void to_json(BasicJsonType& j, const std::vector<bool>& e) +{ + external_constructor<value_t::array>::construct(j, e); +} + +template < typename BasicJsonType, typename CompatibleArrayType, + enable_if_t < is_compatible_array_type<BasicJsonType, + CompatibleArrayType>::value&& + !is_compatible_object_type<BasicJsonType, CompatibleArrayType>::value&& + !is_compatible_string_type<BasicJsonType, CompatibleArrayType>::value&& + !std::is_same<typename BasicJsonType::binary_t, CompatibleArrayType>::value&& + !is_basic_json<CompatibleArrayType>::value, + int > = 0 > +inline void to_json(BasicJsonType& j, const CompatibleArrayType& arr) +{ + external_constructor<value_t::array>::construct(j, arr); +} + +template<typename BasicJsonType> +inline void to_json(BasicJsonType& j, const typename BasicJsonType::binary_t& bin) +{ + external_constructor<value_t::binary>::construct(j, bin); +} + +template<typename BasicJsonType, typename T, + enable_if_t<std::is_convertible<T, BasicJsonType>::value, int> = 0> +inline void to_json(BasicJsonType& j, const std::valarray<T>& arr) +{ + external_constructor<value_t::array>::construct(j, std::move(arr)); +} + +template<typename BasicJsonType> +inline void to_json(BasicJsonType& j, typename BasicJsonType::array_t&& arr) +{ + external_constructor<value_t::array>::construct(j, std::move(arr)); +} + +template < typename BasicJsonType, typename CompatibleObjectType, + enable_if_t < is_compatible_object_type<BasicJsonType, CompatibleObjectType>::value&& !is_basic_json<CompatibleObjectType>::value, int > = 0 > +inline void to_json(BasicJsonType& j, const CompatibleObjectType& obj) +{ + external_constructor<value_t::object>::construct(j, obj); +} + +template<typename BasicJsonType> +inline void to_json(BasicJsonType& j, typename BasicJsonType::object_t&& obj) +{ + external_constructor<value_t::object>::construct(j, std::move(obj)); +} + +template < + typename BasicJsonType, typename T, std::size_t N, + enable_if_t < !std::is_constructible<typename BasicJsonType::string_t, + const T(&)[N]>::value, // NOLINT(cppcoreguidelines-avoid-c-arrays,hicpp-avoid-c-arrays,modernize-avoid-c-arrays) + int > = 0 > +inline void to_json(BasicJsonType& j, const T(&arr)[N]) // NOLINT(cppcoreguidelines-avoid-c-arrays,hicpp-avoid-c-arrays,modernize-avoid-c-arrays) +{ + external_constructor<value_t::array>::construct(j, arr); +} + +template < typename BasicJsonType, 
typename T1, typename T2, enable_if_t < std::is_constructible<BasicJsonType, T1>::value&& std::is_constructible<BasicJsonType, T2>::value, int > = 0 > +inline void to_json(BasicJsonType& j, const std::pair<T1, T2>& p) +{ + j = { p.first, p.second }; +} + +// for https://github.com/nlohmann/json/pull/1134 +template<typename BasicJsonType, typename T, + enable_if_t<std::is_same<T, iteration_proxy_value<typename BasicJsonType::iterator>>::value, int> = 0> +inline void to_json(BasicJsonType& j, const T& b) +{ + j = { {b.key(), b.value()} }; +} + +template<typename BasicJsonType, typename Tuple, std::size_t... Idx> +inline void to_json_tuple_impl(BasicJsonType& j, const Tuple& t, index_sequence<Idx...> /*unused*/) +{ + j = { std::get<Idx>(t)... }; +} + +template<typename BasicJsonType, typename T, enable_if_t<is_constructible_tuple<BasicJsonType, T>::value, int > = 0> +inline void to_json(BasicJsonType& j, const T& t) +{ + to_json_tuple_impl(j, t, make_index_sequence<std::tuple_size<T>::value> {}); +} + +#if JSON_HAS_FILESYSTEM || JSON_HAS_EXPERIMENTAL_FILESYSTEM +template<typename BasicJsonType> +inline void to_json(BasicJsonType& j, const std_fs::path& p) +{ + j = p.string(); +} +#endif + +struct to_json_fn +{ + template<typename BasicJsonType, typename T> + auto operator()(BasicJsonType& j, T&& val) const noexcept(noexcept(to_json(j, std::forward<T>(val)))) + -> decltype(to_json(j, std::forward<T>(val)), void()) + { + return to_json(j, std::forward<T>(val)); + } +}; +} // namespace detail + +#ifndef JSON_HAS_CPP_17 +/// namespace to hold default `to_json` function +/// to see why this is required: +/// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2015/n4381.html +namespace // NOLINT(cert-dcl59-cpp,fuchsia-header-anon-namespaces,google-build-namespaces) +{ +#endif +JSON_INLINE_VARIABLE constexpr const auto& to_json = // NOLINT(misc-definitions-in-headers) + detail::static_const<detail::to_json_fn>::value; +#ifndef JSON_HAS_CPP_17 +} // namespace +#endif +} // namespace nlohmann + +// #include <nlohmann/detail/meta/identity_tag.hpp> + +// #include <nlohmann/detail/meta/type_traits.hpp> + + +namespace nlohmann +{ + +/// @sa https://json.nlohmann.me/api/adl_serializer/ +template<typename ValueType, typename> +struct adl_serializer +{ + /// @brief convert a JSON value to any value type + /// @sa https://json.nlohmann.me/api/adl_serializer/from_json/ + template<typename BasicJsonType, typename TargetType = ValueType> + static auto from_json(BasicJsonType && j, TargetType& val) noexcept( + noexcept(::nlohmann::from_json(std::forward<BasicJsonType>(j), val))) + -> decltype(::nlohmann::from_json(std::forward<BasicJsonType>(j), val), void()) + { + ::nlohmann::from_json(std::forward<BasicJsonType>(j), val); + } + + /// @brief convert a JSON value to any value type + /// @sa https://json.nlohmann.me/api/adl_serializer/from_json/ + template<typename BasicJsonType, typename TargetType = ValueType> + static auto from_json(BasicJsonType && j) noexcept( + noexcept(::nlohmann::from_json(std::forward<BasicJsonType>(j), detail::identity_tag<TargetType> {}))) + -> decltype(::nlohmann::from_json(std::forward<BasicJsonType>(j), detail::identity_tag<TargetType> {})) + { + return ::nlohmann::from_json(std::forward<BasicJsonType>(j), detail::identity_tag<TargetType> {}); + } + + /// @brief convert any value type to a JSON value + /// @sa https://json.nlohmann.me/api/adl_serializer/to_json/ + template<typename BasicJsonType, typename TargetType = ValueType> + static auto to_json(BasicJsonType& j, TargetType && 
val) noexcept( + noexcept(::nlohmann::to_json(j, std::forward<TargetType>(val)))) + -> decltype(::nlohmann::to_json(j, std::forward<TargetType>(val)), void()) + { + ::nlohmann::to_json(j, std::forward<TargetType>(val)); + } +}; +} // namespace nlohmann + +// #include <nlohmann/byte_container_with_subtype.hpp> + + +#include <cstdint> // uint8_t, uint64_t +#include <tuple> // tie +#include <utility> // move + +namespace nlohmann +{ + +/// @brief an internal type for a backed binary type +/// @sa https://json.nlohmann.me/api/byte_container_with_subtype/ +template<typename BinaryType> +class byte_container_with_subtype : public BinaryType +{ + public: + using container_type = BinaryType; + using subtype_type = std::uint64_t; + + /// @sa https://json.nlohmann.me/api/byte_container_with_subtype/byte_container_with_subtype/ + byte_container_with_subtype() noexcept(noexcept(container_type())) + : container_type() + {} + + /// @sa https://json.nlohmann.me/api/byte_container_with_subtype/byte_container_with_subtype/ + byte_container_with_subtype(const container_type& b) noexcept(noexcept(container_type(b))) + : container_type(b) + {} + + /// @sa https://json.nlohmann.me/api/byte_container_with_subtype/byte_container_with_subtype/ + byte_container_with_subtype(container_type&& b) noexcept(noexcept(container_type(std::move(b)))) + : container_type(std::move(b)) + {} + + /// @sa https://json.nlohmann.me/api/byte_container_with_subtype/byte_container_with_subtype/ + byte_container_with_subtype(const container_type& b, subtype_type subtype_) noexcept(noexcept(container_type(b))) + : container_type(b) + , m_subtype(subtype_) + , m_has_subtype(true) + {} + + /// @sa https://json.nlohmann.me/api/byte_container_with_subtype/byte_container_with_subtype/ + byte_container_with_subtype(container_type&& b, subtype_type subtype_) noexcept(noexcept(container_type(std::move(b)))) + : container_type(std::move(b)) + , m_subtype(subtype_) + , m_has_subtype(true) + {} + + bool operator==(const byte_container_with_subtype& rhs) const + { + return std::tie(static_cast<const BinaryType&>(*this), m_subtype, m_has_subtype) == + std::tie(static_cast<const BinaryType&>(rhs), rhs.m_subtype, rhs.m_has_subtype); + } + + bool operator!=(const byte_container_with_subtype& rhs) const + { + return !(rhs == *this); + } + + /// @brief sets the binary subtype + /// @sa https://json.nlohmann.me/api/byte_container_with_subtype/set_subtype/ + void set_subtype(subtype_type subtype_) noexcept + { + m_subtype = subtype_; + m_has_subtype = true; + } + + /// @brief return the binary subtype + /// @sa https://json.nlohmann.me/api/byte_container_with_subtype/subtype/ + constexpr subtype_type subtype() const noexcept + { + return m_has_subtype ? 
m_subtype : static_cast<subtype_type>(-1); + } + + /// @brief return whether the value has a subtype + /// @sa https://json.nlohmann.me/api/byte_container_with_subtype/has_subtype/ + constexpr bool has_subtype() const noexcept + { + return m_has_subtype; + } + + /// @brief clears the binary subtype + /// @sa https://json.nlohmann.me/api/byte_container_with_subtype/clear_subtype/ + void clear_subtype() noexcept + { + m_subtype = 0; + m_has_subtype = false; + } + + private: + subtype_type m_subtype = 0; + bool m_has_subtype = false; +}; + +} // namespace nlohmann + +// #include <nlohmann/detail/conversions/from_json.hpp> + +// #include <nlohmann/detail/conversions/to_json.hpp> + +// #include <nlohmann/detail/exceptions.hpp> + +// #include <nlohmann/detail/hash.hpp> + + +#include <cstdint> // uint8_t +#include <cstddef> // size_t +#include <functional> // hash + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/value_t.hpp> + + +namespace nlohmann +{ +namespace detail +{ + +// boost::hash_combine +inline std::size_t combine(std::size_t seed, std::size_t h) noexcept +{ + seed ^= h + 0x9e3779b9 + (seed << 6U) + (seed >> 2U); + return seed; +} + +/*! +@brief hash a JSON value + +The hash function tries to rely on std::hash where possible. Furthermore, the +type of the JSON value is taken into account to have different hash values for +null, 0, 0U, and false, etc. + +@tparam BasicJsonType basic_json specialization +@param j JSON value to hash +@return hash value of j +*/ +template<typename BasicJsonType> +std::size_t hash(const BasicJsonType& j) +{ + using string_t = typename BasicJsonType::string_t; + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + + const auto type = static_cast<std::size_t>(j.type()); + switch (j.type()) + { + case BasicJsonType::value_t::null: + case BasicJsonType::value_t::discarded: + { + return combine(type, 0); + } + + case BasicJsonType::value_t::object: + { + auto seed = combine(type, j.size()); + for (const auto& element : j.items()) + { + const auto h = std::hash<string_t> {}(element.key()); + seed = combine(seed, h); + seed = combine(seed, hash(element.value())); + } + return seed; + } + + case BasicJsonType::value_t::array: + { + auto seed = combine(type, j.size()); + for (const auto& element : j) + { + seed = combine(seed, hash(element)); + } + return seed; + } + + case BasicJsonType::value_t::string: + { + const auto h = std::hash<string_t> {}(j.template get_ref<const string_t&>()); + return combine(type, h); + } + + case BasicJsonType::value_t::boolean: + { + const auto h = std::hash<bool> {}(j.template get<bool>()); + return combine(type, h); + } + + case BasicJsonType::value_t::number_integer: + { + const auto h = std::hash<number_integer_t> {}(j.template get<number_integer_t>()); + return combine(type, h); + } + + case BasicJsonType::value_t::number_unsigned: + { + const auto h = std::hash<number_unsigned_t> {}(j.template get<number_unsigned_t>()); + return combine(type, h); + } + + case BasicJsonType::value_t::number_float: + { + const auto h = std::hash<number_float_t> {}(j.template get<number_float_t>()); + return combine(type, h); + } + + case BasicJsonType::value_t::binary: + { + auto seed = combine(type, j.get_binary().size()); + const auto h = std::hash<bool> {}(j.get_binary().has_subtype()); + seed = combine(seed, h); + seed = combine(seed, 
static_cast<std::size_t>(j.get_binary().subtype())); + for (const auto byte : j.get_binary()) + { + seed = combine(seed, std::hash<std::uint8_t> {}(byte)); + } + return seed; + } + + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) LCOV_EXCL_LINE + return 0; // LCOV_EXCL_LINE + } +} + +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/input/binary_reader.hpp> + + +#include <algorithm> // generate_n +#include <array> // array +#include <cmath> // ldexp +#include <cstddef> // size_t +#include <cstdint> // uint8_t, uint16_t, uint32_t, uint64_t +#include <cstdio> // snprintf +#include <cstring> // memcpy +#include <iterator> // back_inserter +#include <limits> // numeric_limits +#include <string> // char_traits, string +#include <utility> // make_pair, move +#include <vector> // vector +#include <map> // map + +// #include <nlohmann/detail/exceptions.hpp> + +// #include <nlohmann/detail/input/input_adapters.hpp> + + +#include <array> // array +#include <cstddef> // size_t +#include <cstring> // strlen +#include <iterator> // begin, end, iterator_traits, random_access_iterator_tag, distance, next +#include <memory> // shared_ptr, make_shared, addressof +#include <numeric> // accumulate +#include <string> // string, char_traits +#include <type_traits> // enable_if, is_base_of, is_pointer, is_integral, remove_pointer +#include <utility> // pair, declval + +#ifndef JSON_NO_IO + #include <cstdio> // FILE * + #include <istream> // istream +#endif // JSON_NO_IO + +// #include <nlohmann/detail/iterators/iterator_traits.hpp> + +// #include <nlohmann/detail/macro_scope.hpp> + + +namespace nlohmann +{ +namespace detail +{ +/// the supported input formats +enum class input_format_t { json, cbor, msgpack, ubjson, bson, bjdata }; + +//////////////////// +// input adapters // +//////////////////// + +#ifndef JSON_NO_IO +/*! +Input adapter for stdio file access. This adapter read only 1 byte and do not use any + buffer. This adapter is a very low level adapter. +*/ +class file_input_adapter +{ + public: + using char_type = char; + + JSON_HEDLEY_NON_NULL(2) + explicit file_input_adapter(std::FILE* f) noexcept + : m_file(f) + {} + + // make class move-only + file_input_adapter(const file_input_adapter&) = delete; + file_input_adapter(file_input_adapter&&) noexcept = default; + file_input_adapter& operator=(const file_input_adapter&) = delete; + file_input_adapter& operator=(file_input_adapter&&) = delete; + ~file_input_adapter() = default; + + std::char_traits<char>::int_type get_character() noexcept + { + return std::fgetc(m_file); + } + + private: + /// the file pointer to read from + std::FILE* m_file; +}; + + +/*! +Input adapter for a (caching) istream. Ignores a UFT Byte Order Mark at +beginning of input. Does not support changing the underlying std::streambuf +in mid-input. Maintains underlying std::istream and std::streambuf to support +subsequent use of standard std::istream operations to process any input +characters following those used in parsing the JSON input. Clears the +std::istream flags; any input errors (e.g., EOF) will be detected by the first +subsequent call for input from the std::istream. 
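The file and stream adapters described above are what json::parse and operator>> rely on for FILE* and std::istream input. A small, self-contained sketch, assuming the vendored header is available as <nlohmann/json.hpp>; the file name demo.json is purely illustrative:

```cpp
#include <cstdio>
#include <fstream>
#include <iostream>
#include <nlohmann/json.hpp>

using nlohmann::json;

int main()
{
    // write a small file first so the example can actually run
    { std::ofstream("demo.json") << R"({"reaches": [1, 2, 3], "outlet": 42})"; }

    // std::istream input goes through input_stream_adapter
    std::ifstream ifs("demo.json");
    json a;
    ifs >> a;                                   // operator>> reuses the parser
    std::cout << a["outlet"] << '\n';

    // a C FILE* goes through file_input_adapter (one fgetc() per character)
    std::FILE* fp = std::fopen("demo.json", "r");
    json b = json::parse(fp);
    std::fclose(fp);
    std::cout << b.dump(2) << '\n';
}
```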
+*/ +class input_stream_adapter +{ + public: + using char_type = char; + + ~input_stream_adapter() + { + // clear stream flags; we use underlying streambuf I/O, do not + // maintain ifstream flags, except eof + if (is != nullptr) + { + is->clear(is->rdstate() & std::ios::eofbit); + } + } + + explicit input_stream_adapter(std::istream& i) + : is(&i), sb(i.rdbuf()) + {} + + // delete because of pointer members + input_stream_adapter(const input_stream_adapter&) = delete; + input_stream_adapter& operator=(input_stream_adapter&) = delete; + input_stream_adapter& operator=(input_stream_adapter&&) = delete; + + input_stream_adapter(input_stream_adapter&& rhs) noexcept + : is(rhs.is), sb(rhs.sb) + { + rhs.is = nullptr; + rhs.sb = nullptr; + } + + // std::istream/std::streambuf use std::char_traits<char>::to_int_type, to + // ensure that std::char_traits<char>::eof() and the character 0xFF do not + // end up as the same value, e.g. 0xFFFFFFFF. + std::char_traits<char>::int_type get_character() + { + auto res = sb->sbumpc(); + // set eof manually, as we don't use the istream interface. + if (JSON_HEDLEY_UNLIKELY(res == std::char_traits<char>::eof())) + { + is->clear(is->rdstate() | std::ios::eofbit); + } + return res; + } + + private: + /// the associated input stream + std::istream* is = nullptr; + std::streambuf* sb = nullptr; +}; +#endif // JSON_NO_IO + +// General-purpose iterator-based adapter. It might not be as fast as +// theoretically possible for some containers, but it is extremely versatile. +template<typename IteratorType> +class iterator_input_adapter +{ + public: + using char_type = typename std::iterator_traits<IteratorType>::value_type; + + iterator_input_adapter(IteratorType first, IteratorType last) + : current(std::move(first)), end(std::move(last)) + {} + + typename std::char_traits<char_type>::int_type get_character() + { + if (JSON_HEDLEY_LIKELY(current != end)) + { + auto result = std::char_traits<char_type>::to_int_type(*current); + std::advance(current, 1); + return result; + } + + return std::char_traits<char_type>::eof(); + } + + private: + IteratorType current; + IteratorType end; + + template<typename BaseInputAdapter, size_t T> + friend struct wide_string_input_helper; + + bool empty() const + { + return current == end; + } +}; + + +template<typename BaseInputAdapter, size_t T> +struct wide_string_input_helper; + +template<typename BaseInputAdapter> +struct wide_string_input_helper<BaseInputAdapter, 4> +{ + // UTF-32 + static void fill_buffer(BaseInputAdapter& input, + std::array<std::char_traits<char>::int_type, 4>& utf8_bytes, + size_t& utf8_bytes_index, + size_t& utf8_bytes_filled) + { + utf8_bytes_index = 0; + + if (JSON_HEDLEY_UNLIKELY(input.empty())) + { + utf8_bytes[0] = std::char_traits<char>::eof(); + utf8_bytes_filled = 1; + } + else + { + // get the current character + const auto wc = input.get_character(); + + // UTF-32 to UTF-8 encoding + if (wc < 0x80) + { + utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(wc); + utf8_bytes_filled = 1; + } + else if (wc <= 0x7FF) + { + utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xC0u | ((static_cast<unsigned int>(wc) >> 6u) & 0x1Fu)); + utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | (static_cast<unsigned int>(wc) & 0x3Fu)); + utf8_bytes_filled = 2; + } + else if (wc <= 0xFFFF) + { + utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xE0u | ((static_cast<unsigned int>(wc) >> 12u) & 0x0Fu)); + utf8_bytes[1] = 
static_cast<std::char_traits<char>::int_type>(0x80u | ((static_cast<unsigned int>(wc) >> 6u) & 0x3Fu)); + utf8_bytes[2] = static_cast<std::char_traits<char>::int_type>(0x80u | (static_cast<unsigned int>(wc) & 0x3Fu)); + utf8_bytes_filled = 3; + } + else if (wc <= 0x10FFFF) + { + utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xF0u | ((static_cast<unsigned int>(wc) >> 18u) & 0x07u)); + utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | ((static_cast<unsigned int>(wc) >> 12u) & 0x3Fu)); + utf8_bytes[2] = static_cast<std::char_traits<char>::int_type>(0x80u | ((static_cast<unsigned int>(wc) >> 6u) & 0x3Fu)); + utf8_bytes[3] = static_cast<std::char_traits<char>::int_type>(0x80u | (static_cast<unsigned int>(wc) & 0x3Fu)); + utf8_bytes_filled = 4; + } + else + { + // unknown character + utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(wc); + utf8_bytes_filled = 1; + } + } + } +}; + +template<typename BaseInputAdapter> +struct wide_string_input_helper<BaseInputAdapter, 2> +{ + // UTF-16 + static void fill_buffer(BaseInputAdapter& input, + std::array<std::char_traits<char>::int_type, 4>& utf8_bytes, + size_t& utf8_bytes_index, + size_t& utf8_bytes_filled) + { + utf8_bytes_index = 0; + + if (JSON_HEDLEY_UNLIKELY(input.empty())) + { + utf8_bytes[0] = std::char_traits<char>::eof(); + utf8_bytes_filled = 1; + } + else + { + // get the current character + const auto wc = input.get_character(); + + // UTF-16 to UTF-8 encoding + if (wc < 0x80) + { + utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(wc); + utf8_bytes_filled = 1; + } + else if (wc <= 0x7FF) + { + utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xC0u | ((static_cast<unsigned int>(wc) >> 6u))); + utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | (static_cast<unsigned int>(wc) & 0x3Fu)); + utf8_bytes_filled = 2; + } + else if (0xD800 > wc || wc >= 0xE000) + { + utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xE0u | ((static_cast<unsigned int>(wc) >> 12u))); + utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | ((static_cast<unsigned int>(wc) >> 6u) & 0x3Fu)); + utf8_bytes[2] = static_cast<std::char_traits<char>::int_type>(0x80u | (static_cast<unsigned int>(wc) & 0x3Fu)); + utf8_bytes_filled = 3; + } + else + { + if (JSON_HEDLEY_UNLIKELY(!input.empty())) + { + const auto wc2 = static_cast<unsigned int>(input.get_character()); + const auto charcode = 0x10000u + (((static_cast<unsigned int>(wc) & 0x3FFu) << 10u) | (wc2 & 0x3FFu)); + utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xF0u | (charcode >> 18u)); + utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | ((charcode >> 12u) & 0x3Fu)); + utf8_bytes[2] = static_cast<std::char_traits<char>::int_type>(0x80u | ((charcode >> 6u) & 0x3Fu)); + utf8_bytes[3] = static_cast<std::char_traits<char>::int_type>(0x80u | (charcode & 0x3Fu)); + utf8_bytes_filled = 4; + } + else + { + utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(wc); + utf8_bytes_filled = 1; + } + } + } + } +}; + +// Wraps another input apdater to convert wide character types into individual bytes. 
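The two wide_string_input_helper specializations above transcode UTF-16 and UTF-32 input to UTF-8 before lexing, which is what lets the parser accept std::wstring, std::u16string and std::u32string directly. A small sketch, assuming <nlohmann/json.hpp> is on the include path:

```cpp
#include <iostream>
#include <string>
#include <nlohmann/json.hpp>

using nlohmann::json;

int main()
{
    // UTF-16 input (wide_string_input_helper<..., 2>)
    std::u16string j16 = u"{\"station\": \"Lyon\", \"id\": 7}";
    json a = json::parse(j16);

    // UTF-32 input (wide_string_input_helper<..., 4>)
    std::u32string j32 = U"[1.5, 2.5]";
    json b = json::parse(j32);

    // wchar_t input (2 or 4 bytes depending on the platform)
    std::wstring jw = L"{\"ok\": true}";
    json c = json::parse(jw);

    std::cout << a.dump() << ' ' << b.dump() << ' ' << c.dump() << '\n';
}
```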
+template<typename BaseInputAdapter, typename WideCharType> +class wide_string_input_adapter +{ + public: + using char_type = char; + + wide_string_input_adapter(BaseInputAdapter base) + : base_adapter(base) {} + + typename std::char_traits<char>::int_type get_character() noexcept + { + // check if buffer needs to be filled + if (utf8_bytes_index == utf8_bytes_filled) + { + fill_buffer<sizeof(WideCharType)>(); + + JSON_ASSERT(utf8_bytes_filled > 0); + JSON_ASSERT(utf8_bytes_index == 0); + } + + // use buffer + JSON_ASSERT(utf8_bytes_filled > 0); + JSON_ASSERT(utf8_bytes_index < utf8_bytes_filled); + return utf8_bytes[utf8_bytes_index++]; + } + + private: + BaseInputAdapter base_adapter; + + template<size_t T> + void fill_buffer() + { + wide_string_input_helper<BaseInputAdapter, T>::fill_buffer(base_adapter, utf8_bytes, utf8_bytes_index, utf8_bytes_filled); + } + + /// a buffer for UTF-8 bytes + std::array<std::char_traits<char>::int_type, 4> utf8_bytes = {{0, 0, 0, 0}}; + + /// index to the utf8_codes array for the next valid byte + std::size_t utf8_bytes_index = 0; + /// number of valid bytes in the utf8_codes array + std::size_t utf8_bytes_filled = 0; +}; + + +template<typename IteratorType, typename Enable = void> +struct iterator_input_adapter_factory +{ + using iterator_type = IteratorType; + using char_type = typename std::iterator_traits<iterator_type>::value_type; + using adapter_type = iterator_input_adapter<iterator_type>; + + static adapter_type create(IteratorType first, IteratorType last) + { + return adapter_type(std::move(first), std::move(last)); + } +}; + +template<typename T> +struct is_iterator_of_multibyte +{ + using value_type = typename std::iterator_traits<T>::value_type; + enum + { + value = sizeof(value_type) > 1 + }; +}; + +template<typename IteratorType> +struct iterator_input_adapter_factory<IteratorType, enable_if_t<is_iterator_of_multibyte<IteratorType>::value>> +{ + using iterator_type = IteratorType; + using char_type = typename std::iterator_traits<iterator_type>::value_type; + using base_adapter_type = iterator_input_adapter<iterator_type>; + using adapter_type = wide_string_input_adapter<base_adapter_type, char_type>; + + static adapter_type create(IteratorType first, IteratorType last) + { + return adapter_type(base_adapter_type(std::move(first), std::move(last))); + } +}; + +// General purpose iterator-based input +template<typename IteratorType> +typename iterator_input_adapter_factory<IteratorType>::adapter_type input_adapter(IteratorType first, IteratorType last) +{ + using factory_type = iterator_input_adapter_factory<IteratorType>; + return factory_type::create(first, last); +} + +// Convenience shorthand from container to iterator +// Enables ADL on begin(container) and end(container) +// Encloses the using declarations in namespace for not to leak them to outside scope + +namespace container_input_adapter_factory_impl +{ + +using std::begin; +using std::end; + +template<typename ContainerType, typename Enable = void> +struct container_input_adapter_factory {}; + +template<typename ContainerType> +struct container_input_adapter_factory< ContainerType, + void_t<decltype(begin(std::declval<ContainerType>()), end(std::declval<ContainerType>()))>> + { + using adapter_type = decltype(input_adapter(begin(std::declval<ContainerType>()), end(std::declval<ContainerType>()))); + + static adapter_type create(const ContainerType& container) +{ + return input_adapter(begin(container), end(container)); +} + }; + +} // namespace 
container_input_adapter_factory_impl + +template<typename ContainerType> +typename container_input_adapter_factory_impl::container_input_adapter_factory<ContainerType>::adapter_type input_adapter(const ContainerType& container) +{ + return container_input_adapter_factory_impl::container_input_adapter_factory<ContainerType>::create(container); +} + +#ifndef JSON_NO_IO +// Special cases with fast paths +inline file_input_adapter input_adapter(std::FILE* file) +{ + return file_input_adapter(file); +} + +inline input_stream_adapter input_adapter(std::istream& stream) +{ + return input_stream_adapter(stream); +} + +inline input_stream_adapter input_adapter(std::istream&& stream) +{ + return input_stream_adapter(stream); +} +#endif // JSON_NO_IO + +using contiguous_bytes_input_adapter = decltype(input_adapter(std::declval<const char*>(), std::declval<const char*>())); + +// Null-delimited strings, and the like. +template < typename CharT, + typename std::enable_if < + std::is_pointer<CharT>::value&& + !std::is_array<CharT>::value&& + std::is_integral<typename std::remove_pointer<CharT>::type>::value&& + sizeof(typename std::remove_pointer<CharT>::type) == 1, + int >::type = 0 > +contiguous_bytes_input_adapter input_adapter(CharT b) +{ + auto length = std::strlen(reinterpret_cast<const char*>(b)); + const auto* ptr = reinterpret_cast<const char*>(b); + return input_adapter(ptr, ptr + length); +} + +template<typename T, std::size_t N> +auto input_adapter(T (&array)[N]) -> decltype(input_adapter(array, array + N)) // NOLINT(cppcoreguidelines-avoid-c-arrays,hicpp-avoid-c-arrays,modernize-avoid-c-arrays) +{ + return input_adapter(array, array + N); +} + +// This class only handles inputs of input_buffer_adapter type. +// It's required so that expressions like {ptr, len} can be implicitly cast +// to the correct adapter. +class span_input_adapter +{ + public: + template < typename CharT, + typename std::enable_if < + std::is_pointer<CharT>::value&& + std::is_integral<typename std::remove_pointer<CharT>::type>::value&& + sizeof(typename std::remove_pointer<CharT>::type) == 1, + int >::type = 0 > + span_input_adapter(CharT b, std::size_t l) + : ia(reinterpret_cast<const char*>(b), reinterpret_cast<const char*>(b) + l) {} + + template<class IteratorType, + typename std::enable_if< + std::is_same<typename iterator_traits<IteratorType>::iterator_category, std::random_access_iterator_tag>::value, + int>::type = 0> + span_input_adapter(IteratorType first, IteratorType last) + : ia(input_adapter(first, last)) {} + + contiguous_bytes_input_adapter&& get() + { + return std::move(ia); // NOLINT(hicpp-move-const-arg,performance-move-const-arg) + } + + private: + contiguous_bytes_input_adapter ia; +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/input/json_sax.hpp> + + +#include <cstddef> +#include <string> // string +#include <utility> // move +#include <vector> // vector + +// #include <nlohmann/detail/exceptions.hpp> + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/string_concat.hpp> + + +namespace nlohmann +{ + +/*! +@brief SAX interface + +This class describes the SAX interface used by @ref nlohmann::json::sax_parse. +Each function is called in different situations while the input is parsed. The +boolean return value informs the parser whether to continue processing the +input. 
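The json_sax interface just described can be implemented directly and driven with the static json::sax_parse function, which fires one event per token without building a DOM. A hedged sketch (the key_counter name is illustrative), assuming <nlohmann/json.hpp>:

```cpp
#include <cstddef>
#include <iostream>
#include <string>
#include <nlohmann/json.hpp>

using nlohmann::json;

// a SAX handler that only counts object keys and ignores everything else
struct key_counter : nlohmann::json_sax<json>
{
    std::size_t keys = 0;

    bool null() override { return true; }
    bool boolean(bool) override { return true; }
    bool number_integer(number_integer_t) override { return true; }
    bool number_unsigned(number_unsigned_t) override { return true; }
    bool number_float(number_float_t, const string_t&) override { return true; }
    bool string(string_t&) override { return true; }
    bool binary(binary_t&) override { return true; }
    bool start_object(std::size_t) override { return true; }
    bool key(string_t&) override { ++keys; return true; }   // count every key
    bool end_object() override { return true; }
    bool start_array(std::size_t) override { return true; }
    bool end_array() override { return true; }
    bool parse_error(std::size_t, const std::string&,
                     const nlohmann::detail::exception&) override { return false; }
};

int main()
{
    key_counter sax;
    json::sax_parse(R"({"a": 1, "b": {"c": [true, null]}})", &sax);
    std::cout << "keys seen: " << sax.keys << '\n';   // 3
}
```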
+*/ +template<typename BasicJsonType> +struct json_sax +{ + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using binary_t = typename BasicJsonType::binary_t; + + /*! + @brief a null value was read + @return whether parsing should proceed + */ + virtual bool null() = 0; + + /*! + @brief a boolean value was read + @param[in] val boolean value + @return whether parsing should proceed + */ + virtual bool boolean(bool val) = 0; + + /*! + @brief an integer number was read + @param[in] val integer value + @return whether parsing should proceed + */ + virtual bool number_integer(number_integer_t val) = 0; + + /*! + @brief an unsigned integer number was read + @param[in] val unsigned integer value + @return whether parsing should proceed + */ + virtual bool number_unsigned(number_unsigned_t val) = 0; + + /*! + @brief a floating-point number was read + @param[in] val floating-point value + @param[in] s raw token value + @return whether parsing should proceed + */ + virtual bool number_float(number_float_t val, const string_t& s) = 0; + + /*! + @brief a string value was read + @param[in] val string value + @return whether parsing should proceed + @note It is safe to move the passed string value. + */ + virtual bool string(string_t& val) = 0; + + /*! + @brief a binary value was read + @param[in] val binary value + @return whether parsing should proceed + @note It is safe to move the passed binary value. + */ + virtual bool binary(binary_t& val) = 0; + + /*! + @brief the beginning of an object was read + @param[in] elements number of object elements or -1 if unknown + @return whether parsing should proceed + @note binary formats may report the number of elements + */ + virtual bool start_object(std::size_t elements) = 0; + + /*! + @brief an object key was read + @param[in] val object key + @return whether parsing should proceed + @note It is safe to move the passed string. + */ + virtual bool key(string_t& val) = 0; + + /*! + @brief the end of an object was read + @return whether parsing should proceed + */ + virtual bool end_object() = 0; + + /*! + @brief the beginning of an array was read + @param[in] elements number of array elements or -1 if unknown + @return whether parsing should proceed + @note binary formats may report the number of elements + */ + virtual bool start_array(std::size_t elements) = 0; + + /*! + @brief the end of an array was read + @return whether parsing should proceed + */ + virtual bool end_array() = 0; + + /*! + @brief a parse error occurred + @param[in] position the position in the input where the error occurs + @param[in] last_token the last read token + @param[in] ex an exception object describing the error + @return whether parsing should proceed (must return false) + */ + virtual bool parse_error(std::size_t position, + const std::string& last_token, + const detail::exception& ex) = 0; + + json_sax() = default; + json_sax(const json_sax&) = default; + json_sax(json_sax&&) noexcept = default; + json_sax& operator=(const json_sax&) = default; + json_sax& operator=(json_sax&&) noexcept = default; + virtual ~json_sax() = default; +}; + + +namespace detail +{ +/*! 
+@brief SAX implementation to create a JSON value from SAX events + +This class implements the @ref json_sax interface and processes the SAX events +to create a JSON value which makes it basically a DOM parser. The structure or +hierarchy of the JSON value is managed by the stack `ref_stack` which contains +a pointer to the respective array or object for each recursion depth. + +After successful parsing, the value that is passed by reference to the +constructor contains the parsed value. + +@tparam BasicJsonType the JSON type +*/ +template<typename BasicJsonType> +class json_sax_dom_parser +{ + public: + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using binary_t = typename BasicJsonType::binary_t; + + /*! + @param[in,out] r reference to a JSON value that is manipulated while + parsing + @param[in] allow_exceptions_ whether parse errors yield exceptions + */ + explicit json_sax_dom_parser(BasicJsonType& r, const bool allow_exceptions_ = true) + : root(r), allow_exceptions(allow_exceptions_) + {} + + // make class move-only + json_sax_dom_parser(const json_sax_dom_parser&) = delete; + json_sax_dom_parser(json_sax_dom_parser&&) = default; // NOLINT(hicpp-noexcept-move,performance-noexcept-move-constructor) + json_sax_dom_parser& operator=(const json_sax_dom_parser&) = delete; + json_sax_dom_parser& operator=(json_sax_dom_parser&&) = default; // NOLINT(hicpp-noexcept-move,performance-noexcept-move-constructor) + ~json_sax_dom_parser() = default; + + bool null() + { + handle_value(nullptr); + return true; + } + + bool boolean(bool val) + { + handle_value(val); + return true; + } + + bool number_integer(number_integer_t val) + { + handle_value(val); + return true; + } + + bool number_unsigned(number_unsigned_t val) + { + handle_value(val); + return true; + } + + bool number_float(number_float_t val, const string_t& /*unused*/) + { + handle_value(val); + return true; + } + + bool string(string_t& val) + { + handle_value(val); + return true; + } + + bool binary(binary_t& val) + { + handle_value(std::move(val)); + return true; + } + + bool start_object(std::size_t len) + { + ref_stack.push_back(handle_value(BasicJsonType::value_t::object)); + + if (JSON_HEDLEY_UNLIKELY(len != static_cast<std::size_t>(-1) && len > ref_stack.back()->max_size())) + { + JSON_THROW(out_of_range::create(408, concat("excessive object size: ", std::to_string(len)), ref_stack.back())); + } + + return true; + } + + bool key(string_t& val) + { + JSON_ASSERT(!ref_stack.empty()); + JSON_ASSERT(ref_stack.back()->is_object()); + + // add null at given key and store the reference for later + object_element = &(ref_stack.back()->m_value.object->operator[](val)); + return true; + } + + bool end_object() + { + JSON_ASSERT(!ref_stack.empty()); + JSON_ASSERT(ref_stack.back()->is_object()); + + ref_stack.back()->set_parents(); + ref_stack.pop_back(); + return true; + } + + bool start_array(std::size_t len) + { + ref_stack.push_back(handle_value(BasicJsonType::value_t::array)); + + if (JSON_HEDLEY_UNLIKELY(len != static_cast<std::size_t>(-1) && len > ref_stack.back()->max_size())) + { + JSON_THROW(out_of_range::create(408, concat("excessive array size: ", std::to_string(len)), ref_stack.back())); + } + + return true; + } + + bool end_array() + { + JSON_ASSERT(!ref_stack.empty()); + JSON_ASSERT(ref_stack.back()->is_array()); + + 
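json_sax_dom_parser is the handler behind the ordinary json::parse; its allow_exceptions flag corresponds to the third argument of json::parse, which makes a failed parse return a discarded value instead of throwing. A small sketch of that behaviour, assuming <nlohmann/json.hpp>:

```cpp
#include <iostream>
#include <nlohmann/json.hpp>

using nlohmann::json;

int main()
{
    // invalid JSON (trailing comma); callback = nullptr, allow_exceptions = false
    json j = json::parse(R"({"a": 1,})", nullptr, false);

    if (j.is_discarded())
    {
        std::cout << "parse failed, no exception thrown\n";
    }
    else
    {
        std::cout << j.dump() << '\n';
    }
}
```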
ref_stack.back()->set_parents(); + ref_stack.pop_back(); + return true; + } + + template<class Exception> + bool parse_error(std::size_t /*unused*/, const std::string& /*unused*/, + const Exception& ex) + { + errored = true; + static_cast<void>(ex); + if (allow_exceptions) + { + JSON_THROW(ex); + } + return false; + } + + constexpr bool is_errored() const + { + return errored; + } + + private: + /*! + @invariant If the ref stack is empty, then the passed value will be the new + root. + @invariant If the ref stack contains a value, then it is an array or an + object to which we can add elements + */ + template<typename Value> + JSON_HEDLEY_RETURNS_NON_NULL + BasicJsonType* handle_value(Value&& v) + { + if (ref_stack.empty()) + { + root = BasicJsonType(std::forward<Value>(v)); + return &root; + } + + JSON_ASSERT(ref_stack.back()->is_array() || ref_stack.back()->is_object()); + + if (ref_stack.back()->is_array()) + { + ref_stack.back()->m_value.array->emplace_back(std::forward<Value>(v)); + return &(ref_stack.back()->m_value.array->back()); + } + + JSON_ASSERT(ref_stack.back()->is_object()); + JSON_ASSERT(object_element); + *object_element = BasicJsonType(std::forward<Value>(v)); + return object_element; + } + + /// the parsed JSON value + BasicJsonType& root; + /// stack to model hierarchy of values + std::vector<BasicJsonType*> ref_stack {}; + /// helper to hold the reference for the next object element + BasicJsonType* object_element = nullptr; + /// whether a syntax error occurred + bool errored = false; + /// whether to throw exceptions in case of errors + const bool allow_exceptions = true; +}; + +template<typename BasicJsonType> +class json_sax_dom_callback_parser +{ + public: + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using binary_t = typename BasicJsonType::binary_t; + using parser_callback_t = typename BasicJsonType::parser_callback_t; + using parse_event_t = typename BasicJsonType::parse_event_t; + + json_sax_dom_callback_parser(BasicJsonType& r, + const parser_callback_t cb, + const bool allow_exceptions_ = true) + : root(r), callback(cb), allow_exceptions(allow_exceptions_) + { + keep_stack.push_back(true); + } + + // make class move-only + json_sax_dom_callback_parser(const json_sax_dom_callback_parser&) = delete; + json_sax_dom_callback_parser(json_sax_dom_callback_parser&&) = default; // NOLINT(hicpp-noexcept-move,performance-noexcept-move-constructor) + json_sax_dom_callback_parser& operator=(const json_sax_dom_callback_parser&) = delete; + json_sax_dom_callback_parser& operator=(json_sax_dom_callback_parser&&) = default; // NOLINT(hicpp-noexcept-move,performance-noexcept-move-constructor) + ~json_sax_dom_callback_parser() = default; + + bool null() + { + handle_value(nullptr); + return true; + } + + bool boolean(bool val) + { + handle_value(val); + return true; + } + + bool number_integer(number_integer_t val) + { + handle_value(val); + return true; + } + + bool number_unsigned(number_unsigned_t val) + { + handle_value(val); + return true; + } + + bool number_float(number_float_t val, const string_t& /*unused*/) + { + handle_value(val); + return true; + } + + bool string(string_t& val) + { + handle_value(val); + return true; + } + + bool binary(binary_t& val) + { + handle_value(std::move(val)); + return true; + } + + bool start_object(std::size_t len) + { + // 
check callback for object start + const bool keep = callback(static_cast<int>(ref_stack.size()), parse_event_t::object_start, discarded); + keep_stack.push_back(keep); + + auto val = handle_value(BasicJsonType::value_t::object, true); + ref_stack.push_back(val.second); + + // check object limit + if (ref_stack.back() && JSON_HEDLEY_UNLIKELY(len != static_cast<std::size_t>(-1) && len > ref_stack.back()->max_size())) + { + JSON_THROW(out_of_range::create(408, concat("excessive object size: ", std::to_string(len)), ref_stack.back())); + } + + return true; + } + + bool key(string_t& val) + { + BasicJsonType k = BasicJsonType(val); + + // check callback for key + const bool keep = callback(static_cast<int>(ref_stack.size()), parse_event_t::key, k); + key_keep_stack.push_back(keep); + + // add discarded value at given key and store the reference for later + if (keep && ref_stack.back()) + { + object_element = &(ref_stack.back()->m_value.object->operator[](val) = discarded); + } + + return true; + } + + bool end_object() + { + if (ref_stack.back()) + { + if (!callback(static_cast<int>(ref_stack.size()) - 1, parse_event_t::object_end, *ref_stack.back())) + { + // discard object + *ref_stack.back() = discarded; + } + else + { + ref_stack.back()->set_parents(); + } + } + + JSON_ASSERT(!ref_stack.empty()); + JSON_ASSERT(!keep_stack.empty()); + ref_stack.pop_back(); + keep_stack.pop_back(); + + if (!ref_stack.empty() && ref_stack.back() && ref_stack.back()->is_structured()) + { + // remove discarded value + for (auto it = ref_stack.back()->begin(); it != ref_stack.back()->end(); ++it) + { + if (it->is_discarded()) + { + ref_stack.back()->erase(it); + break; + } + } + } + + return true; + } + + bool start_array(std::size_t len) + { + const bool keep = callback(static_cast<int>(ref_stack.size()), parse_event_t::array_start, discarded); + keep_stack.push_back(keep); + + auto val = handle_value(BasicJsonType::value_t::array, true); + ref_stack.push_back(val.second); + + // check array limit + if (ref_stack.back() && JSON_HEDLEY_UNLIKELY(len != static_cast<std::size_t>(-1) && len > ref_stack.back()->max_size())) + { + JSON_THROW(out_of_range::create(408, concat("excessive array size: ", std::to_string(len)), ref_stack.back())); + } + + return true; + } + + bool end_array() + { + bool keep = true; + + if (ref_stack.back()) + { + keep = callback(static_cast<int>(ref_stack.size()) - 1, parse_event_t::array_end, *ref_stack.back()); + if (keep) + { + ref_stack.back()->set_parents(); + } + else + { + // discard array + *ref_stack.back() = discarded; + } + } + + JSON_ASSERT(!ref_stack.empty()); + JSON_ASSERT(!keep_stack.empty()); + ref_stack.pop_back(); + keep_stack.pop_back(); + + // remove discarded value + if (!keep && !ref_stack.empty() && ref_stack.back()->is_array()) + { + ref_stack.back()->m_value.array->pop_back(); + } + + return true; + } + + template<class Exception> + bool parse_error(std::size_t /*unused*/, const std::string& /*unused*/, + const Exception& ex) + { + errored = true; + static_cast<void>(ex); + if (allow_exceptions) + { + JSON_THROW(ex); + } + return false; + } + + constexpr bool is_errored() const + { + return errored; + } + + private: + /*! + @param[in] v value to add to the JSON value we build during parsing + @param[in] skip_callback whether we should skip calling the callback + function; this is required after start_array() and + start_object() SAX events, because otherwise we would call the + callback function with an empty array or object, respectively. 
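json_sax_dom_callback_parser is selected when json::parse is given a parser_callback_t; returning false from the callback discards the corresponding key, value or container while the DOM is built. A minimal sketch, assuming <nlohmann/json.hpp>; the filtered key name "debug" is illustrative:

```cpp
#include <iostream>
#include <nlohmann/json.hpp>

using nlohmann::json;

int main()
{
    // drop every object member whose key is "debug"; keep everything else
    json::parser_callback_t cb =
        [](int /*depth*/, json::parse_event_t event, json& parsed)
    {
        return !(event == json::parse_event_t::key && parsed == json("debug"));
    };

    json j = json::parse(R"({"debug": {"x": 1}, "value": 42})", cb);
    std::cout << j.dump() << '\n';   // expected: {"value":42}
}
```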
+ + @invariant If the ref stack is empty, then the passed value will be the new + root. + @invariant If the ref stack contains a value, then it is an array or an + object to which we can add elements + + @return pair of boolean (whether value should be kept) and pointer (to the + passed value in the ref_stack hierarchy; nullptr if not kept) + */ + template<typename Value> + std::pair<bool, BasicJsonType*> handle_value(Value&& v, const bool skip_callback = false) + { + JSON_ASSERT(!keep_stack.empty()); + + // do not handle this value if we know it would be added to a discarded + // container + if (!keep_stack.back()) + { + return {false, nullptr}; + } + + // create value + auto value = BasicJsonType(std::forward<Value>(v)); + + // check callback + const bool keep = skip_callback || callback(static_cast<int>(ref_stack.size()), parse_event_t::value, value); + + // do not handle this value if we just learnt it shall be discarded + if (!keep) + { + return {false, nullptr}; + } + + if (ref_stack.empty()) + { + root = std::move(value); + return {true, &root}; + } + + // skip this value if we already decided to skip the parent + // (https://github.com/nlohmann/json/issues/971#issuecomment-413678360) + if (!ref_stack.back()) + { + return {false, nullptr}; + } + + // we now only expect arrays and objects + JSON_ASSERT(ref_stack.back()->is_array() || ref_stack.back()->is_object()); + + // array + if (ref_stack.back()->is_array()) + { + ref_stack.back()->m_value.array->emplace_back(std::move(value)); + return {true, &(ref_stack.back()->m_value.array->back())}; + } + + // object + JSON_ASSERT(ref_stack.back()->is_object()); + // check if we should store an element for the current key + JSON_ASSERT(!key_keep_stack.empty()); + const bool store_element = key_keep_stack.back(); + key_keep_stack.pop_back(); + + if (!store_element) + { + return {false, nullptr}; + } + + JSON_ASSERT(object_element); + *object_element = std::move(value); + return {true, object_element}; + } + + /// the parsed JSON value + BasicJsonType& root; + /// stack to model hierarchy of values + std::vector<BasicJsonType*> ref_stack {}; + /// stack to manage which values to keep + std::vector<bool> keep_stack {}; + /// stack to manage which object keys to keep + std::vector<bool> key_keep_stack {}; + /// helper to hold the reference for the next object element + BasicJsonType* object_element = nullptr; + /// whether a syntax error occurred + bool errored = false; + /// callback function + const parser_callback_t callback = nullptr; + /// whether to throw exceptions in case of errors + const bool allow_exceptions = true; + /// a discarded value for the callback + BasicJsonType discarded = BasicJsonType::value_t::discarded; +}; + +template<typename BasicJsonType> +class json_sax_acceptor +{ + public: + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using binary_t = typename BasicJsonType::binary_t; + + bool null() + { + return true; + } + + bool boolean(bool /*unused*/) + { + return true; + } + + bool number_integer(number_integer_t /*unused*/) + { + return true; + } + + bool number_unsigned(number_unsigned_t /*unused*/) + { + return true; + } + + bool number_float(number_float_t /*unused*/, const string_t& /*unused*/) + { + return true; + } + + bool string(string_t& /*unused*/) + { + return true; + } + + bool binary(binary_t& 
/*unused*/) + { + return true; + } + + bool start_object(std::size_t /*unused*/ = static_cast<std::size_t>(-1)) + { + return true; + } + + bool key(string_t& /*unused*/) + { + return true; + } + + bool end_object() + { + return true; + } + + bool start_array(std::size_t /*unused*/ = static_cast<std::size_t>(-1)) + { + return true; + } + + bool end_array() + { + return true; + } + + bool parse_error(std::size_t /*unused*/, const std::string& /*unused*/, const detail::exception& /*unused*/) + { + return false; + } +}; +} // namespace detail + +} // namespace nlohmann + +// #include <nlohmann/detail/input/lexer.hpp> + + +#include <array> // array +#include <clocale> // localeconv +#include <cstddef> // size_t +#include <cstdio> // snprintf +#include <cstdlib> // strtof, strtod, strtold, strtoll, strtoull +#include <initializer_list> // initializer_list +#include <string> // char_traits, string +#include <utility> // move +#include <vector> // vector + +// #include <nlohmann/detail/input/input_adapters.hpp> + +// #include <nlohmann/detail/input/position_t.hpp> + +// #include <nlohmann/detail/macro_scope.hpp> + + +namespace nlohmann +{ +namespace detail +{ +/////////// +// lexer // +/////////// + +template<typename BasicJsonType> +class lexer_base +{ + public: + /// token types for the parser + enum class token_type + { + uninitialized, ///< indicating the scanner is uninitialized + literal_true, ///< the `true` literal + literal_false, ///< the `false` literal + literal_null, ///< the `null` literal + value_string, ///< a string -- use get_string() for actual value + value_unsigned, ///< an unsigned integer -- use get_number_unsigned() for actual value + value_integer, ///< a signed integer -- use get_number_integer() for actual value + value_float, ///< an floating point number -- use get_number_float() for actual value + begin_array, ///< the character for array begin `[` + begin_object, ///< the character for object begin `{` + end_array, ///< the character for array end `]` + end_object, ///< the character for object end `}` + name_separator, ///< the name separator `:` + value_separator, ///< the value separator `,` + parse_error, ///< indicating a parse error + end_of_input, ///< indicating the end of the input buffer + literal_or_value ///< a literal or the begin of a value (only for diagnostics) + }; + + /// return name of values of type token_type (only used for errors) + JSON_HEDLEY_RETURNS_NON_NULL + JSON_HEDLEY_CONST + static const char* token_type_name(const token_type t) noexcept + { + switch (t) + { + case token_type::uninitialized: + return "<uninitialized>"; + case token_type::literal_true: + return "true literal"; + case token_type::literal_false: + return "false literal"; + case token_type::literal_null: + return "null literal"; + case token_type::value_string: + return "string literal"; + case token_type::value_unsigned: + case token_type::value_integer: + case token_type::value_float: + return "number literal"; + case token_type::begin_array: + return "'['"; + case token_type::begin_object: + return "'{'"; + case token_type::end_array: + return "']'"; + case token_type::end_object: + return "'}'"; + case token_type::name_separator: + return "':'"; + case token_type::value_separator: + return "','"; + case token_type::parse_error: + return "<parse error>"; + case token_type::end_of_input: + return "end of input"; + case token_type::literal_or_value: + return "'[', '{', or a literal"; + // LCOV_EXCL_START + default: // catch non-enum values + return "unknown token"; + // 
LCOV_EXCL_STOP + } + } +}; +/*! +@brief lexical analysis + +This class organizes the lexical analysis during JSON deserialization. +*/ +template<typename BasicJsonType, typename InputAdapterType> +class lexer : public lexer_base<BasicJsonType> +{ + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using char_type = typename InputAdapterType::char_type; + using char_int_type = typename std::char_traits<char_type>::int_type; + + public: + using token_type = typename lexer_base<BasicJsonType>::token_type; + + explicit lexer(InputAdapterType&& adapter, bool ignore_comments_ = false) noexcept + : ia(std::move(adapter)) + , ignore_comments(ignore_comments_) + , decimal_point_char(static_cast<char_int_type>(get_decimal_point())) + {} + + // delete because of pointer members + lexer(const lexer&) = delete; + lexer(lexer&&) = default; // NOLINT(hicpp-noexcept-move,performance-noexcept-move-constructor) + lexer& operator=(lexer&) = delete; + lexer& operator=(lexer&&) = default; // NOLINT(hicpp-noexcept-move,performance-noexcept-move-constructor) + ~lexer() = default; + + private: + ///////////////////// + // locales + ///////////////////// + + /// return the locale-dependent decimal point + JSON_HEDLEY_PURE + static char get_decimal_point() noexcept + { + const auto* loc = localeconv(); + JSON_ASSERT(loc != nullptr); + return (loc->decimal_point == nullptr) ? '.' : *(loc->decimal_point); + } + + ///////////////////// + // scan functions + ///////////////////// + + /*! + @brief get codepoint from 4 hex characters following `\u` + + For input "\u c1 c2 c3 c4" the codepoint is: + (c1 * 0x1000) + (c2 * 0x0100) + (c3 * 0x0010) + c4 + = (c1 << 12) + (c2 << 8) + (c3 << 4) + (c4 << 0) + + Furthermore, the possible characters '0'..'9', 'A'..'F', and 'a'..'f' + must be converted to the integers 0x0..0x9, 0xA..0xF, 0xA..0xF, resp. The + conversion is done by subtracting the offset (0x30, 0x37, and 0x57) + between the ASCII value of the character and the desired integer value. + + @return codepoint (0x0000..0xFFFF) or -1 in case of an error (e.g. EOF or + non-hex character) + */ + int get_codepoint() + { + // this function only makes sense after reading `\u` + JSON_ASSERT(current == 'u'); + int codepoint = 0; + + const auto factors = { 12u, 8u, 4u, 0u }; + for (const auto factor : factors) + { + get(); + + if (current >= '0' && current <= '9') + { + codepoint += static_cast<int>((static_cast<unsigned int>(current) - 0x30u) << factor); + } + else if (current >= 'A' && current <= 'F') + { + codepoint += static_cast<int>((static_cast<unsigned int>(current) - 0x37u) << factor); + } + else if (current >= 'a' && current <= 'f') + { + codepoint += static_cast<int>((static_cast<unsigned int>(current) - 0x57u) << factor); + } + else + { + return -1; + } + } + + JSON_ASSERT(0x0000 <= codepoint && codepoint <= 0xFFFF); + return codepoint; + } + + /*! + @brief check if the next byte(s) are inside a given range + + Adds the current byte and, for each passed range, reads a new byte and + checks if it is inside the range. If a violation was detected, set up an + error message and return false. Otherwise, return true. 
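+
+    Worked example: the lead byte 0xE0 introduces a three-byte UTF-8
+    sequence, so scan_string() calls this function with
+    ranges = {0xA0, 0xBF, 0x80, 0xBF}. For the input bytes E0 A4 B9
+    (U+0939, DEVANAGARI LETTER HA) the checks 0xA0 <= 0xA4 <= 0xBF and
+    0x80 <= 0xB9 <= 0xBF both hold, so all three bytes are added to
+    token_buffer; a byte outside its range would instead produce the
+    "ill-formed UTF-8 byte" error.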
+ + @param[in] ranges list of integers; interpreted as list of pairs of + inclusive lower and upper bound, respectively + + @pre The passed list @a ranges must have 2, 4, or 6 elements; that is, + 1, 2, or 3 pairs. This precondition is enforced by an assertion. + + @return true if and only if no range violation was detected + */ + bool next_byte_in_range(std::initializer_list<char_int_type> ranges) + { + JSON_ASSERT(ranges.size() == 2 || ranges.size() == 4 || ranges.size() == 6); + add(current); + + for (auto range = ranges.begin(); range != ranges.end(); ++range) + { + get(); + if (JSON_HEDLEY_LIKELY(*range <= current && current <= *(++range))) + { + add(current); + } + else + { + error_message = "invalid string: ill-formed UTF-8 byte"; + return false; + } + } + + return true; + } + + /*! + @brief scan a string literal + + This function scans a string according to Sect. 7 of RFC 8259. While + scanning, bytes are escaped and copied into buffer token_buffer. Then the + function returns successfully, token_buffer is *not* null-terminated (as it + may contain \0 bytes), and token_buffer.size() is the number of bytes in the + string. + + @return token_type::value_string if string could be successfully scanned, + token_type::parse_error otherwise + + @note In case of errors, variable error_message contains a textual + description. + */ + token_type scan_string() + { + // reset token_buffer (ignore opening quote) + reset(); + + // we entered the function by reading an open quote + JSON_ASSERT(current == '\"'); + + while (true) + { + // get next character + switch (get()) + { + // end of file while parsing string + case std::char_traits<char_type>::eof(): + { + error_message = "invalid string: missing closing quote"; + return token_type::parse_error; + } + + // closing quote + case '\"': + { + return token_type::value_string; + } + + // escapes + case '\\': + { + switch (get()) + { + // quotation mark + case '\"': + add('\"'); + break; + // reverse solidus + case '\\': + add('\\'); + break; + // solidus + case '/': + add('/'); + break; + // backspace + case 'b': + add('\b'); + break; + // form feed + case 'f': + add('\f'); + break; + // line feed + case 'n': + add('\n'); + break; + // carriage return + case 'r': + add('\r'); + break; + // tab + case 't': + add('\t'); + break; + + // unicode escapes + case 'u': + { + const int codepoint1 = get_codepoint(); + int codepoint = codepoint1; // start with codepoint1 + + if (JSON_HEDLEY_UNLIKELY(codepoint1 == -1)) + { + error_message = "invalid string: '\\u' must be followed by 4 hex digits"; + return token_type::parse_error; + } + + // check if code point is a high surrogate + if (0xD800 <= codepoint1 && codepoint1 <= 0xDBFF) + { + // expect next \uxxxx entry + if (JSON_HEDLEY_LIKELY(get() == '\\' && get() == 'u')) + { + const int codepoint2 = get_codepoint(); + + if (JSON_HEDLEY_UNLIKELY(codepoint2 == -1)) + { + error_message = "invalid string: '\\u' must be followed by 4 hex digits"; + return token_type::parse_error; + } + + // check if codepoint2 is a low surrogate + if (JSON_HEDLEY_LIKELY(0xDC00 <= codepoint2 && codepoint2 <= 0xDFFF)) + { + // overwrite codepoint + codepoint = static_cast<int>( + // high surrogate occupies the most significant 22 bits + (static_cast<unsigned int>(codepoint1) << 10u) + // low surrogate occupies the least significant 15 bits + + static_cast<unsigned int>(codepoint2) + // there is still the 0xD800, 0xDC00 and 0x10000 noise + // in the result, so we have to subtract with: + // (0xD800 << 10) + DC00 - 0x10000 = 0x35FDC00 
+ - 0x35FDC00u); + } + else + { + error_message = "invalid string: surrogate U+D800..U+DBFF must be followed by U+DC00..U+DFFF"; + return token_type::parse_error; + } + } + else + { + error_message = "invalid string: surrogate U+D800..U+DBFF must be followed by U+DC00..U+DFFF"; + return token_type::parse_error; + } + } + else + { + if (JSON_HEDLEY_UNLIKELY(0xDC00 <= codepoint1 && codepoint1 <= 0xDFFF)) + { + error_message = "invalid string: surrogate U+DC00..U+DFFF must follow U+D800..U+DBFF"; + return token_type::parse_error; + } + } + + // result of the above calculation yields a proper codepoint + JSON_ASSERT(0x00 <= codepoint && codepoint <= 0x10FFFF); + + // translate codepoint into bytes + if (codepoint < 0x80) + { + // 1-byte characters: 0xxxxxxx (ASCII) + add(static_cast<char_int_type>(codepoint)); + } + else if (codepoint <= 0x7FF) + { + // 2-byte characters: 110xxxxx 10xxxxxx + add(static_cast<char_int_type>(0xC0u | (static_cast<unsigned int>(codepoint) >> 6u))); + add(static_cast<char_int_type>(0x80u | (static_cast<unsigned int>(codepoint) & 0x3Fu))); + } + else if (codepoint <= 0xFFFF) + { + // 3-byte characters: 1110xxxx 10xxxxxx 10xxxxxx + add(static_cast<char_int_type>(0xE0u | (static_cast<unsigned int>(codepoint) >> 12u))); + add(static_cast<char_int_type>(0x80u | ((static_cast<unsigned int>(codepoint) >> 6u) & 0x3Fu))); + add(static_cast<char_int_type>(0x80u | (static_cast<unsigned int>(codepoint) & 0x3Fu))); + } + else + { + // 4-byte characters: 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx + add(static_cast<char_int_type>(0xF0u | (static_cast<unsigned int>(codepoint) >> 18u))); + add(static_cast<char_int_type>(0x80u | ((static_cast<unsigned int>(codepoint) >> 12u) & 0x3Fu))); + add(static_cast<char_int_type>(0x80u | ((static_cast<unsigned int>(codepoint) >> 6u) & 0x3Fu))); + add(static_cast<char_int_type>(0x80u | (static_cast<unsigned int>(codepoint) & 0x3Fu))); + } + + break; + } + + // other characters after escape + default: + error_message = "invalid string: forbidden character after backslash"; + return token_type::parse_error; + } + + break; + } + + // invalid control characters + case 0x00: + { + error_message = "invalid string: control character U+0000 (NUL) must be escaped to \\u0000"; + return token_type::parse_error; + } + + case 0x01: + { + error_message = "invalid string: control character U+0001 (SOH) must be escaped to \\u0001"; + return token_type::parse_error; + } + + case 0x02: + { + error_message = "invalid string: control character U+0002 (STX) must be escaped to \\u0002"; + return token_type::parse_error; + } + + case 0x03: + { + error_message = "invalid string: control character U+0003 (ETX) must be escaped to \\u0003"; + return token_type::parse_error; + } + + case 0x04: + { + error_message = "invalid string: control character U+0004 (EOT) must be escaped to \\u0004"; + return token_type::parse_error; + } + + case 0x05: + { + error_message = "invalid string: control character U+0005 (ENQ) must be escaped to \\u0005"; + return token_type::parse_error; + } + + case 0x06: + { + error_message = "invalid string: control character U+0006 (ACK) must be escaped to \\u0006"; + return token_type::parse_error; + } + + case 0x07: + { + error_message = "invalid string: control character U+0007 (BEL) must be escaped to \\u0007"; + return token_type::parse_error; + } + + case 0x08: + { + error_message = "invalid string: control character U+0008 (BS) must be escaped to \\u0008 or \\b"; + return token_type::parse_error; + } + + case 0x09: + { + error_message = "invalid 
string: control character U+0009 (HT) must be escaped to \\u0009 or \\t"; + return token_type::parse_error; + } + + case 0x0A: + { + error_message = "invalid string: control character U+000A (LF) must be escaped to \\u000A or \\n"; + return token_type::parse_error; + } + + case 0x0B: + { + error_message = "invalid string: control character U+000B (VT) must be escaped to \\u000B"; + return token_type::parse_error; + } + + case 0x0C: + { + error_message = "invalid string: control character U+000C (FF) must be escaped to \\u000C or \\f"; + return token_type::parse_error; + } + + case 0x0D: + { + error_message = "invalid string: control character U+000D (CR) must be escaped to \\u000D or \\r"; + return token_type::parse_error; + } + + case 0x0E: + { + error_message = "invalid string: control character U+000E (SO) must be escaped to \\u000E"; + return token_type::parse_error; + } + + case 0x0F: + { + error_message = "invalid string: control character U+000F (SI) must be escaped to \\u000F"; + return token_type::parse_error; + } + + case 0x10: + { + error_message = "invalid string: control character U+0010 (DLE) must be escaped to \\u0010"; + return token_type::parse_error; + } + + case 0x11: + { + error_message = "invalid string: control character U+0011 (DC1) must be escaped to \\u0011"; + return token_type::parse_error; + } + + case 0x12: + { + error_message = "invalid string: control character U+0012 (DC2) must be escaped to \\u0012"; + return token_type::parse_error; + } + + case 0x13: + { + error_message = "invalid string: control character U+0013 (DC3) must be escaped to \\u0013"; + return token_type::parse_error; + } + + case 0x14: + { + error_message = "invalid string: control character U+0014 (DC4) must be escaped to \\u0014"; + return token_type::parse_error; + } + + case 0x15: + { + error_message = "invalid string: control character U+0015 (NAK) must be escaped to \\u0015"; + return token_type::parse_error; + } + + case 0x16: + { + error_message = "invalid string: control character U+0016 (SYN) must be escaped to \\u0016"; + return token_type::parse_error; + } + + case 0x17: + { + error_message = "invalid string: control character U+0017 (ETB) must be escaped to \\u0017"; + return token_type::parse_error; + } + + case 0x18: + { + error_message = "invalid string: control character U+0018 (CAN) must be escaped to \\u0018"; + return token_type::parse_error; + } + + case 0x19: + { + error_message = "invalid string: control character U+0019 (EM) must be escaped to \\u0019"; + return token_type::parse_error; + } + + case 0x1A: + { + error_message = "invalid string: control character U+001A (SUB) must be escaped to \\u001A"; + return token_type::parse_error; + } + + case 0x1B: + { + error_message = "invalid string: control character U+001B (ESC) must be escaped to \\u001B"; + return token_type::parse_error; + } + + case 0x1C: + { + error_message = "invalid string: control character U+001C (FS) must be escaped to \\u001C"; + return token_type::parse_error; + } + + case 0x1D: + { + error_message = "invalid string: control character U+001D (GS) must be escaped to \\u001D"; + return token_type::parse_error; + } + + case 0x1E: + { + error_message = "invalid string: control character U+001E (RS) must be escaped to \\u001E"; + return token_type::parse_error; + } + + case 0x1F: + { + error_message = "invalid string: control character U+001F (US) must be escaped to \\u001F"; + return token_type::parse_error; + } + + // U+0020..U+007F (except U+0022 (quote) and U+005C (backspace)) + case 0x20: + case 
0x21: + case 0x23: + case 0x24: + case 0x25: + case 0x26: + case 0x27: + case 0x28: + case 0x29: + case 0x2A: + case 0x2B: + case 0x2C: + case 0x2D: + case 0x2E: + case 0x2F: + case 0x30: + case 0x31: + case 0x32: + case 0x33: + case 0x34: + case 0x35: + case 0x36: + case 0x37: + case 0x38: + case 0x39: + case 0x3A: + case 0x3B: + case 0x3C: + case 0x3D: + case 0x3E: + case 0x3F: + case 0x40: + case 0x41: + case 0x42: + case 0x43: + case 0x44: + case 0x45: + case 0x46: + case 0x47: + case 0x48: + case 0x49: + case 0x4A: + case 0x4B: + case 0x4C: + case 0x4D: + case 0x4E: + case 0x4F: + case 0x50: + case 0x51: + case 0x52: + case 0x53: + case 0x54: + case 0x55: + case 0x56: + case 0x57: + case 0x58: + case 0x59: + case 0x5A: + case 0x5B: + case 0x5D: + case 0x5E: + case 0x5F: + case 0x60: + case 0x61: + case 0x62: + case 0x63: + case 0x64: + case 0x65: + case 0x66: + case 0x67: + case 0x68: + case 0x69: + case 0x6A: + case 0x6B: + case 0x6C: + case 0x6D: + case 0x6E: + case 0x6F: + case 0x70: + case 0x71: + case 0x72: + case 0x73: + case 0x74: + case 0x75: + case 0x76: + case 0x77: + case 0x78: + case 0x79: + case 0x7A: + case 0x7B: + case 0x7C: + case 0x7D: + case 0x7E: + case 0x7F: + { + add(current); + break; + } + + // U+0080..U+07FF: bytes C2..DF 80..BF + case 0xC2: + case 0xC3: + case 0xC4: + case 0xC5: + case 0xC6: + case 0xC7: + case 0xC8: + case 0xC9: + case 0xCA: + case 0xCB: + case 0xCC: + case 0xCD: + case 0xCE: + case 0xCF: + case 0xD0: + case 0xD1: + case 0xD2: + case 0xD3: + case 0xD4: + case 0xD5: + case 0xD6: + case 0xD7: + case 0xD8: + case 0xD9: + case 0xDA: + case 0xDB: + case 0xDC: + case 0xDD: + case 0xDE: + case 0xDF: + { + if (JSON_HEDLEY_UNLIKELY(!next_byte_in_range({0x80, 0xBF}))) + { + return token_type::parse_error; + } + break; + } + + // U+0800..U+0FFF: bytes E0 A0..BF 80..BF + case 0xE0: + { + if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0xA0, 0xBF, 0x80, 0xBF})))) + { + return token_type::parse_error; + } + break; + } + + // U+1000..U+CFFF: bytes E1..EC 80..BF 80..BF + // U+E000..U+FFFF: bytes EE..EF 80..BF 80..BF + case 0xE1: + case 0xE2: + case 0xE3: + case 0xE4: + case 0xE5: + case 0xE6: + case 0xE7: + case 0xE8: + case 0xE9: + case 0xEA: + case 0xEB: + case 0xEC: + case 0xEE: + case 0xEF: + { + if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0x80, 0xBF, 0x80, 0xBF})))) + { + return token_type::parse_error; + } + break; + } + + // U+D000..U+D7FF: bytes ED 80..9F 80..BF + case 0xED: + { + if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0x80, 0x9F, 0x80, 0xBF})))) + { + return token_type::parse_error; + } + break; + } + + // U+10000..U+3FFFF F0 90..BF 80..BF 80..BF + case 0xF0: + { + if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0x90, 0xBF, 0x80, 0xBF, 0x80, 0xBF})))) + { + return token_type::parse_error; + } + break; + } + + // U+40000..U+FFFFF F1..F3 80..BF 80..BF 80..BF + case 0xF1: + case 0xF2: + case 0xF3: + { + if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0x80, 0xBF, 0x80, 0xBF, 0x80, 0xBF})))) + { + return token_type::parse_error; + } + break; + } + + // U+100000..U+10FFFF F4 80..8F 80..BF 80..BF + case 0xF4: + { + if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0x80, 0x8F, 0x80, 0xBF, 0x80, 0xBF})))) + { + return token_type::parse_error; + } + break; + } + + // remaining bytes (80..C1 and F5..FF) are ill-formed + default: + { + error_message = "invalid string: ill-formed UTF-8 byte"; + return token_type::parse_error; + } + } + } + } + + /*! 
+ * @brief scan a comment + * @return whether comment could be scanned successfully + */ + bool scan_comment() + { + switch (get()) + { + // single-line comments skip input until a newline or EOF is read + case '/': + { + while (true) + { + switch (get()) + { + case '\n': + case '\r': + case std::char_traits<char_type>::eof(): + case '\0': + return true; + + default: + break; + } + } + } + + // multi-line comments skip input until */ is read + case '*': + { + while (true) + { + switch (get()) + { + case std::char_traits<char_type>::eof(): + case '\0': + { + error_message = "invalid comment; missing closing '*/'"; + return false; + } + + case '*': + { + switch (get()) + { + case '/': + return true; + + default: + { + unget(); + continue; + } + } + } + + default: + continue; + } + } + } + + // unexpected character after reading '/' + default: + { + error_message = "invalid comment; expecting '/' or '*' after '/'"; + return false; + } + } + } + + JSON_HEDLEY_NON_NULL(2) + static void strtof(float& f, const char* str, char** endptr) noexcept + { + f = std::strtof(str, endptr); + } + + JSON_HEDLEY_NON_NULL(2) + static void strtof(double& f, const char* str, char** endptr) noexcept + { + f = std::strtod(str, endptr); + } + + JSON_HEDLEY_NON_NULL(2) + static void strtof(long double& f, const char* str, char** endptr) noexcept + { + f = std::strtold(str, endptr); + } + + /*! + @brief scan a number literal + + This function scans a string according to Sect. 6 of RFC 8259. + + The function is realized with a deterministic finite state machine derived + from the grammar described in RFC 8259. Starting in state "init", the + input is read and used to determined the next state. Only state "done" + accepts the number. State "error" is a trap state to model errors. In the + table below, "anything" means any character but the ones listed before. + + state | 0 | 1-9 | e E | + | - | . | anything + ---------|----------|----------|----------|---------|---------|----------|----------- + init | zero | any1 | [error] | [error] | minus | [error] | [error] + minus | zero | any1 | [error] | [error] | [error] | [error] | [error] + zero | done | done | exponent | done | done | decimal1 | done + any1 | any1 | any1 | exponent | done | done | decimal1 | done + decimal1 | decimal2 | decimal2 | [error] | [error] | [error] | [error] | [error] + decimal2 | decimal2 | decimal2 | exponent | done | done | done | done + exponent | any2 | any2 | [error] | sign | sign | [error] | [error] + sign | any2 | any2 | [error] | [error] | [error] | [error] | [error] + any2 | any2 | any2 | done | done | done | done | done + + The state machine is realized with one label per state (prefixed with + "scan_number_") and `goto` statements between them. The state machine + contains cycles, but any cycle can be left when EOF is read. Therefore, + the function is guaranteed to terminate. + + During scanning, the read bytes are stored in token_buffer. This string is + then converted to a signed integer, an unsigned integer, or a + floating-point number. + + @return token_type::value_unsigned, token_type::value_integer, or + token_type::value_float if number could be successfully scanned, + token_type::parse_error otherwise + + @note The scanner is independent of the current locale. Internally, the + locale's decimal point is used instead of `.` to work with the + locale-dependent converters. 
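+
+    Worked example: for the input "-12.3e+4" the scanner passes through
+    the states init -> minus -> any1 -> any1 -> decimal1 -> decimal2 ->
+    exponent -> sign -> any2 -> done. number_type is set to
+    value_integer when the minus sign is read and to value_float when
+    the decimal point (stored as the locale's decimal_point_char) and
+    the exponent are read, so the buffered token is handed to strtof()
+    and returned as token_type::value_float.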
+ */ + token_type scan_number() // lgtm [cpp/use-of-goto] + { + // reset token_buffer to store the number's bytes + reset(); + + // the type of the parsed number; initially set to unsigned; will be + // changed if minus sign, decimal point or exponent is read + token_type number_type = token_type::value_unsigned; + + // state (init): we just found out we need to scan a number + switch (current) + { + case '-': + { + add(current); + goto scan_number_minus; + } + + case '0': + { + add(current); + goto scan_number_zero; + } + + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto scan_number_any1; + } + + // all other characters are rejected outside scan_number() + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) LCOV_EXCL_LINE + } + +scan_number_minus: + // state: we just parsed a leading minus sign + number_type = token_type::value_integer; + switch (get()) + { + case '0': + { + add(current); + goto scan_number_zero; + } + + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto scan_number_any1; + } + + default: + { + error_message = "invalid number; expected digit after '-'"; + return token_type::parse_error; + } + } + +scan_number_zero: + // state: we just parse a zero (maybe with a leading minus sign) + switch (get()) + { + case '.': + { + add(decimal_point_char); + goto scan_number_decimal1; + } + + case 'e': + case 'E': + { + add(current); + goto scan_number_exponent; + } + + default: + goto scan_number_done; + } + +scan_number_any1: + // state: we just parsed a number 0-9 (maybe with a leading minus sign) + switch (get()) + { + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto scan_number_any1; + } + + case '.': + { + add(decimal_point_char); + goto scan_number_decimal1; + } + + case 'e': + case 'E': + { + add(current); + goto scan_number_exponent; + } + + default: + goto scan_number_done; + } + +scan_number_decimal1: + // state: we just parsed a decimal point + number_type = token_type::value_float; + switch (get()) + { + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto scan_number_decimal2; + } + + default: + { + error_message = "invalid number; expected digit after '.'"; + return token_type::parse_error; + } + } + +scan_number_decimal2: + // we just parsed at least one number after a decimal point + switch (get()) + { + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto scan_number_decimal2; + } + + case 'e': + case 'E': + { + add(current); + goto scan_number_exponent; + } + + default: + goto scan_number_done; + } + +scan_number_exponent: + // we just parsed an exponent + number_type = token_type::value_float; + switch (get()) + { + case '+': + case '-': + { + add(current); + goto scan_number_sign; + } + + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto scan_number_any2; + } + + default: + { + error_message = + "invalid number; expected '+', '-', or digit after exponent"; + return token_type::parse_error; + } + } + +scan_number_sign: + // we just parsed an exponent sign + switch (get()) + 
{ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto scan_number_any2; + } + + default: + { + error_message = "invalid number; expected digit after exponent sign"; + return token_type::parse_error; + } + } + +scan_number_any2: + // we just parsed a number after the exponent or exponent sign + switch (get()) + { + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto scan_number_any2; + } + + default: + goto scan_number_done; + } + +scan_number_done: + // unget the character after the number (we only read it to know that + // we are done scanning a number) + unget(); + + char* endptr = nullptr; // NOLINT(cppcoreguidelines-pro-type-vararg,hicpp-vararg) + errno = 0; + + // try to parse integers first and fall back to floats + if (number_type == token_type::value_unsigned) + { + const auto x = std::strtoull(token_buffer.data(), &endptr, 10); + + // we checked the number format before + JSON_ASSERT(endptr == token_buffer.data() + token_buffer.size()); + + if (errno == 0) + { + value_unsigned = static_cast<number_unsigned_t>(x); + if (value_unsigned == x) + { + return token_type::value_unsigned; + } + } + } + else if (number_type == token_type::value_integer) + { + const auto x = std::strtoll(token_buffer.data(), &endptr, 10); + + // we checked the number format before + JSON_ASSERT(endptr == token_buffer.data() + token_buffer.size()); + + if (errno == 0) + { + value_integer = static_cast<number_integer_t>(x); + if (value_integer == x) + { + return token_type::value_integer; + } + } + } + + // this code is reached if we parse a floating-point number or if an + // integer conversion above failed + strtof(value_float, token_buffer.data(), &endptr); + + // we checked the number format before + JSON_ASSERT(endptr == token_buffer.data() + token_buffer.size()); + + return token_type::value_float; + } + + /*! + @param[in] literal_text the literal text to expect + @param[in] length the length of the passed literal text + @param[in] return_type the token type to return on success + */ + JSON_HEDLEY_NON_NULL(2) + token_type scan_literal(const char_type* literal_text, const std::size_t length, + token_type return_type) + { + JSON_ASSERT(std::char_traits<char_type>::to_char_type(current) == literal_text[0]); + for (std::size_t i = 1; i < length; ++i) + { + if (JSON_HEDLEY_UNLIKELY(std::char_traits<char_type>::to_char_type(get()) != literal_text[i])) + { + error_message = "invalid literal"; + return token_type::parse_error; + } + } + return return_type; + } + + ///////////////////// + // input management + ///////////////////// + + /// reset token_buffer; current character is beginning of token + void reset() noexcept + { + token_buffer.clear(); + token_string.clear(); + token_string.push_back(std::char_traits<char_type>::to_char_type(current)); + } + + /* + @brief get next character from the input + + This function provides the interface to the used input adapter. It does + not throw in case the input reached EOF, but returns a + `std::char_traits<char>::eof()` in that case. Stores the scanned characters + for use in error messages. 
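+
+    Worked example: for the two-line input "[\n1]" successive calls
+    return '[', '\n', '1', ']'; when the '\n' is consumed,
+    position.lines_read becomes 1 and position.chars_read_current_line
+    is reset to 0, while position.chars_read_total keeps counting every
+    byte and ends at 4 after the closing ']'.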
+ + @return character read from the input + */ + char_int_type get() + { + ++position.chars_read_total; + ++position.chars_read_current_line; + + if (next_unget) + { + // just reset the next_unget variable and work with current + next_unget = false; + } + else + { + current = ia.get_character(); + } + + if (JSON_HEDLEY_LIKELY(current != std::char_traits<char_type>::eof())) + { + token_string.push_back(std::char_traits<char_type>::to_char_type(current)); + } + + if (current == '\n') + { + ++position.lines_read; + position.chars_read_current_line = 0; + } + + return current; + } + + /*! + @brief unget current character (read it again on next get) + + We implement unget by setting variable next_unget to true. The input is not + changed - we just simulate ungetting by modifying chars_read_total, + chars_read_current_line, and token_string. The next call to get() will + behave as if the unget character is read again. + */ + void unget() + { + next_unget = true; + + --position.chars_read_total; + + // in case we "unget" a newline, we have to also decrement the lines_read + if (position.chars_read_current_line == 0) + { + if (position.lines_read > 0) + { + --position.lines_read; + } + } + else + { + --position.chars_read_current_line; + } + + if (JSON_HEDLEY_LIKELY(current != std::char_traits<char_type>::eof())) + { + JSON_ASSERT(!token_string.empty()); + token_string.pop_back(); + } + } + + /// add a character to token_buffer + void add(char_int_type c) + { + token_buffer.push_back(static_cast<typename string_t::value_type>(c)); + } + + public: + ///////////////////// + // value getters + ///////////////////// + + /// return integer value + constexpr number_integer_t get_number_integer() const noexcept + { + return value_integer; + } + + /// return unsigned integer value + constexpr number_unsigned_t get_number_unsigned() const noexcept + { + return value_unsigned; + } + + /// return floating-point value + constexpr number_float_t get_number_float() const noexcept + { + return value_float; + } + + /// return current string value (implicitly resets the token; useful only once) + string_t& get_string() + { + return token_buffer; + } + + ///////////////////// + // diagnostics + ///////////////////// + + /// return position of last read token + constexpr position_t get_position() const noexcept + { + return position; + } + + /// return the last read token (for errors only). Will never contain EOF + /// (an arbitrary value that is not a valid char value, often -1), because + /// 255 may legitimately occur. May contain NUL, which should be escaped. + std::string get_token_string() const + { + // escape control characters + std::string result; + for (const auto c : token_string) + { + if (static_cast<unsigned char>(c) <= '\x1F') + { + // escape control characters + std::array<char, 9> cs{{}}; + static_cast<void>((std::snprintf)(cs.data(), cs.size(), "<U+%.4X>", static_cast<unsigned char>(c))); // NOLINT(cppcoreguidelines-pro-type-vararg,hicpp-vararg) + result += cs.data(); + } + else + { + // add character as is + result.push_back(static_cast<std::string::value_type>(c)); + } + } + + return result; + } + + /// return syntax error message + JSON_HEDLEY_RETURNS_NON_NULL + constexpr const char* get_error_message() const noexcept + { + return error_message; + } + + ///////////////////// + // actual scanner + ///////////////////// + + /*! 
+ @brief skip the UTF-8 byte order mark + @return true iff there is no BOM or the correct BOM has been skipped + */ + bool skip_bom() + { + if (get() == 0xEF) + { + // check if we completely parse the BOM + return get() == 0xBB && get() == 0xBF; + } + + // the first character is not the beginning of the BOM; unget it to + // process is later + unget(); + return true; + } + + void skip_whitespace() + { + do + { + get(); + } + while (current == ' ' || current == '\t' || current == '\n' || current == '\r'); + } + + token_type scan() + { + // initially, skip the BOM + if (position.chars_read_total == 0 && !skip_bom()) + { + error_message = "invalid BOM; must be 0xEF 0xBB 0xBF if given"; + return token_type::parse_error; + } + + // read next character and ignore whitespace + skip_whitespace(); + + // ignore comments + while (ignore_comments && current == '/') + { + if (!scan_comment()) + { + return token_type::parse_error; + } + + // skip following whitespace + skip_whitespace(); + } + + switch (current) + { + // structural characters + case '[': + return token_type::begin_array; + case ']': + return token_type::end_array; + case '{': + return token_type::begin_object; + case '}': + return token_type::end_object; + case ':': + return token_type::name_separator; + case ',': + return token_type::value_separator; + + // literals + case 't': + { + std::array<char_type, 4> true_literal = {{static_cast<char_type>('t'), static_cast<char_type>('r'), static_cast<char_type>('u'), static_cast<char_type>('e')}}; + return scan_literal(true_literal.data(), true_literal.size(), token_type::literal_true); + } + case 'f': + { + std::array<char_type, 5> false_literal = {{static_cast<char_type>('f'), static_cast<char_type>('a'), static_cast<char_type>('l'), static_cast<char_type>('s'), static_cast<char_type>('e')}}; + return scan_literal(false_literal.data(), false_literal.size(), token_type::literal_false); + } + case 'n': + { + std::array<char_type, 4> null_literal = {{static_cast<char_type>('n'), static_cast<char_type>('u'), static_cast<char_type>('l'), static_cast<char_type>('l')}}; + return scan_literal(null_literal.data(), null_literal.size(), token_type::literal_null); + } + + // string + case '\"': + return scan_string(); + + // number + case '-': + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + return scan_number(); + + // end of input (the null byte is needed when parsing from + // string literals) + case '\0': + case std::char_traits<char_type>::eof(): + return token_type::end_of_input; + + // error + default: + error_message = "invalid literal"; + return token_type::parse_error; + } + } + + private: + /// input adapter + InputAdapterType ia; + + /// whether comments should be ignored (true) or signaled as errors (false) + const bool ignore_comments = false; + + /// the current character + char_int_type current = std::char_traits<char_type>::eof(); + + /// whether the next get() call should just return current + bool next_unget = false; + + /// the start position of the current token + position_t position {}; + + /// raw input token string (for error messages) + std::vector<char_type> token_string {}; + + /// buffer for variable-length tokens (numbers, strings) + string_t token_buffer {}; + + /// a description of occurred lexer errors + const char* error_message = ""; + + // number values + number_integer_t value_integer = 0; + number_unsigned_t value_unsigned = 0; + number_float_t value_float = 0; + + /// the decimal point + 
const char_int_type decimal_point_char = '.'; +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/meta/is_sax.hpp> + + +#include <cstdint> // size_t +#include <utility> // declval +#include <string> // string + +// #include <nlohmann/detail/meta/detected.hpp> + +// #include <nlohmann/detail/meta/type_traits.hpp> + + +namespace nlohmann +{ +namespace detail +{ +template<typename T> +using null_function_t = decltype(std::declval<T&>().null()); + +template<typename T> +using boolean_function_t = + decltype(std::declval<T&>().boolean(std::declval<bool>())); + +template<typename T, typename Integer> +using number_integer_function_t = + decltype(std::declval<T&>().number_integer(std::declval<Integer>())); + +template<typename T, typename Unsigned> +using number_unsigned_function_t = + decltype(std::declval<T&>().number_unsigned(std::declval<Unsigned>())); + +template<typename T, typename Float, typename String> +using number_float_function_t = decltype(std::declval<T&>().number_float( + std::declval<Float>(), std::declval<const String&>())); + +template<typename T, typename String> +using string_function_t = + decltype(std::declval<T&>().string(std::declval<String&>())); + +template<typename T, typename Binary> +using binary_function_t = + decltype(std::declval<T&>().binary(std::declval<Binary&>())); + +template<typename T> +using start_object_function_t = + decltype(std::declval<T&>().start_object(std::declval<std::size_t>())); + +template<typename T, typename String> +using key_function_t = + decltype(std::declval<T&>().key(std::declval<String&>())); + +template<typename T> +using end_object_function_t = decltype(std::declval<T&>().end_object()); + +template<typename T> +using start_array_function_t = + decltype(std::declval<T&>().start_array(std::declval<std::size_t>())); + +template<typename T> +using end_array_function_t = decltype(std::declval<T&>().end_array()); + +template<typename T, typename Exception> +using parse_error_function_t = decltype(std::declval<T&>().parse_error( + std::declval<std::size_t>(), std::declval<const std::string&>(), + std::declval<const Exception&>())); + +template<typename SAX, typename BasicJsonType> +struct is_sax +{ + private: + static_assert(is_basic_json<BasicJsonType>::value, + "BasicJsonType must be of type basic_json<...>"); + + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using binary_t = typename BasicJsonType::binary_t; + using exception_t = typename BasicJsonType::exception; + + public: + static constexpr bool value = + is_detected_exact<bool, null_function_t, SAX>::value && + is_detected_exact<bool, boolean_function_t, SAX>::value && + is_detected_exact<bool, number_integer_function_t, SAX, number_integer_t>::value && + is_detected_exact<bool, number_unsigned_function_t, SAX, number_unsigned_t>::value && + is_detected_exact<bool, number_float_function_t, SAX, number_float_t, string_t>::value && + is_detected_exact<bool, string_function_t, SAX, string_t>::value && + is_detected_exact<bool, binary_function_t, SAX, binary_t>::value && + is_detected_exact<bool, start_object_function_t, SAX>::value && + is_detected_exact<bool, key_function_t, SAX, string_t>::value && + is_detected_exact<bool, end_object_function_t, SAX>::value && + is_detected_exact<bool, 
start_array_function_t, SAX>::value && + is_detected_exact<bool, end_array_function_t, SAX>::value && + is_detected_exact<bool, parse_error_function_t, SAX, exception_t>::value; +}; + +template<typename SAX, typename BasicJsonType> +struct is_sax_static_asserts +{ + private: + static_assert(is_basic_json<BasicJsonType>::value, + "BasicJsonType must be of type basic_json<...>"); + + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using binary_t = typename BasicJsonType::binary_t; + using exception_t = typename BasicJsonType::exception; + + public: + static_assert(is_detected_exact<bool, null_function_t, SAX>::value, + "Missing/invalid function: bool null()"); + static_assert(is_detected_exact<bool, boolean_function_t, SAX>::value, + "Missing/invalid function: bool boolean(bool)"); + static_assert(is_detected_exact<bool, boolean_function_t, SAX>::value, + "Missing/invalid function: bool boolean(bool)"); + static_assert( + is_detected_exact<bool, number_integer_function_t, SAX, + number_integer_t>::value, + "Missing/invalid function: bool number_integer(number_integer_t)"); + static_assert( + is_detected_exact<bool, number_unsigned_function_t, SAX, + number_unsigned_t>::value, + "Missing/invalid function: bool number_unsigned(number_unsigned_t)"); + static_assert(is_detected_exact<bool, number_float_function_t, SAX, + number_float_t, string_t>::value, + "Missing/invalid function: bool number_float(number_float_t, const string_t&)"); + static_assert( + is_detected_exact<bool, string_function_t, SAX, string_t>::value, + "Missing/invalid function: bool string(string_t&)"); + static_assert( + is_detected_exact<bool, binary_function_t, SAX, binary_t>::value, + "Missing/invalid function: bool binary(binary_t&)"); + static_assert(is_detected_exact<bool, start_object_function_t, SAX>::value, + "Missing/invalid function: bool start_object(std::size_t)"); + static_assert(is_detected_exact<bool, key_function_t, SAX, string_t>::value, + "Missing/invalid function: bool key(string_t&)"); + static_assert(is_detected_exact<bool, end_object_function_t, SAX>::value, + "Missing/invalid function: bool end_object()"); + static_assert(is_detected_exact<bool, start_array_function_t, SAX>::value, + "Missing/invalid function: bool start_array(std::size_t)"); + static_assert(is_detected_exact<bool, end_array_function_t, SAX>::value, + "Missing/invalid function: bool end_array()"); + static_assert( + is_detected_exact<bool, parse_error_function_t, SAX, exception_t>::value, + "Missing/invalid function: bool parse_error(std::size_t, const " + "std::string&, const exception&)"); +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/meta/type_traits.hpp> + +// #include <nlohmann/detail/string_concat.hpp> + +// #include <nlohmann/detail/value_t.hpp> + + +namespace nlohmann +{ +namespace detail +{ + +/// how to treat CBOR tags +enum class cbor_tag_handler_t +{ + error, ///< throw a parse_error exception in case of a tag + ignore, ///< ignore tags + store ///< store tags as binary type +}; + +/*! 
+@brief determine system byte order + +@return true if and only if system's byte order is little endian + +@note from https://stackoverflow.com/a/1001328/266378 +*/ +static inline bool little_endianness(int num = 1) noexcept +{ + return *reinterpret_cast<char*>(&num) == 1; +} + + +/////////////////// +// binary reader // +/////////////////// + +/*! +@brief deserialization of CBOR, MessagePack, and UBJSON values +*/ +template<typename BasicJsonType, typename InputAdapterType, typename SAX = json_sax_dom_parser<BasicJsonType>> +class binary_reader +{ + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using binary_t = typename BasicJsonType::binary_t; + using json_sax_t = SAX; + using char_type = typename InputAdapterType::char_type; + using char_int_type = typename std::char_traits<char_type>::int_type; + + public: + /*! + @brief create a binary reader + + @param[in] adapter input adapter to read from + */ + explicit binary_reader(InputAdapterType&& adapter, const input_format_t format = input_format_t::json) noexcept : ia(std::move(adapter)), input_format(format) + { + (void)detail::is_sax_static_asserts<SAX, BasicJsonType> {}; + } + + // make class move-only + binary_reader(const binary_reader&) = delete; + binary_reader(binary_reader&&) = default; // NOLINT(hicpp-noexcept-move,performance-noexcept-move-constructor) + binary_reader& operator=(const binary_reader&) = delete; + binary_reader& operator=(binary_reader&&) = default; // NOLINT(hicpp-noexcept-move,performance-noexcept-move-constructor) + ~binary_reader() = default; + + /*! + @param[in] format the binary format to parse + @param[in] sax_ a SAX event processor + @param[in] strict whether to expect the input to be consumed completed + @param[in] tag_handler how to treat CBOR tags + + @return whether parsing was successful + */ + JSON_HEDLEY_NON_NULL(3) + bool sax_parse(const input_format_t format, + json_sax_t* sax_, + const bool strict = true, + const cbor_tag_handler_t tag_handler = cbor_tag_handler_t::error) + { + sax = sax_; + bool result = false; + + switch (format) + { + case input_format_t::bson: + result = parse_bson_internal(); + break; + + case input_format_t::cbor: + result = parse_cbor_internal(true, tag_handler); + break; + + case input_format_t::msgpack: + result = parse_msgpack_internal(); + break; + + case input_format_t::ubjson: + case input_format_t::bjdata: + result = parse_ubjson_internal(); + break; + + case input_format_t::json: // LCOV_EXCL_LINE + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) LCOV_EXCL_LINE + } + + // strict mode: next byte must be EOF + if (result && strict) + { + if (input_format == input_format_t::ubjson || input_format == input_format_t::bjdata) + { + get_ignore_noop(); + } + else + { + get(); + } + + if (JSON_HEDLEY_UNLIKELY(current != std::char_traits<char_type>::eof())) + { + return sax->parse_error(chars_read, get_token_string(), parse_error::create(110, chars_read, + exception_message(input_format, concat("expected end of input; last byte: 0x", get_token_string()), "value"), nullptr)); + } + } + + return result; + } + + private: + ////////// + // BSON // + ////////// + + /*! + @brief Reads in a BSON-object and passes it to the SAX-parser. 
+ @return whether a valid BSON-value was passed to the SAX parser + */ + bool parse_bson_internal() + { + std::int32_t document_size{}; + get_number<std::int32_t, true>(input_format_t::bson, document_size); + + if (JSON_HEDLEY_UNLIKELY(!sax->start_object(static_cast<std::size_t>(-1)))) + { + return false; + } + + if (JSON_HEDLEY_UNLIKELY(!parse_bson_element_list(/*is_array*/false))) + { + return false; + } + + return sax->end_object(); + } + + /*! + @brief Parses a C-style string from the BSON input. + @param[in,out] result A reference to the string variable where the read + string is to be stored. + @return `true` if the \x00-byte indicating the end of the string was + encountered before the EOF; false` indicates an unexpected EOF. + */ + bool get_bson_cstr(string_t& result) + { + auto out = std::back_inserter(result); + while (true) + { + get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::bson, "cstring"))) + { + return false; + } + if (current == 0x00) + { + return true; + } + *out++ = static_cast<typename string_t::value_type>(current); + } + } + + /*! + @brief Parses a zero-terminated string of length @a len from the BSON + input. + @param[in] len The length (including the zero-byte at the end) of the + string to be read. + @param[in,out] result A reference to the string variable where the read + string is to be stored. + @tparam NumberType The type of the length @a len + @pre len >= 1 + @return `true` if the string was successfully parsed + */ + template<typename NumberType> + bool get_bson_string(const NumberType len, string_t& result) + { + if (JSON_HEDLEY_UNLIKELY(len < 1)) + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, + exception_message(input_format_t::bson, concat("string length must be at least 1, is ", std::to_string(len)), "string"), nullptr)); + } + + return get_string(input_format_t::bson, len - static_cast<NumberType>(1), result) && get() != std::char_traits<char_type>::eof(); + } + + /*! + @brief Parses a byte array input of length @a len from the BSON input. + @param[in] len The length of the byte array to be read. + @param[in,out] result A reference to the binary variable where the read + array is to be stored. + @tparam NumberType The type of the length @a len + @pre len >= 0 + @return `true` if the byte array was successfully parsed + */ + template<typename NumberType> + bool get_bson_binary(const NumberType len, binary_t& result) + { + if (JSON_HEDLEY_UNLIKELY(len < 0)) + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, + exception_message(input_format_t::bson, concat("byte array length cannot be negative, is ", std::to_string(len)), "binary"), nullptr)); + } + + // All BSON binary values have a subtype + std::uint8_t subtype{}; + get_number<std::uint8_t>(input_format_t::bson, subtype); + result.set_subtype(subtype); + + return get_binary(input_format_t::bson, len, result); + } + + /*! + @brief Read a BSON document element of the given @a element_type. + @param[in] element_type The BSON element type, c.f. http://bsonspec.org/spec.html + @param[in] element_type_parse_position The position in the input stream, + where the `element_type` was read. + @warning Not all BSON element types are supported yet. An unsupported + @a element_type will give rise to a parse_error.114: + Unsupported BSON record type 0x... 
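+
+    Worked example: the document {"a": 1} is transmitted as the 12 bytes
+
+        0C 00 00 00   int32 document size (12, little endian)
+        10            element type 0x10 (int32)
+        61 00         cstring key "a"
+        01 00 00 00   int32 value 1
+        00            end of the element list
+
+    so parse_bson_element_list() reports the key via sax->key("a") and
+    this function is then entered with element_type == 0x10, forwarding
+    the value via sax->number_integer(1).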
+ @return whether a valid BSON-object/array was passed to the SAX parser + */ + bool parse_bson_element_internal(const char_int_type element_type, + const std::size_t element_type_parse_position) + { + switch (element_type) + { + case 0x01: // double + { + double number{}; + return get_number<double, true>(input_format_t::bson, number) && sax->number_float(static_cast<number_float_t>(number), ""); + } + + case 0x02: // string + { + std::int32_t len{}; + string_t value; + return get_number<std::int32_t, true>(input_format_t::bson, len) && get_bson_string(len, value) && sax->string(value); + } + + case 0x03: // object + { + return parse_bson_internal(); + } + + case 0x04: // array + { + return parse_bson_array(); + } + + case 0x05: // binary + { + std::int32_t len{}; + binary_t value; + return get_number<std::int32_t, true>(input_format_t::bson, len) && get_bson_binary(len, value) && sax->binary(value); + } + + case 0x08: // boolean + { + return sax->boolean(get() != 0); + } + + case 0x0A: // null + { + return sax->null(); + } + + case 0x10: // int32 + { + std::int32_t value{}; + return get_number<std::int32_t, true>(input_format_t::bson, value) && sax->number_integer(value); + } + + case 0x12: // int64 + { + std::int64_t value{}; + return get_number<std::int64_t, true>(input_format_t::bson, value) && sax->number_integer(value); + } + + default: // anything else not supported (yet) + { + std::array<char, 3> cr{{}}; + static_cast<void>((std::snprintf)(cr.data(), cr.size(), "%.2hhX", static_cast<unsigned char>(element_type))); // NOLINT(cppcoreguidelines-pro-type-vararg,hicpp-vararg) + std::string cr_str{cr.data()}; + return sax->parse_error(element_type_parse_position, cr_str, + parse_error::create(114, element_type_parse_position, concat("Unsupported BSON record type 0x", cr_str), nullptr)); + } + } + } + + /*! + @brief Read a BSON element list (as specified in the BSON-spec) + + The same binary layout is used for objects and arrays, hence it must be + indicated with the argument @a is_array which one is expected + (true --> array, false --> object). + + @param[in] is_array Determines if the element list being read is to be + treated as an object (@a is_array == false), or as an + array (@a is_array == true). + @return whether a valid BSON-object/array was passed to the SAX parser + */ + bool parse_bson_element_list(const bool is_array) + { + string_t key; + + while (auto element_type = get()) + { + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::bson, "element list"))) + { + return false; + } + + const std::size_t element_type_parse_position = chars_read; + if (JSON_HEDLEY_UNLIKELY(!get_bson_cstr(key))) + { + return false; + } + + if (!is_array && !sax->key(key)) + { + return false; + } + + if (JSON_HEDLEY_UNLIKELY(!parse_bson_element_internal(element_type, element_type_parse_position))) + { + return false; + } + + // get_bson_cstr only appends + key.clear(); + } + + return true; + } + + /*! + @brief Reads an array from the BSON input and passes it to the SAX-parser. + @return whether a valid BSON-array was passed to the SAX parser + */ + bool parse_bson_array() + { + std::int32_t document_size{}; + get_number<std::int32_t, true>(input_format_t::bson, document_size); + + if (JSON_HEDLEY_UNLIKELY(!sax->start_array(static_cast<std::size_t>(-1)))) + { + return false; + } + + if (JSON_HEDLEY_UNLIKELY(!parse_bson_element_list(/*is_array*/true))) + { + return false; + } + + return sax->end_array(); + } + + ////////// + // CBOR // + ////////// + + /*! 
+ @param[in] get_char whether a new character should be retrieved from the + input (true) or whether the last read character should + be considered instead (false) + @param[in] tag_handler how CBOR tags should be treated + + @return whether a valid CBOR value was passed to the SAX parser + */ + bool parse_cbor_internal(const bool get_char, + const cbor_tag_handler_t tag_handler) + { + switch (get_char ? get() : current) + { + // EOF + case std::char_traits<char_type>::eof(): + return unexpect_eof(input_format_t::cbor, "value"); + + // Integer 0x00..0x17 (0..23) + case 0x00: + case 0x01: + case 0x02: + case 0x03: + case 0x04: + case 0x05: + case 0x06: + case 0x07: + case 0x08: + case 0x09: + case 0x0A: + case 0x0B: + case 0x0C: + case 0x0D: + case 0x0E: + case 0x0F: + case 0x10: + case 0x11: + case 0x12: + case 0x13: + case 0x14: + case 0x15: + case 0x16: + case 0x17: + return sax->number_unsigned(static_cast<number_unsigned_t>(current)); + + case 0x18: // Unsigned integer (one-byte uint8_t follows) + { + std::uint8_t number{}; + return get_number(input_format_t::cbor, number) && sax->number_unsigned(number); + } + + case 0x19: // Unsigned integer (two-byte uint16_t follows) + { + std::uint16_t number{}; + return get_number(input_format_t::cbor, number) && sax->number_unsigned(number); + } + + case 0x1A: // Unsigned integer (four-byte uint32_t follows) + { + std::uint32_t number{}; + return get_number(input_format_t::cbor, number) && sax->number_unsigned(number); + } + + case 0x1B: // Unsigned integer (eight-byte uint64_t follows) + { + std::uint64_t number{}; + return get_number(input_format_t::cbor, number) && sax->number_unsigned(number); + } + + // Negative integer -1-0x00..-1-0x17 (-1..-24) + case 0x20: + case 0x21: + case 0x22: + case 0x23: + case 0x24: + case 0x25: + case 0x26: + case 0x27: + case 0x28: + case 0x29: + case 0x2A: + case 0x2B: + case 0x2C: + case 0x2D: + case 0x2E: + case 0x2F: + case 0x30: + case 0x31: + case 0x32: + case 0x33: + case 0x34: + case 0x35: + case 0x36: + case 0x37: + return sax->number_integer(static_cast<std::int8_t>(0x20 - 1 - current)); + + case 0x38: // Negative integer (one-byte uint8_t follows) + { + std::uint8_t number{}; + return get_number(input_format_t::cbor, number) && sax->number_integer(static_cast<number_integer_t>(-1) - number); + } + + case 0x39: // Negative integer -1-n (two-byte uint16_t follows) + { + std::uint16_t number{}; + return get_number(input_format_t::cbor, number) && sax->number_integer(static_cast<number_integer_t>(-1) - number); + } + + case 0x3A: // Negative integer -1-n (four-byte uint32_t follows) + { + std::uint32_t number{}; + return get_number(input_format_t::cbor, number) && sax->number_integer(static_cast<number_integer_t>(-1) - number); + } + + case 0x3B: // Negative integer -1-n (eight-byte uint64_t follows) + { + std::uint64_t number{}; + return get_number(input_format_t::cbor, number) && sax->number_integer(static_cast<number_integer_t>(-1) + - static_cast<number_integer_t>(number)); + } + + // Binary data (0x00..0x17 bytes follow) + case 0x40: + case 0x41: + case 0x42: + case 0x43: + case 0x44: + case 0x45: + case 0x46: + case 0x47: + case 0x48: + case 0x49: + case 0x4A: + case 0x4B: + case 0x4C: + case 0x4D: + case 0x4E: + case 0x4F: + case 0x50: + case 0x51: + case 0x52: + case 0x53: + case 0x54: + case 0x55: + case 0x56: + case 0x57: + case 0x58: // Binary data (one-byte uint8_t for n follows) + case 0x59: // Binary data (two-byte uint16_t for n follow) + case 0x5A: // Binary data (four-byte uint32_t for n 
follow) + case 0x5B: // Binary data (eight-byte uint64_t for n follow) + case 0x5F: // Binary data (indefinite length) + { + binary_t b; + return get_cbor_binary(b) && sax->binary(b); + } + + // UTF-8 string (0x00..0x17 bytes follow) + case 0x60: + case 0x61: + case 0x62: + case 0x63: + case 0x64: + case 0x65: + case 0x66: + case 0x67: + case 0x68: + case 0x69: + case 0x6A: + case 0x6B: + case 0x6C: + case 0x6D: + case 0x6E: + case 0x6F: + case 0x70: + case 0x71: + case 0x72: + case 0x73: + case 0x74: + case 0x75: + case 0x76: + case 0x77: + case 0x78: // UTF-8 string (one-byte uint8_t for n follows) + case 0x79: // UTF-8 string (two-byte uint16_t for n follow) + case 0x7A: // UTF-8 string (four-byte uint32_t for n follow) + case 0x7B: // UTF-8 string (eight-byte uint64_t for n follow) + case 0x7F: // UTF-8 string (indefinite length) + { + string_t s; + return get_cbor_string(s) && sax->string(s); + } + + // array (0x00..0x17 data items follow) + case 0x80: + case 0x81: + case 0x82: + case 0x83: + case 0x84: + case 0x85: + case 0x86: + case 0x87: + case 0x88: + case 0x89: + case 0x8A: + case 0x8B: + case 0x8C: + case 0x8D: + case 0x8E: + case 0x8F: + case 0x90: + case 0x91: + case 0x92: + case 0x93: + case 0x94: + case 0x95: + case 0x96: + case 0x97: + return get_cbor_array(static_cast<std::size_t>(static_cast<unsigned int>(current) & 0x1Fu), tag_handler); + + case 0x98: // array (one-byte uint8_t for n follows) + { + std::uint8_t len{}; + return get_number(input_format_t::cbor, len) && get_cbor_array(static_cast<std::size_t>(len), tag_handler); + } + + case 0x99: // array (two-byte uint16_t for n follow) + { + std::uint16_t len{}; + return get_number(input_format_t::cbor, len) && get_cbor_array(static_cast<std::size_t>(len), tag_handler); + } + + case 0x9A: // array (four-byte uint32_t for n follow) + { + std::uint32_t len{}; + return get_number(input_format_t::cbor, len) && get_cbor_array(static_cast<std::size_t>(len), tag_handler); + } + + case 0x9B: // array (eight-byte uint64_t for n follow) + { + std::uint64_t len{}; + return get_number(input_format_t::cbor, len) && get_cbor_array(detail::conditional_static_cast<std::size_t>(len), tag_handler); + } + + case 0x9F: // array (indefinite length) + return get_cbor_array(static_cast<std::size_t>(-1), tag_handler); + + // map (0x00..0x17 pairs of data items follow) + case 0xA0: + case 0xA1: + case 0xA2: + case 0xA3: + case 0xA4: + case 0xA5: + case 0xA6: + case 0xA7: + case 0xA8: + case 0xA9: + case 0xAA: + case 0xAB: + case 0xAC: + case 0xAD: + case 0xAE: + case 0xAF: + case 0xB0: + case 0xB1: + case 0xB2: + case 0xB3: + case 0xB4: + case 0xB5: + case 0xB6: + case 0xB7: + return get_cbor_object(static_cast<std::size_t>(static_cast<unsigned int>(current) & 0x1Fu), tag_handler); + + case 0xB8: // map (one-byte uint8_t for n follows) + { + std::uint8_t len{}; + return get_number(input_format_t::cbor, len) && get_cbor_object(static_cast<std::size_t>(len), tag_handler); + } + + case 0xB9: // map (two-byte uint16_t for n follow) + { + std::uint16_t len{}; + return get_number(input_format_t::cbor, len) && get_cbor_object(static_cast<std::size_t>(len), tag_handler); + } + + case 0xBA: // map (four-byte uint32_t for n follow) + { + std::uint32_t len{}; + return get_number(input_format_t::cbor, len) && get_cbor_object(static_cast<std::size_t>(len), tag_handler); + } + + case 0xBB: // map (eight-byte uint64_t for n follow) + { + std::uint64_t len{}; + return get_number(input_format_t::cbor, len) && 
get_cbor_object(detail::conditional_static_cast<std::size_t>(len), tag_handler); + } + + case 0xBF: // map (indefinite length) + return get_cbor_object(static_cast<std::size_t>(-1), tag_handler); + + case 0xC6: // tagged item + case 0xC7: + case 0xC8: + case 0xC9: + case 0xCA: + case 0xCB: + case 0xCC: + case 0xCD: + case 0xCE: + case 0xCF: + case 0xD0: + case 0xD1: + case 0xD2: + case 0xD3: + case 0xD4: + case 0xD8: // tagged item (1 bytes follow) + case 0xD9: // tagged item (2 bytes follow) + case 0xDA: // tagged item (4 bytes follow) + case 0xDB: // tagged item (8 bytes follow) + { + switch (tag_handler) + { + case cbor_tag_handler_t::error: + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, + exception_message(input_format_t::cbor, concat("invalid byte: 0x", last_token), "value"), nullptr)); + } + + case cbor_tag_handler_t::ignore: + { + // ignore binary subtype + switch (current) + { + case 0xD8: + { + std::uint8_t subtype_to_ignore{}; + get_number(input_format_t::cbor, subtype_to_ignore); + break; + } + case 0xD9: + { + std::uint16_t subtype_to_ignore{}; + get_number(input_format_t::cbor, subtype_to_ignore); + break; + } + case 0xDA: + { + std::uint32_t subtype_to_ignore{}; + get_number(input_format_t::cbor, subtype_to_ignore); + break; + } + case 0xDB: + { + std::uint64_t subtype_to_ignore{}; + get_number(input_format_t::cbor, subtype_to_ignore); + break; + } + default: + break; + } + return parse_cbor_internal(true, tag_handler); + } + + case cbor_tag_handler_t::store: + { + binary_t b; + // use binary subtype and store in binary container + switch (current) + { + case 0xD8: + { + std::uint8_t subtype{}; + get_number(input_format_t::cbor, subtype); + b.set_subtype(detail::conditional_static_cast<typename binary_t::subtype_type>(subtype)); + break; + } + case 0xD9: + { + std::uint16_t subtype{}; + get_number(input_format_t::cbor, subtype); + b.set_subtype(detail::conditional_static_cast<typename binary_t::subtype_type>(subtype)); + break; + } + case 0xDA: + { + std::uint32_t subtype{}; + get_number(input_format_t::cbor, subtype); + b.set_subtype(detail::conditional_static_cast<typename binary_t::subtype_type>(subtype)); + break; + } + case 0xDB: + { + std::uint64_t subtype{}; + get_number(input_format_t::cbor, subtype); + b.set_subtype(detail::conditional_static_cast<typename binary_t::subtype_type>(subtype)); + break; + } + default: + return parse_cbor_internal(true, tag_handler); + } + get(); + return get_cbor_binary(b) && sax->binary(b); + } + + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) LCOV_EXCL_LINE + return false; // LCOV_EXCL_LINE + } + } + + case 0xF4: // false + return sax->boolean(false); + + case 0xF5: // true + return sax->boolean(true); + + case 0xF6: // null + return sax->null(); + + case 0xF9: // Half-Precision Float (two-byte IEEE 754) + { + const auto byte1_raw = get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::cbor, "number"))) + { + return false; + } + const auto byte2_raw = get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::cbor, "number"))) + { + return false; + } + + const auto byte1 = static_cast<unsigned char>(byte1_raw); + const auto byte2 = static_cast<unsigned char>(byte2_raw); + + // code from RFC 7049, Appendix D, Figure 3: + // As half-precision floating-point numbers were only added + // to IEEE 754 in 2008, today's programming platforms often + // still only have limited 
support for them. It is very + // easy to include at least decoding support for them even + // without such support. An example of a small decoder for + // half-precision floating-point numbers in the C language + // is shown in Fig. 3. + const auto half = static_cast<unsigned int>((byte1 << 8u) + byte2); + const double val = [&half] + { + const int exp = (half >> 10u) & 0x1Fu; + const unsigned int mant = half & 0x3FFu; + JSON_ASSERT(0 <= exp&& exp <= 32); + JSON_ASSERT(mant <= 1024); + switch (exp) + { + case 0: + return std::ldexp(mant, -24); + case 31: + return (mant == 0) + ? std::numeric_limits<double>::infinity() + : std::numeric_limits<double>::quiet_NaN(); + default: + return std::ldexp(mant + 1024, exp - 25); + } + }(); + return sax->number_float((half & 0x8000u) != 0 + ? static_cast<number_float_t>(-val) + : static_cast<number_float_t>(val), ""); + } + + case 0xFA: // Single-Precision Float (four-byte IEEE 754) + { + float number{}; + return get_number(input_format_t::cbor, number) && sax->number_float(static_cast<number_float_t>(number), ""); + } + + case 0xFB: // Double-Precision Float (eight-byte IEEE 754) + { + double number{}; + return get_number(input_format_t::cbor, number) && sax->number_float(static_cast<number_float_t>(number), ""); + } + + default: // anything else (0xFF is handled inside the other types) + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, + exception_message(input_format_t::cbor, concat("invalid byte: 0x", last_token), "value"), nullptr)); + } + } + } + + /*! + @brief reads a CBOR string + + This function first reads starting bytes to determine the expected + string length and then copies this number of bytes into a string. + Additionally, CBOR's strings with indefinite lengths are supported. 
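The 0xF9 branch above reconstructs an IEEE 754 half-precision value with the ldexp trick from RFC 7049, Appendix D. A minimal standalone sketch of that same reconstruction, kept outside the vendored header and given purely for illustration, is:

    #include <cmath>
    #include <cstdio>
    #include <limits>

    // Decode a CBOR half-precision float (initial byte 0xF9) from its two
    // payload bytes, using the same ldexp-based reconstruction as above.
    double decode_half(unsigned char byte1, unsigned char byte2)
    {
        const unsigned int half = (static_cast<unsigned int>(byte1) << 8u) + byte2;
        const int exp = static_cast<int>((half >> 10u) & 0x1Fu);
        const unsigned int mant = half & 0x3FFu;
        double val = 0.0;
        if (exp == 0)
        {
            val = std::ldexp(mant, -24);              // subnormal
        }
        else if (exp == 31)
        {
            val = (mant == 0) ? std::numeric_limits<double>::infinity()
                              : std::numeric_limits<double>::quiet_NaN();
        }
        else
        {
            val = std::ldexp(mant + 1024, exp - 25);  // normal: implicit leading bit
        }
        return (half & 0x8000u) != 0 ? -val : val;
    }

    int main()
    {
        std::printf("%g\n", decode_half(0x3C, 0x00)); // 1
        std::printf("%g\n", decode_half(0x7B, 0xFF)); // 65504, largest finite half
    }

For example, bytes 0x3C 0x00 give exp = 15 and mant = 0, so the result is ldexp(1024, -10) = 1.0.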
+ + @param[out] result created string + + @return whether string creation completed + */ + bool get_cbor_string(string_t& result) + { + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::cbor, "string"))) + { + return false; + } + + switch (current) + { + // UTF-8 string (0x00..0x17 bytes follow) + case 0x60: + case 0x61: + case 0x62: + case 0x63: + case 0x64: + case 0x65: + case 0x66: + case 0x67: + case 0x68: + case 0x69: + case 0x6A: + case 0x6B: + case 0x6C: + case 0x6D: + case 0x6E: + case 0x6F: + case 0x70: + case 0x71: + case 0x72: + case 0x73: + case 0x74: + case 0x75: + case 0x76: + case 0x77: + { + return get_string(input_format_t::cbor, static_cast<unsigned int>(current) & 0x1Fu, result); + } + + case 0x78: // UTF-8 string (one-byte uint8_t for n follows) + { + std::uint8_t len{}; + return get_number(input_format_t::cbor, len) && get_string(input_format_t::cbor, len, result); + } + + case 0x79: // UTF-8 string (two-byte uint16_t for n follow) + { + std::uint16_t len{}; + return get_number(input_format_t::cbor, len) && get_string(input_format_t::cbor, len, result); + } + + case 0x7A: // UTF-8 string (four-byte uint32_t for n follow) + { + std::uint32_t len{}; + return get_number(input_format_t::cbor, len) && get_string(input_format_t::cbor, len, result); + } + + case 0x7B: // UTF-8 string (eight-byte uint64_t for n follow) + { + std::uint64_t len{}; + return get_number(input_format_t::cbor, len) && get_string(input_format_t::cbor, len, result); + } + + case 0x7F: // UTF-8 string (indefinite length) + { + while (get() != 0xFF) + { + string_t chunk; + if (!get_cbor_string(chunk)) + { + return false; + } + result.append(chunk); + } + return true; + } + + default: + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(113, chars_read, + exception_message(input_format_t::cbor, concat("expected length specification (0x60-0x7B) or indefinite string type (0x7F); last byte: 0x", last_token), "string"), nullptr)); + } + } + } + + /*! + @brief reads a CBOR byte array + + This function first reads starting bytes to determine the expected + byte array length and then copies this number of bytes into the byte array. + Additionally, CBOR's byte arrays with indefinite lengths are supported. 
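get_cbor_string above concatenates the chunks of an indefinite-length string (initial byte 0x7F, terminated by the 0xFF break marker). A small round trip shows the effect; json::from_cbor is assumed here to be the usual public wrapper defined elsewhere in this header, and the snippet is an illustration, not part of the patched file:

    #include <nlohmann/json.hpp>
    #include <cstdint>
    #include <iostream>
    #include <vector>

    int main()
    {
        // Indefinite-length text string: 0x7F, then two definite chunks
        // "Hel" (0x63 ...) and "lo" (0x62 ...), then the 0xFF break marker.
        const std::vector<std::uint8_t> cbor = {
            0x7F,
            0x63, 'H', 'e', 'l',
            0x62, 'l', 'o',
            0xFF
        };
        std::cout << nlohmann::json::from_cbor(cbor) << '\n'; // "Hello"
    }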
+ + @param[out] result created byte array + + @return whether byte array creation completed + */ + bool get_cbor_binary(binary_t& result) + { + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::cbor, "binary"))) + { + return false; + } + + switch (current) + { + // Binary data (0x00..0x17 bytes follow) + case 0x40: + case 0x41: + case 0x42: + case 0x43: + case 0x44: + case 0x45: + case 0x46: + case 0x47: + case 0x48: + case 0x49: + case 0x4A: + case 0x4B: + case 0x4C: + case 0x4D: + case 0x4E: + case 0x4F: + case 0x50: + case 0x51: + case 0x52: + case 0x53: + case 0x54: + case 0x55: + case 0x56: + case 0x57: + { + return get_binary(input_format_t::cbor, static_cast<unsigned int>(current) & 0x1Fu, result); + } + + case 0x58: // Binary data (one-byte uint8_t for n follows) + { + std::uint8_t len{}; + return get_number(input_format_t::cbor, len) && + get_binary(input_format_t::cbor, len, result); + } + + case 0x59: // Binary data (two-byte uint16_t for n follow) + { + std::uint16_t len{}; + return get_number(input_format_t::cbor, len) && + get_binary(input_format_t::cbor, len, result); + } + + case 0x5A: // Binary data (four-byte uint32_t for n follow) + { + std::uint32_t len{}; + return get_number(input_format_t::cbor, len) && + get_binary(input_format_t::cbor, len, result); + } + + case 0x5B: // Binary data (eight-byte uint64_t for n follow) + { + std::uint64_t len{}; + return get_number(input_format_t::cbor, len) && + get_binary(input_format_t::cbor, len, result); + } + + case 0x5F: // Binary data (indefinite length) + { + while (get() != 0xFF) + { + binary_t chunk; + if (!get_cbor_binary(chunk)) + { + return false; + } + result.insert(result.end(), chunk.begin(), chunk.end()); + } + return true; + } + + default: + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(113, chars_read, + exception_message(input_format_t::cbor, concat("expected length specification (0x40-0x5B) or indefinite binary array type (0x5F); last byte: 0x", last_token), "binary"), nullptr)); + } + } + } + + /*! + @param[in] len the length of the array or static_cast<std::size_t>(-1) for an + array of indefinite size + @param[in] tag_handler how CBOR tags should be treated + @return whether array creation completed + */ + bool get_cbor_array(const std::size_t len, + const cbor_tag_handler_t tag_handler) + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_array(len))) + { + return false; + } + + if (len != static_cast<std::size_t>(-1)) + { + for (std::size_t i = 0; i < len; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!parse_cbor_internal(true, tag_handler))) + { + return false; + } + } + } + else + { + while (get() != 0xFF) + { + if (JSON_HEDLEY_UNLIKELY(!parse_cbor_internal(false, tag_handler))) + { + return false; + } + } + } + + return sax->end_array(); + } + + /*! 
+ @param[in] len the length of the object or static_cast<std::size_t>(-1) for an + object of indefinite size + @param[in] tag_handler how CBOR tags should be treated + @return whether object creation completed + */ + bool get_cbor_object(const std::size_t len, + const cbor_tag_handler_t tag_handler) + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_object(len))) + { + return false; + } + + if (len != 0) + { + string_t key; + if (len != static_cast<std::size_t>(-1)) + { + for (std::size_t i = 0; i < len; ++i) + { + get(); + if (JSON_HEDLEY_UNLIKELY(!get_cbor_string(key) || !sax->key(key))) + { + return false; + } + + if (JSON_HEDLEY_UNLIKELY(!parse_cbor_internal(true, tag_handler))) + { + return false; + } + key.clear(); + } + } + else + { + while (get() != 0xFF) + { + if (JSON_HEDLEY_UNLIKELY(!get_cbor_string(key) || !sax->key(key))) + { + return false; + } + + if (JSON_HEDLEY_UNLIKELY(!parse_cbor_internal(true, tag_handler))) + { + return false; + } + key.clear(); + } + } + } + + return sax->end_object(); + } + + ///////////// + // MsgPack // + ///////////// + + /*! + @return whether a valid MessagePack value was passed to the SAX parser + */ + bool parse_msgpack_internal() + { + switch (get()) + { + // EOF + case std::char_traits<char_type>::eof(): + return unexpect_eof(input_format_t::msgpack, "value"); + + // positive fixint + case 0x00: + case 0x01: + case 0x02: + case 0x03: + case 0x04: + case 0x05: + case 0x06: + case 0x07: + case 0x08: + case 0x09: + case 0x0A: + case 0x0B: + case 0x0C: + case 0x0D: + case 0x0E: + case 0x0F: + case 0x10: + case 0x11: + case 0x12: + case 0x13: + case 0x14: + case 0x15: + case 0x16: + case 0x17: + case 0x18: + case 0x19: + case 0x1A: + case 0x1B: + case 0x1C: + case 0x1D: + case 0x1E: + case 0x1F: + case 0x20: + case 0x21: + case 0x22: + case 0x23: + case 0x24: + case 0x25: + case 0x26: + case 0x27: + case 0x28: + case 0x29: + case 0x2A: + case 0x2B: + case 0x2C: + case 0x2D: + case 0x2E: + case 0x2F: + case 0x30: + case 0x31: + case 0x32: + case 0x33: + case 0x34: + case 0x35: + case 0x36: + case 0x37: + case 0x38: + case 0x39: + case 0x3A: + case 0x3B: + case 0x3C: + case 0x3D: + case 0x3E: + case 0x3F: + case 0x40: + case 0x41: + case 0x42: + case 0x43: + case 0x44: + case 0x45: + case 0x46: + case 0x47: + case 0x48: + case 0x49: + case 0x4A: + case 0x4B: + case 0x4C: + case 0x4D: + case 0x4E: + case 0x4F: + case 0x50: + case 0x51: + case 0x52: + case 0x53: + case 0x54: + case 0x55: + case 0x56: + case 0x57: + case 0x58: + case 0x59: + case 0x5A: + case 0x5B: + case 0x5C: + case 0x5D: + case 0x5E: + case 0x5F: + case 0x60: + case 0x61: + case 0x62: + case 0x63: + case 0x64: + case 0x65: + case 0x66: + case 0x67: + case 0x68: + case 0x69: + case 0x6A: + case 0x6B: + case 0x6C: + case 0x6D: + case 0x6E: + case 0x6F: + case 0x70: + case 0x71: + case 0x72: + case 0x73: + case 0x74: + case 0x75: + case 0x76: + case 0x77: + case 0x78: + case 0x79: + case 0x7A: + case 0x7B: + case 0x7C: + case 0x7D: + case 0x7E: + case 0x7F: + return sax->number_unsigned(static_cast<number_unsigned_t>(current)); + + // fixmap + case 0x80: + case 0x81: + case 0x82: + case 0x83: + case 0x84: + case 0x85: + case 0x86: + case 0x87: + case 0x88: + case 0x89: + case 0x8A: + case 0x8B: + case 0x8C: + case 0x8D: + case 0x8E: + case 0x8F: + return get_msgpack_object(static_cast<std::size_t>(static_cast<unsigned int>(current) & 0x0Fu)); + + // fixarray + case 0x90: + case 0x91: + case 0x92: + case 0x93: + case 0x94: + case 0x95: + case 0x96: + case 0x97: + case 0x98: + case 0x99: + case 
0x9A: + case 0x9B: + case 0x9C: + case 0x9D: + case 0x9E: + case 0x9F: + return get_msgpack_array(static_cast<std::size_t>(static_cast<unsigned int>(current) & 0x0Fu)); + + // fixstr + case 0xA0: + case 0xA1: + case 0xA2: + case 0xA3: + case 0xA4: + case 0xA5: + case 0xA6: + case 0xA7: + case 0xA8: + case 0xA9: + case 0xAA: + case 0xAB: + case 0xAC: + case 0xAD: + case 0xAE: + case 0xAF: + case 0xB0: + case 0xB1: + case 0xB2: + case 0xB3: + case 0xB4: + case 0xB5: + case 0xB6: + case 0xB7: + case 0xB8: + case 0xB9: + case 0xBA: + case 0xBB: + case 0xBC: + case 0xBD: + case 0xBE: + case 0xBF: + case 0xD9: // str 8 + case 0xDA: // str 16 + case 0xDB: // str 32 + { + string_t s; + return get_msgpack_string(s) && sax->string(s); + } + + case 0xC0: // nil + return sax->null(); + + case 0xC2: // false + return sax->boolean(false); + + case 0xC3: // true + return sax->boolean(true); + + case 0xC4: // bin 8 + case 0xC5: // bin 16 + case 0xC6: // bin 32 + case 0xC7: // ext 8 + case 0xC8: // ext 16 + case 0xC9: // ext 32 + case 0xD4: // fixext 1 + case 0xD5: // fixext 2 + case 0xD6: // fixext 4 + case 0xD7: // fixext 8 + case 0xD8: // fixext 16 + { + binary_t b; + return get_msgpack_binary(b) && sax->binary(b); + } + + case 0xCA: // float 32 + { + float number{}; + return get_number(input_format_t::msgpack, number) && sax->number_float(static_cast<number_float_t>(number), ""); + } + + case 0xCB: // float 64 + { + double number{}; + return get_number(input_format_t::msgpack, number) && sax->number_float(static_cast<number_float_t>(number), ""); + } + + case 0xCC: // uint 8 + { + std::uint8_t number{}; + return get_number(input_format_t::msgpack, number) && sax->number_unsigned(number); + } + + case 0xCD: // uint 16 + { + std::uint16_t number{}; + return get_number(input_format_t::msgpack, number) && sax->number_unsigned(number); + } + + case 0xCE: // uint 32 + { + std::uint32_t number{}; + return get_number(input_format_t::msgpack, number) && sax->number_unsigned(number); + } + + case 0xCF: // uint 64 + { + std::uint64_t number{}; + return get_number(input_format_t::msgpack, number) && sax->number_unsigned(number); + } + + case 0xD0: // int 8 + { + std::int8_t number{}; + return get_number(input_format_t::msgpack, number) && sax->number_integer(number); + } + + case 0xD1: // int 16 + { + std::int16_t number{}; + return get_number(input_format_t::msgpack, number) && sax->number_integer(number); + } + + case 0xD2: // int 32 + { + std::int32_t number{}; + return get_number(input_format_t::msgpack, number) && sax->number_integer(number); + } + + case 0xD3: // int 64 + { + std::int64_t number{}; + return get_number(input_format_t::msgpack, number) && sax->number_integer(number); + } + + case 0xDC: // array 16 + { + std::uint16_t len{}; + return get_number(input_format_t::msgpack, len) && get_msgpack_array(static_cast<std::size_t>(len)); + } + + case 0xDD: // array 32 + { + std::uint32_t len{}; + return get_number(input_format_t::msgpack, len) && get_msgpack_array(static_cast<std::size_t>(len)); + } + + case 0xDE: // map 16 + { + std::uint16_t len{}; + return get_number(input_format_t::msgpack, len) && get_msgpack_object(static_cast<std::size_t>(len)); + } + + case 0xDF: // map 32 + { + std::uint32_t len{}; + return get_number(input_format_t::msgpack, len) && get_msgpack_object(static_cast<std::size_t>(len)); + } + + // negative fixint + case 0xE0: + case 0xE1: + case 0xE2: + case 0xE3: + case 0xE4: + case 0xE5: + case 0xE6: + case 0xE7: + case 0xE8: + case 0xE9: + case 0xEA: + case 0xEB: + case 0xEC: + 
case 0xED: + case 0xEE: + case 0xEF: + case 0xF0: + case 0xF1: + case 0xF2: + case 0xF3: + case 0xF4: + case 0xF5: + case 0xF6: + case 0xF7: + case 0xF8: + case 0xF9: + case 0xFA: + case 0xFB: + case 0xFC: + case 0xFD: + case 0xFE: + case 0xFF: + return sax->number_integer(static_cast<std::int8_t>(current)); + + default: // anything else + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, + exception_message(input_format_t::msgpack, concat("invalid byte: 0x", last_token), "value"), nullptr)); + } + } + } + + /*! + @brief reads a MessagePack string + + This function first reads starting bytes to determine the expected + string length and then copies this number of bytes into a string. + + @param[out] result created string + + @return whether string creation completed + */ + bool get_msgpack_string(string_t& result) + { + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::msgpack, "string"))) + { + return false; + } + + switch (current) + { + // fixstr + case 0xA0: + case 0xA1: + case 0xA2: + case 0xA3: + case 0xA4: + case 0xA5: + case 0xA6: + case 0xA7: + case 0xA8: + case 0xA9: + case 0xAA: + case 0xAB: + case 0xAC: + case 0xAD: + case 0xAE: + case 0xAF: + case 0xB0: + case 0xB1: + case 0xB2: + case 0xB3: + case 0xB4: + case 0xB5: + case 0xB6: + case 0xB7: + case 0xB8: + case 0xB9: + case 0xBA: + case 0xBB: + case 0xBC: + case 0xBD: + case 0xBE: + case 0xBF: + { + return get_string(input_format_t::msgpack, static_cast<unsigned int>(current) & 0x1Fu, result); + } + + case 0xD9: // str 8 + { + std::uint8_t len{}; + return get_number(input_format_t::msgpack, len) && get_string(input_format_t::msgpack, len, result); + } + + case 0xDA: // str 16 + { + std::uint16_t len{}; + return get_number(input_format_t::msgpack, len) && get_string(input_format_t::msgpack, len, result); + } + + case 0xDB: // str 32 + { + std::uint32_t len{}; + return get_number(input_format_t::msgpack, len) && get_string(input_format_t::msgpack, len, result); + } + + default: + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(113, chars_read, + exception_message(input_format_t::msgpack, concat("expected length specification (0xA0-0xBF, 0xD9-0xDB); last byte: 0x", last_token), "string"), nullptr)); + } + } + } + + /*! + @brief reads a MessagePack byte array + + This function first reads starting bytes to determine the expected + byte array length and then copies this number of bytes into a byte array. 
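The ext family handled by get_msgpack_binary below carries a signed subtype byte in addition to the payload. Assuming the usual public helpers json::from_msgpack and basic_json::get_binary() from this header, a fixext 4 value can be inspected like this (illustration only):

    #include <nlohmann/json.hpp>
    #include <cstdint>
    #include <iostream>
    #include <vector>

    int main()
    {
        // fixext 4 is 0xD6: one subtype byte, then exactly four payload bytes.
        const std::vector<std::uint8_t> msgpack = {0xD6, 0x01, 0xDE, 0xAD, 0xBE, 0xEF};
        const auto j = nlohmann::json::from_msgpack(msgpack);
        std::cout << j.get_binary().subtype() << '\n'; // 1
        std::cout << j.get_binary().size() << '\n';    // 4
    }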
+ + @param[out] result created byte array + + @return whether byte array creation completed + */ + bool get_msgpack_binary(binary_t& result) + { + // helper function to set the subtype + auto assign_and_return_true = [&result](std::int8_t subtype) + { + result.set_subtype(static_cast<std::uint8_t>(subtype)); + return true; + }; + + switch (current) + { + case 0xC4: // bin 8 + { + std::uint8_t len{}; + return get_number(input_format_t::msgpack, len) && + get_binary(input_format_t::msgpack, len, result); + } + + case 0xC5: // bin 16 + { + std::uint16_t len{}; + return get_number(input_format_t::msgpack, len) && + get_binary(input_format_t::msgpack, len, result); + } + + case 0xC6: // bin 32 + { + std::uint32_t len{}; + return get_number(input_format_t::msgpack, len) && + get_binary(input_format_t::msgpack, len, result); + } + + case 0xC7: // ext 8 + { + std::uint8_t len{}; + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, len) && + get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, len, result) && + assign_and_return_true(subtype); + } + + case 0xC8: // ext 16 + { + std::uint16_t len{}; + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, len) && + get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, len, result) && + assign_and_return_true(subtype); + } + + case 0xC9: // ext 32 + { + std::uint32_t len{}; + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, len) && + get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, len, result) && + assign_and_return_true(subtype); + } + + case 0xD4: // fixext 1 + { + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, 1, result) && + assign_and_return_true(subtype); + } + + case 0xD5: // fixext 2 + { + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, 2, result) && + assign_and_return_true(subtype); + } + + case 0xD6: // fixext 4 + { + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, 4, result) && + assign_and_return_true(subtype); + } + + case 0xD7: // fixext 8 + { + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, 8, result) && + assign_and_return_true(subtype); + } + + case 0xD8: // fixext 16 + { + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, 16, result) && + assign_and_return_true(subtype); + } + + default: // LCOV_EXCL_LINE + return false; // LCOV_EXCL_LINE + } + } + + /*! + @param[in] len the length of the array + @return whether array creation completed + */ + bool get_msgpack_array(const std::size_t len) + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_array(len))) + { + return false; + } + + for (std::size_t i = 0; i < len; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!parse_msgpack_internal())) + { + return false; + } + } + + return sax->end_array(); + } + + /*! 
+ @param[in] len the length of the object + @return whether object creation completed + */ + bool get_msgpack_object(const std::size_t len) + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_object(len))) + { + return false; + } + + string_t key; + for (std::size_t i = 0; i < len; ++i) + { + get(); + if (JSON_HEDLEY_UNLIKELY(!get_msgpack_string(key) || !sax->key(key))) + { + return false; + } + + if (JSON_HEDLEY_UNLIKELY(!parse_msgpack_internal())) + { + return false; + } + key.clear(); + } + + return sax->end_object(); + } + + //////////// + // UBJSON // + //////////// + + /*! + @param[in] get_char whether a new character should be retrieved from the + input (true, default) or whether the last read + character should be considered instead + + @return whether a valid UBJSON value was passed to the SAX parser + */ + bool parse_ubjson_internal(const bool get_char = true) + { + return get_ubjson_value(get_char ? get_ignore_noop() : current); + } + + /*! + @brief reads a UBJSON string + + This function is either called after reading the 'S' byte explicitly + indicating a string, or in case of an object key where the 'S' byte can be + left out. + + @param[out] result created string + @param[in] get_char whether a new character should be retrieved from the + input (true, default) or whether the last read + character should be considered instead + + @return whether string creation completed + */ + bool get_ubjson_string(string_t& result, const bool get_char = true) + { + if (get_char) + { + get(); // TODO(niels): may we ignore N here? + } + + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format, "value"))) + { + return false; + } + + switch (current) + { + case 'U': + { + std::uint8_t len{}; + return get_number(input_format, len) && get_string(input_format, len, result); + } + + case 'i': + { + std::int8_t len{}; + return get_number(input_format, len) && get_string(input_format, len, result); + } + + case 'I': + { + std::int16_t len{}; + return get_number(input_format, len) && get_string(input_format, len, result); + } + + case 'l': + { + std::int32_t len{}; + return get_number(input_format, len) && get_string(input_format, len, result); + } + + case 'L': + { + std::int64_t len{}; + return get_number(input_format, len) && get_string(input_format, len, result); + } + + case 'u': + { + if (input_format != input_format_t::bjdata) + { + break; + } + std::uint16_t len{}; + return get_number(input_format, len) && get_string(input_format, len, result); + } + + case 'm': + { + if (input_format != input_format_t::bjdata) + { + break; + } + std::uint32_t len{}; + return get_number(input_format, len) && get_string(input_format, len, result); + } + + case 'M': + { + if (input_format != input_format_t::bjdata) + { + break; + } + std::uint64_t len{}; + return get_number(input_format, len) && get_string(input_format, len, result); + } + + default: + break; + } + auto last_token = get_token_string(); + std::string message; + + if (input_format != input_format_t::bjdata) + { + message = "expected length type specification (U, i, I, l, L); last byte: 0x" + last_token; + } + else + { + message = "expected length type specification (U, i, u, I, m, l, M, L); last byte: 0x" + last_token; + } + return sax->parse_error(chars_read, last_token, parse_error::create(113, chars_read, exception_message(input_format, message, "string"), nullptr)); + } + + /*! 
+ @param[out] dim an integer vector storing the ND array dimensions + @return whether reading ND array size vector is successful + */ + bool get_ubjson_ndarray_size(std::vector<size_t>& dim) + { + std::pair<std::size_t, char_int_type> size_and_type; + size_t dimlen = 0; + bool no_ndarray = true; + + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_size_type(size_and_type, no_ndarray))) + { + return false; + } + + if (size_and_type.first != string_t::npos) + { + if (size_and_type.second != 0) + { + if (size_and_type.second != 'N') + { + for (std::size_t i = 0; i < size_and_type.first; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_size_value(dimlen, no_ndarray, size_and_type.second))) + { + return false; + } + dim.push_back(dimlen); + } + } + } + else + { + for (std::size_t i = 0; i < size_and_type.first; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_size_value(dimlen, no_ndarray))) + { + return false; + } + dim.push_back(dimlen); + } + } + } + else + { + while (current != ']') + { + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_size_value(dimlen, no_ndarray, current))) + { + return false; + } + dim.push_back(dimlen); + get_ignore_noop(); + } + } + return true; + } + + /*! + @param[out] result determined size + @param[in,out] is_ndarray for input, `true` means already inside an ndarray vector + or ndarray dimension is not allowed; `false` means ndarray + is allowed; for output, `true` means an ndarray is found; + is_ndarray can only return `true` when its initial value + is `false` + @param[in] prefix type marker if already read, otherwise set to 0 + + @return whether size determination completed + */ + bool get_ubjson_size_value(std::size_t& result, bool& is_ndarray, char_int_type prefix = 0) + { + if (prefix == 0) + { + prefix = get_ignore_noop(); + } + + switch (prefix) + { + case 'U': + { + std::uint8_t number{}; + if (JSON_HEDLEY_UNLIKELY(!get_number(input_format, number))) + { + return false; + } + result = static_cast<std::size_t>(number); + return true; + } + + case 'i': + { + std::int8_t number{}; + if (JSON_HEDLEY_UNLIKELY(!get_number(input_format, number))) + { + return false; + } + if (number < 0) + { + return sax->parse_error(chars_read, get_token_string(), parse_error::create(113, chars_read, + exception_message(input_format, "count in an optimized container must be positive", "size"), nullptr)); + } + result = static_cast<std::size_t>(number); // NOLINT(bugprone-signed-char-misuse,cert-str34-c): number is not a char + return true; + } + + case 'I': + { + std::int16_t number{}; + if (JSON_HEDLEY_UNLIKELY(!get_number(input_format, number))) + { + return false; + } + if (number < 0) + { + return sax->parse_error(chars_read, get_token_string(), parse_error::create(113, chars_read, + exception_message(input_format, "count in an optimized container must be positive", "size"), nullptr)); + } + result = static_cast<std::size_t>(number); + return true; + } + + case 'l': + { + std::int32_t number{}; + if (JSON_HEDLEY_UNLIKELY(!get_number(input_format, number))) + { + return false; + } + if (number < 0) + { + return sax->parse_error(chars_read, get_token_string(), parse_error::create(113, chars_read, + exception_message(input_format, "count in an optimized container must be positive", "size"), nullptr)); + } + result = static_cast<std::size_t>(number); + return true; + } + + case 'L': + { + std::int64_t number{}; + if (JSON_HEDLEY_UNLIKELY(!get_number(input_format, number))) + { + return false; + } + if (number < 0) + { + return sax->parse_error(chars_read, get_token_string(), parse_error::create(113, 
chars_read, + exception_message(input_format, "count in an optimized container must be positive", "size"), nullptr)); + } + result = static_cast<std::size_t>(number); + return true; + } + + case 'u': + { + if (input_format != input_format_t::bjdata) + { + break; + } + std::uint16_t number{}; + if (JSON_HEDLEY_UNLIKELY(!get_number(input_format, number))) + { + return false; + } + result = static_cast<std::size_t>(number); + return true; + } + + case 'm': + { + if (input_format != input_format_t::bjdata) + { + break; + } + std::uint32_t number{}; + if (JSON_HEDLEY_UNLIKELY(!get_number(input_format, number))) + { + return false; + } + result = static_cast<std::size_t>(number); + return true; + } + + case 'M': + { + if (input_format != input_format_t::bjdata) + { + break; + } + std::uint64_t number{}; + if (JSON_HEDLEY_UNLIKELY(!get_number(input_format, number))) + { + return false; + } + result = detail::conditional_static_cast<std::size_t>(number); + return true; + } + + case '[': + { + if (input_format != input_format_t::bjdata) + { + break; + } + if (is_ndarray) // ndarray dimensional vector can only contain integers, and can not embed another array + { + return sax->parse_error(chars_read, get_token_string(), parse_error::create(113, chars_read, exception_message(input_format, "ndarray dimentional vector is not allowed", "size"), nullptr)); + } + std::vector<size_t> dim; + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_ndarray_size(dim))) + { + return false; + } + if (dim.size() == 1 || (dim.size() == 2 && dim.at(0) == 1)) // return normal array size if 1D row vector + { + result = dim.at(dim.size() - 1); + return true; + } + if (!dim.empty()) // if ndarray, convert to an object in JData annotated array format + { + for (auto i : dim) // test if any dimension in an ndarray is 0, if so, return a 1D empty container + { + if ( i == 0 ) + { + result = 0; + return true; + } + } + + string_t key = "_ArraySize_"; + if (JSON_HEDLEY_UNLIKELY(!sax->start_object(3) || !sax->key(key) || !sax->start_array(dim.size()))) + { + return false; + } + result = 1; + for (auto i : dim) + { + result *= i; + if (JSON_HEDLEY_UNLIKELY(!sax->number_integer(static_cast<number_integer_t>(i)))) + { + return false; + } + } + is_ndarray = true; + return sax->end_array(); + } + result = 0; + return true; + } + + default: + break; + } + auto last_token = get_token_string(); + std::string message; + + if (input_format != input_format_t::bjdata) + { + message = "expected length type specification (U, i, I, l, L) after '#'; last byte: 0x" + last_token; + } + else + { + message = "expected length type specification (U, i, u, I, m, l, M, L) after '#'; last byte: 0x" + last_token; + } + return sax->parse_error(chars_read, last_token, parse_error::create(113, chars_read, exception_message(input_format, message, "size"), nullptr)); + } + + /*! + @brief determine the type and size for a container + + In the optimized UBJSON format, a type and a size can be provided to allow + for a more compact representation. 
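In other words, an optimized UBJSON container announces its element type after '$' and its count after '#', and get_ubjson_size_type below packages both for the caller. A quick illustration, assuming the public json::from_ubjson wrapper defined elsewhere in this header:

    #include <nlohmann/json.hpp>
    #include <cstdint>
    #include <iostream>
    #include <vector>

    int main()
    {
        // '[' '$' 'i' '#' 'i' 3 declares an array of three int8 values, so the
        // three payload bytes follow without per-element type markers.
        const std::vector<std::uint8_t> ubj = {
            '[', '$', 'i', '#', 'i', 0x03,
            0x01, 0x02, 0x03
        };
        std::cout << nlohmann::json::from_ubjson(ubj) << '\n'; // [1,2,3]
    }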
+ + @param[out] result pair of the size and the type + @param[in] inside_ndarray whether the parser is parsing an ND array dimensional vector + + @return whether pair creation completed + */ + bool get_ubjson_size_type(std::pair<std::size_t, char_int_type>& result, bool inside_ndarray = false) + { + result.first = string_t::npos; // size + result.second = 0; // type + bool is_ndarray = false; + + get_ignore_noop(); + + if (current == '$') + { + std::vector<char_int_type> bjdx = {'[', '{', 'S', 'H', 'T', 'F', 'N', 'Z'}; // excluded markers in bjdata optimized type + + result.second = get(); // must not ignore 'N', because 'N' maybe the type + if (JSON_HEDLEY_UNLIKELY( input_format == input_format_t::bjdata && std::find(bjdx.begin(), bjdx.end(), result.second) != bjdx.end() )) + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, + exception_message(input_format, concat("marker 0x", last_token, " is not a permitted optimized array type"), "type"), nullptr)); + } + + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format, "type"))) + { + return false; + } + + get_ignore_noop(); + if (JSON_HEDLEY_UNLIKELY(current != '#')) + { + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format, "value"))) + { + return false; + } + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, + exception_message(input_format, concat("expected '#' after type information; last byte: 0x", last_token), "size"), nullptr)); + } + + bool is_error = get_ubjson_size_value(result.first, is_ndarray); + if (input_format == input_format_t::bjdata && is_ndarray) + { + if (inside_ndarray) + { + return sax->parse_error(chars_read, get_token_string(), parse_error::create(112, chars_read, + exception_message(input_format, "ndarray can not be recursive", "size"), nullptr)); + } + result.second |= (1 << 8); // use bit 8 to indicate ndarray, all UBJSON and BJData markers should be ASCII letters + } + return is_error; + } + + if (current == '#') + { + bool is_error = get_ubjson_size_value(result.first, is_ndarray); + if (input_format == input_format_t::bjdata && is_ndarray) + { + return sax->parse_error(chars_read, get_token_string(), parse_error::create(112, chars_read, + exception_message(input_format, "ndarray requires both type and size", "size"), nullptr)); + } + return is_error; + } + + return true; + } + + /*! 
+ @param prefix the previously read or set type prefix + @return whether value creation completed + */ + bool get_ubjson_value(const char_int_type prefix) + { + switch (prefix) + { + case std::char_traits<char_type>::eof(): // EOF + return unexpect_eof(input_format, "value"); + + case 'T': // true + return sax->boolean(true); + case 'F': // false + return sax->boolean(false); + + case 'Z': // null + return sax->null(); + + case 'U': + { + std::uint8_t number{}; + return get_number(input_format, number) && sax->number_unsigned(number); + } + + case 'i': + { + std::int8_t number{}; + return get_number(input_format, number) && sax->number_integer(number); + } + + case 'I': + { + std::int16_t number{}; + return get_number(input_format, number) && sax->number_integer(number); + } + + case 'l': + { + std::int32_t number{}; + return get_number(input_format, number) && sax->number_integer(number); + } + + case 'L': + { + std::int64_t number{}; + return get_number(input_format, number) && sax->number_integer(number); + } + + case 'u': + { + if (input_format != input_format_t::bjdata) + { + break; + } + std::uint16_t number{}; + return get_number(input_format, number) && sax->number_unsigned(number); + } + + case 'm': + { + if (input_format != input_format_t::bjdata) + { + break; + } + std::uint32_t number{}; + return get_number(input_format, number) && sax->number_unsigned(number); + } + + case 'M': + { + if (input_format != input_format_t::bjdata) + { + break; + } + std::uint64_t number{}; + return get_number(input_format, number) && sax->number_unsigned(number); + } + + case 'h': + { + if (input_format != input_format_t::bjdata) + { + break; + } + const auto byte1_raw = get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format, "number"))) + { + return false; + } + const auto byte2_raw = get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format, "number"))) + { + return false; + } + + const auto byte1 = static_cast<unsigned char>(byte1_raw); + const auto byte2 = static_cast<unsigned char>(byte2_raw); + + // code from RFC 7049, Appendix D, Figure 3: + // As half-precision floating-point numbers were only added + // to IEEE 754 in 2008, today's programming platforms often + // still only have limited support for them. It is very + // easy to include at least decoding support for them even + // without such support. An example of a small decoder for + // half-precision floating-point numbers in the C language + // is shown in Fig. 3. + const auto half = static_cast<unsigned int>((byte2 << 8u) + byte1); + const double val = [&half] + { + const int exp = (half >> 10u) & 0x1Fu; + const unsigned int mant = half & 0x3FFu; + JSON_ASSERT(0 <= exp&& exp <= 32); + JSON_ASSERT(mant <= 1024); + switch (exp) + { + case 0: + return std::ldexp(mant, -24); + case 31: + return (mant == 0) + ? std::numeric_limits<double>::infinity() + : std::numeric_limits<double>::quiet_NaN(); + default: + return std::ldexp(mant + 1024, exp - 25); + } + }(); + return sax->number_float((half & 0x8000u) != 0 + ? 
static_cast<number_float_t>(-val) + : static_cast<number_float_t>(val), ""); + } + + case 'd': + { + float number{}; + return get_number(input_format, number) && sax->number_float(static_cast<number_float_t>(number), ""); + } + + case 'D': + { + double number{}; + return get_number(input_format, number) && sax->number_float(static_cast<number_float_t>(number), ""); + } + + case 'H': + { + return get_ubjson_high_precision_number(); + } + + case 'C': // char + { + get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format, "char"))) + { + return false; + } + if (JSON_HEDLEY_UNLIKELY(current > 127)) + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(113, chars_read, + exception_message(input_format, concat("byte after 'C' must be in range 0x00..0x7F; last byte: 0x", last_token), "char"), nullptr)); + } + string_t s(1, static_cast<typename string_t::value_type>(current)); + return sax->string(s); + } + + case 'S': // string + { + string_t s; + return get_ubjson_string(s) && sax->string(s); + } + + case '[': // array + return get_ubjson_array(); + + case '{': // object + return get_ubjson_object(); + + default: // anything else + break; + } + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, exception_message(input_format, "invalid byte: 0x" + last_token, "value"), nullptr)); + } + + /*! + @return whether array creation completed + */ + bool get_ubjson_array() + { + std::pair<std::size_t, char_int_type> size_and_type; + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_size_type(size_and_type))) + { + return false; + } + + // if bit-8 of size_and_type.second is set to 1, encode bjdata ndarray as an object in JData annotated array format (https://github.com/NeuroJSON/jdata): + // {"_ArrayType_" : "typeid", "_ArraySize_" : [n1, n2, ...], "_ArrayData_" : [v1, v2, ...]} + + if (input_format == input_format_t::bjdata && size_and_type.first != string_t::npos && (size_and_type.second & (1 << 8)) != 0) + { + std::map<char_int_type, string_t> bjdtype = {{'U', "uint8"}, {'i', "int8"}, {'u', "uint16"}, {'I', "int16"}, + {'m', "uint32"}, {'l', "int32"}, {'M', "uint64"}, {'L', "int64"}, {'d', "single"}, {'D', "double"}, {'C', "char"} + }; + + size_and_type.second &= ~(static_cast<char_int_type>(1) << 8); // use bit 8 to indicate ndarray, here we remove the bit to restore the type marker + + string_t key = "_ArrayType_"; + if (JSON_HEDLEY_UNLIKELY(bjdtype.count(size_and_type.second) == 0)) + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, + exception_message(input_format, "invalid byte: 0x" + last_token, "type"), nullptr)); + } + + if (JSON_HEDLEY_UNLIKELY(!sax->key(key) || !sax->string(bjdtype[size_and_type.second]) )) + { + return false; + } + + if (size_and_type.second == 'C') + { + size_and_type.second = 'U'; + } + + key = "_ArrayData_"; + if (JSON_HEDLEY_UNLIKELY(!sax->key(key) || !sax->start_array(size_and_type.first) )) + { + return false; + } + + for (std::size_t i = 0; i < size_and_type.first; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_value(size_and_type.second))) + { + return false; + } + } + + return (sax->end_array() && sax->end_object()); + } + + if (size_and_type.first != string_t::npos) + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_array(size_and_type.first))) + { + return false; + } + + if (size_and_type.second != 0) + { + if (size_and_type.second != 'N') + { + for (std::size_t i = 0; 
i < size_and_type.first; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_value(size_and_type.second))) + { + return false; + } + } + } + } + else + { + for (std::size_t i = 0; i < size_and_type.first; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!parse_ubjson_internal())) + { + return false; + } + } + } + } + else + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_array(static_cast<std::size_t>(-1)))) + { + return false; + } + + while (current != ']') + { + if (JSON_HEDLEY_UNLIKELY(!parse_ubjson_internal(false))) + { + return false; + } + get_ignore_noop(); + } + } + + return sax->end_array(); + } + + /*! + @return whether object creation completed + */ + bool get_ubjson_object() + { + std::pair<std::size_t, char_int_type> size_and_type; + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_size_type(size_and_type))) + { + return false; + } + + // do not accept ND-array size in objects in BJData + if (input_format == input_format_t::bjdata && size_and_type.first != string_t::npos && (size_and_type.second & (1 << 8)) != 0) + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, + exception_message(input_format, "BJData object does not support ND-array size in optimized format", "object"), nullptr)); + } + + string_t key; + if (size_and_type.first != string_t::npos) + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_object(size_and_type.first))) + { + return false; + } + + if (size_and_type.second != 0) + { + for (std::size_t i = 0; i < size_and_type.first; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_string(key) || !sax->key(key))) + { + return false; + } + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_value(size_and_type.second))) + { + return false; + } + key.clear(); + } + } + else + { + for (std::size_t i = 0; i < size_and_type.first; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_string(key) || !sax->key(key))) + { + return false; + } + if (JSON_HEDLEY_UNLIKELY(!parse_ubjson_internal())) + { + return false; + } + key.clear(); + } + } + } + else + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_object(static_cast<std::size_t>(-1)))) + { + return false; + } + + while (current != '}') + { + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_string(key, false) || !sax->key(key))) + { + return false; + } + if (JSON_HEDLEY_UNLIKELY(!parse_ubjson_internal())) + { + return false; + } + get_ignore_noop(); + key.clear(); + } + } + + return sax->end_object(); + } + + // Note, no reader for UBJSON binary types is implemented because they do + // not exist + + bool get_ubjson_high_precision_number() + { + // get size of following number string + std::size_t size{}; + bool no_ndarray = true; + auto res = get_ubjson_size_value(size, no_ndarray); + if (JSON_HEDLEY_UNLIKELY(!res)) + { + return res; + } + + // get number string + std::vector<char> number_vector; + for (std::size_t i = 0; i < size; ++i) + { + get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format, "number"))) + { + return false; + } + number_vector.push_back(static_cast<char>(current)); + } + + // parse number string + using ia_type = decltype(detail::input_adapter(number_vector)); + auto number_lexer = detail::lexer<BasicJsonType, ia_type>(detail::input_adapter(number_vector), false); + const auto result_number = number_lexer.scan(); + const auto number_string = number_lexer.get_token_string(); + const auto result_remainder = number_lexer.scan(); + + using token_type = typename detail::lexer_base<BasicJsonType>::token_type; + + if (JSON_HEDLEY_UNLIKELY(result_remainder != token_type::end_of_input)) + { + return 
sax->parse_error(chars_read, number_string, parse_error::create(115, chars_read, + exception_message(input_format, concat("invalid number text: ", number_lexer.get_token_string()), "high-precision number"), nullptr)); + } + + switch (result_number) + { + case token_type::value_integer: + return sax->number_integer(number_lexer.get_number_integer()); + case token_type::value_unsigned: + return sax->number_unsigned(number_lexer.get_number_unsigned()); + case token_type::value_float: + return sax->number_float(number_lexer.get_number_float(), std::move(number_string)); + case token_type::uninitialized: + case token_type::literal_true: + case token_type::literal_false: + case token_type::literal_null: + case token_type::value_string: + case token_type::begin_array: + case token_type::begin_object: + case token_type::end_array: + case token_type::end_object: + case token_type::name_separator: + case token_type::value_separator: + case token_type::parse_error: + case token_type::end_of_input: + case token_type::literal_or_value: + default: + return sax->parse_error(chars_read, number_string, parse_error::create(115, chars_read, + exception_message(input_format, concat("invalid number text: ", number_lexer.get_token_string()), "high-precision number"), nullptr)); + } + } + + /////////////////////// + // Utility functions // + /////////////////////// + + /*! + @brief get next character from the input + + This function provides the interface to the used input adapter. It does + not throw in case the input reached EOF, but returns a -'ve valued + `std::char_traits<char_type>::eof()` in that case. + + @return character read from the input + */ + char_int_type get() + { + ++chars_read; + return current = ia.get_character(); + } + + /*! + @return character read from the input after ignoring all 'N' entries + */ + char_int_type get_ignore_noop() + { + do + { + get(); + } + while (current == 'N'); + + return current; + } + + /* + @brief read a number from the input + + @tparam NumberType the type of the number + @param[in] format the current format (for diagnostics) + @param[out] result number of type @a NumberType + + @return whether conversion completed + + @note This function needs to respect the system's endianness, because + bytes in CBOR, MessagePack, and UBJSON are stored in network order + (big endian) and therefore need reordering on little endian systems. + On the other hand, BSON and BJData use little endian and should reorder + on big endian systems. + */ + template<typename NumberType, bool InputIsLittleEndian = false> + bool get_number(const input_format_t format, NumberType& result) + { + // step 1: read input into array with system's byte order + std::array<std::uint8_t, sizeof(NumberType)> vec{}; + for (std::size_t i = 0; i < sizeof(NumberType); ++i) + { + get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(format, "number"))) + { + return false; + } + + // reverse byte order prior to conversion if necessary + if (is_little_endian != (InputIsLittleEndian || format == input_format_t::bjdata)) + { + vec[sizeof(NumberType) - i - 1] = static_cast<std::uint8_t>(current); + } + else + { + vec[i] = static_cast<std::uint8_t>(current); // LCOV_EXCL_LINE + } + } + + // step 2: convert array into number of type T and return + std::memcpy(&result, vec.data(), sizeof(NumberType)); + return true; + } + + /*! 
+ @brief create a string by reading characters from the input + + @tparam NumberType the type of the number + @param[in] format the current format (for diagnostics) + @param[in] len number of characters to read + @param[out] result string created by reading @a len bytes + + @return whether string creation completed + + @note We can not reserve @a len bytes for the result, because @a len + may be too large. Usually, @ref unexpect_eof() detects the end of + the input before we run out of string memory. + */ + template<typename NumberType> + bool get_string(const input_format_t format, + const NumberType len, + string_t& result) + { + bool success = true; + for (NumberType i = 0; i < len; i++) + { + get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(format, "string"))) + { + success = false; + break; + } + result.push_back(static_cast<typename string_t::value_type>(current)); + } + return success; + } + + /*! + @brief create a byte array by reading bytes from the input + + @tparam NumberType the type of the number + @param[in] format the current format (for diagnostics) + @param[in] len number of bytes to read + @param[out] result byte array created by reading @a len bytes + + @return whether byte array creation completed + + @note We can not reserve @a len bytes for the result, because @a len + may be too large. Usually, @ref unexpect_eof() detects the end of + the input before we run out of memory. + */ + template<typename NumberType> + bool get_binary(const input_format_t format, + const NumberType len, + binary_t& result) + { + bool success = true; + for (NumberType i = 0; i < len; i++) + { + get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(format, "binary"))) + { + success = false; + break; + } + result.push_back(static_cast<std::uint8_t>(current)); + } + return success; + } + + /*! + @param[in] format the current format (for diagnostics) + @param[in] context further context information (for diagnostics) + @return whether the last read character is not EOF + */ + JSON_HEDLEY_NON_NULL(3) + bool unexpect_eof(const input_format_t format, const char* context) const + { + if (JSON_HEDLEY_UNLIKELY(current == std::char_traits<char_type>::eof())) + { + return sax->parse_error(chars_read, "<end of file>", + parse_error::create(110, chars_read, exception_message(format, "unexpected end of input", context), nullptr)); + } + return true; + } + + /*! + @return a string representation of the last read byte + */ + std::string get_token_string() const + { + std::array<char, 3> cr{{}}; + static_cast<void>((std::snprintf)(cr.data(), cr.size(), "%.2hhX", static_cast<unsigned char>(current))); // NOLINT(cppcoreguidelines-pro-type-vararg,hicpp-vararg) + return std::string{cr.data()}; + } + + /*! 
+ @param[in] format the current format + @param[in] detail a detailed error message + @param[in] context further context information + @return a message string to use in the parse_error exceptions + */ + std::string exception_message(const input_format_t format, + const std::string& detail, + const std::string& context) const + { + std::string error_msg = "syntax error while parsing "; + + switch (format) + { + case input_format_t::cbor: + error_msg += "CBOR"; + break; + + case input_format_t::msgpack: + error_msg += "MessagePack"; + break; + + case input_format_t::ubjson: + error_msg += "UBJSON"; + break; + + case input_format_t::bson: + error_msg += "BSON"; + break; + + case input_format_t::bjdata: + error_msg += "BJData"; + break; + + case input_format_t::json: // LCOV_EXCL_LINE + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) LCOV_EXCL_LINE + } + + return concat(error_msg, ' ', context, ": ", detail); + } + + private: + /// input adapter + InputAdapterType ia; + + /// the current character + char_int_type current = std::char_traits<char_type>::eof(); + + /// the number of characters read + std::size_t chars_read = 0; + + /// whether we can assume little endianness + const bool is_little_endian = little_endianness(); + + /// input format + const input_format_t input_format = input_format_t::json; + + /// the SAX parser + json_sax_t* sax = nullptr; +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/input/input_adapters.hpp> + +// #include <nlohmann/detail/input/lexer.hpp> + +// #include <nlohmann/detail/input/parser.hpp> + + +#include <cmath> // isfinite +#include <cstdint> // uint8_t +#include <functional> // function +#include <string> // string +#include <utility> // move +#include <vector> // vector + +// #include <nlohmann/detail/exceptions.hpp> + +// #include <nlohmann/detail/input/input_adapters.hpp> + +// #include <nlohmann/detail/input/json_sax.hpp> + +// #include <nlohmann/detail/input/lexer.hpp> + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/meta/is_sax.hpp> + +// #include <nlohmann/detail/string_concat.hpp> + +// #include <nlohmann/detail/value_t.hpp> + + +namespace nlohmann +{ +namespace detail +{ +//////////// +// parser // +//////////// + +enum class parse_event_t : std::uint8_t +{ + /// the parser read `{` and started to process a JSON object + object_start, + /// the parser read `}` and finished processing a JSON object + object_end, + /// the parser read `[` and started to process a JSON array + array_start, + /// the parser read `]` and finished processing a JSON array + array_end, + /// the parser read a key of a value in an object + key, + /// the parser finished reading a JSON value + value +}; + +template<typename BasicJsonType> +using parser_callback_t = + std::function<bool(int /*depth*/, parse_event_t /*event*/, BasicJsonType& /*parsed*/)>; + +/*! +@brief syntax analysis + +This class implements a recursive descent parser. 
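The parse_event_t values and parser_callback_t signature above are what a filtering callback receives during recursive-descent parsing. A short sketch, assuming the public json::parse overload that accepts such a callback, drops every object member whose key is "internal":

    #include <nlohmann/json.hpp>
    #include <iostream>

    int main()
    {
        // The callback receives (depth, event, parsed); returning false discards
        // the current value -- here any key event whose key equals "internal".
        const auto cb = [](int /*depth*/, nlohmann::json::parse_event_t event,
                           nlohmann::json& parsed)
        {
            return !(event == nlohmann::json::parse_event_t::key && parsed == "internal");
        };

        const auto j = nlohmann::json::parse(R"({"internal": 42, "kept": true})", cb);
        std::cout << j << '\n'; // {"kept":true}
    }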
+*/ +template<typename BasicJsonType, typename InputAdapterType> +class parser +{ + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using lexer_t = lexer<BasicJsonType, InputAdapterType>; + using token_type = typename lexer_t::token_type; + + public: + /// a parser reading from an input adapter + explicit parser(InputAdapterType&& adapter, + const parser_callback_t<BasicJsonType> cb = nullptr, + const bool allow_exceptions_ = true, + const bool skip_comments = false) + : callback(cb) + , m_lexer(std::move(adapter), skip_comments) + , allow_exceptions(allow_exceptions_) + { + // read first token + get_token(); + } + + /*! + @brief public parser interface + + @param[in] strict whether to expect the last token to be EOF + @param[in,out] result parsed JSON value + + @throw parse_error.101 in case of an unexpected token + @throw parse_error.102 if to_unicode fails or surrogate error + @throw parse_error.103 if to_unicode fails + */ + void parse(const bool strict, BasicJsonType& result) + { + if (callback) + { + json_sax_dom_callback_parser<BasicJsonType> sdp(result, callback, allow_exceptions); + sax_parse_internal(&sdp); + + // in strict mode, input must be completely read + if (strict && (get_token() != token_type::end_of_input)) + { + sdp.parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), + exception_message(token_type::end_of_input, "value"), nullptr)); + } + + // in case of an error, return discarded value + if (sdp.is_errored()) + { + result = value_t::discarded; + return; + } + + // set top-level value to null if it was discarded by the callback + // function + if (result.is_discarded()) + { + result = nullptr; + } + } + else + { + json_sax_dom_parser<BasicJsonType> sdp(result, allow_exceptions); + sax_parse_internal(&sdp); + + // in strict mode, input must be completely read + if (strict && (get_token() != token_type::end_of_input)) + { + sdp.parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_of_input, "value"), nullptr)); + } + + // in case of an error, return discarded value + if (sdp.is_errored()) + { + result = value_t::discarded; + return; + } + } + + result.assert_invariant(); + } + + /*! 
+ @brief public accept interface + + @param[in] strict whether to expect the last token to be EOF + @return whether the input is a proper JSON text + */ + bool accept(const bool strict = true) + { + json_sax_acceptor<BasicJsonType> sax_acceptor; + return sax_parse(&sax_acceptor, strict); + } + + template<typename SAX> + JSON_HEDLEY_NON_NULL(2) + bool sax_parse(SAX* sax, const bool strict = true) + { + (void)detail::is_sax_static_asserts<SAX, BasicJsonType> {}; + const bool result = sax_parse_internal(sax); + + // strict mode: next byte must be EOF + if (result && strict && (get_token() != token_type::end_of_input)) + { + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_of_input, "value"), nullptr)); + } + + return result; + } + + private: + template<typename SAX> + JSON_HEDLEY_NON_NULL(2) + bool sax_parse_internal(SAX* sax) + { + // stack to remember the hierarchy of structured values we are parsing + // true = array; false = object + std::vector<bool> states; + // value to avoid a goto (see comment where set to true) + bool skip_to_state_evaluation = false; + + while (true) + { + if (!skip_to_state_evaluation) + { + // invariant: get_token() was called before each iteration + switch (last_token) + { + case token_type::begin_object: + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_object(static_cast<std::size_t>(-1)))) + { + return false; + } + + // closing } -> we are done + if (get_token() == token_type::end_object) + { + if (JSON_HEDLEY_UNLIKELY(!sax->end_object())) + { + return false; + } + break; + } + + // parse key + if (JSON_HEDLEY_UNLIKELY(last_token != token_type::value_string)) + { + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), exception_message(token_type::value_string, "object key"), nullptr)); + } + if (JSON_HEDLEY_UNLIKELY(!sax->key(m_lexer.get_string()))) + { + return false; + } + + // parse separator (:) + if (JSON_HEDLEY_UNLIKELY(get_token() != token_type::name_separator)) + { + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), exception_message(token_type::name_separator, "object separator"), nullptr)); + } + + // remember we are now inside an object + states.push_back(false); + + // parse values + get_token(); + continue; + } + + case token_type::begin_array: + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_array(static_cast<std::size_t>(-1)))) + { + return false; + } + + // closing ] -> we are done + if (get_token() == token_type::end_array) + { + if (JSON_HEDLEY_UNLIKELY(!sax->end_array())) + { + return false; + } + break; + } + + // remember we are now inside an array + states.push_back(true); + + // parse values (no need to call get_token) + continue; + } + + case token_type::value_float: + { + const auto res = m_lexer.get_number_float(); + + if (JSON_HEDLEY_UNLIKELY(!std::isfinite(res))) + { + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + out_of_range::create(406, concat("number overflow parsing '", m_lexer.get_token_string(), '\''), nullptr)); + } + + if (JSON_HEDLEY_UNLIKELY(!sax->number_float(res, m_lexer.get_string()))) + { + return false; + } + + break; + } + + case token_type::literal_false: + { + if (JSON_HEDLEY_UNLIKELY(!sax->boolean(false))) + { + return false; + } + break; + } + + case token_type::literal_null: + { + if (JSON_HEDLEY_UNLIKELY(!sax->null())) + { + 
return false; + } + break; + } + + case token_type::literal_true: + { + if (JSON_HEDLEY_UNLIKELY(!sax->boolean(true))) + { + return false; + } + break; + } + + case token_type::value_integer: + { + if (JSON_HEDLEY_UNLIKELY(!sax->number_integer(m_lexer.get_number_integer()))) + { + return false; + } + break; + } + + case token_type::value_string: + { + if (JSON_HEDLEY_UNLIKELY(!sax->string(m_lexer.get_string()))) + { + return false; + } + break; + } + + case token_type::value_unsigned: + { + if (JSON_HEDLEY_UNLIKELY(!sax->number_unsigned(m_lexer.get_number_unsigned()))) + { + return false; + } + break; + } + + case token_type::parse_error: + { + // using "uninitialized" to avoid "expected" message + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), exception_message(token_type::uninitialized, "value"), nullptr)); + } + + case token_type::uninitialized: + case token_type::end_array: + case token_type::end_object: + case token_type::name_separator: + case token_type::value_separator: + case token_type::end_of_input: + case token_type::literal_or_value: + default: // the last token was unexpected + { + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), exception_message(token_type::literal_or_value, "value"), nullptr)); + } + } + } + else + { + skip_to_state_evaluation = false; + } + + // we reached this line after we successfully parsed a value + if (states.empty()) + { + // empty stack: we reached the end of the hierarchy: done + return true; + } + + if (states.back()) // array + { + // comma -> next value + if (get_token() == token_type::value_separator) + { + // parse a new value + get_token(); + continue; + } + + // closing ] + if (JSON_HEDLEY_LIKELY(last_token == token_type::end_array)) + { + if (JSON_HEDLEY_UNLIKELY(!sax->end_array())) + { + return false; + } + + // We are done with this array. Before we can parse a + // new value, we need to evaluate the new state first. + // By setting skip_to_state_evaluation to false, we + // are effectively jumping to the beginning of this if. + JSON_ASSERT(!states.empty()); + states.pop_back(); + skip_to_state_evaluation = true; + continue; + } + + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_array, "array"), nullptr)); + } + + // states.back() is false -> object + + // comma -> next value + if (get_token() == token_type::value_separator) + { + // parse key + if (JSON_HEDLEY_UNLIKELY(get_token() != token_type::value_string)) + { + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), exception_message(token_type::value_string, "object key"), nullptr)); + } + + if (JSON_HEDLEY_UNLIKELY(!sax->key(m_lexer.get_string()))) + { + return false; + } + + // parse separator (:) + if (JSON_HEDLEY_UNLIKELY(get_token() != token_type::name_separator)) + { + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), exception_message(token_type::name_separator, "object separator"), nullptr)); + } + + // parse values + get_token(); + continue; + } + + // closing } + if (JSON_HEDLEY_LIKELY(last_token == token_type::end_object)) + { + if (JSON_HEDLEY_UNLIKELY(!sax->end_object())) + { + return false; + } + + // We are done with this object. 
Before we can parse a + // new value, we need to evaluate the new state first. + // By setting skip_to_state_evaluation to false, we + // are effectively jumping to the beginning of this if. + JSON_ASSERT(!states.empty()); + states.pop_back(); + skip_to_state_evaluation = true; + continue; + } + + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_object, "object"), nullptr)); + } + } + + /// get next token from lexer + token_type get_token() + { + return last_token = m_lexer.scan(); + } + + std::string exception_message(const token_type expected, const std::string& context) + { + std::string error_msg = "syntax error "; + + if (!context.empty()) + { + error_msg += concat("while parsing ", context, ' '); + } + + error_msg += "- "; + + if (last_token == token_type::parse_error) + { + error_msg += concat(m_lexer.get_error_message(), "; last read: '", + m_lexer.get_token_string(), '\''); + } + else + { + error_msg += concat("unexpected ", lexer_t::token_type_name(last_token)); + } + + if (expected != token_type::uninitialized) + { + error_msg += concat("; expected ", lexer_t::token_type_name(expected)); + } + + return error_msg; + } + + private: + /// callback function + const parser_callback_t<BasicJsonType> callback = nullptr; + /// the type of the last read token + token_type last_token = token_type::uninitialized; + /// the lexer + lexer_t m_lexer; + /// whether to throw exceptions in case of errors + const bool allow_exceptions = true; +}; + +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/iterators/internal_iterator.hpp> + + +// #include <nlohmann/detail/iterators/primitive_iterator.hpp> + + +#include <cstddef> // ptrdiff_t +#include <limits> // numeric_limits + +// #include <nlohmann/detail/macro_scope.hpp> + + +namespace nlohmann +{ +namespace detail +{ +/* +@brief an iterator for primitive JSON types + +This class models an iterator for primitive JSON types (boolean, number, +string). It's only purpose is to allow the iterator/const_iterator classes +to "iterate" over primitive values. Internally, the iterator is modeled by +a `difference_type` variable. Value begin_value (`0`) models the begin, +end_value (`1`) models past the end. 
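+
+Seen through the public container interface, a primitive JSON value therefore
+behaves like a range of length one (and a null value like an empty range). A
+small sketch, for illustration only:
+
+    nlohmann::json j = 42;                          // primitive (number) value
+    assert(std::distance(j.begin(), j.end()) == 1); // exactly one "element"
+    assert(*j.begin() == 42);                       // dereferencing yields the value itself
+    // (requires <cassert> and <iterator> in the consuming translation unit)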
+*/ +class primitive_iterator_t +{ + private: + using difference_type = std::ptrdiff_t; + static constexpr difference_type begin_value = 0; + static constexpr difference_type end_value = begin_value + 1; + + JSON_PRIVATE_UNLESS_TESTED: + /// iterator as signed integer type + difference_type m_it = (std::numeric_limits<std::ptrdiff_t>::min)(); + + public: + constexpr difference_type get_value() const noexcept + { + return m_it; + } + + /// set iterator to a defined beginning + void set_begin() noexcept + { + m_it = begin_value; + } + + /// set iterator to a defined past the end + void set_end() noexcept + { + m_it = end_value; + } + + /// return whether the iterator can be dereferenced + constexpr bool is_begin() const noexcept + { + return m_it == begin_value; + } + + /// return whether the iterator is at end + constexpr bool is_end() const noexcept + { + return m_it == end_value; + } + + friend constexpr bool operator==(primitive_iterator_t lhs, primitive_iterator_t rhs) noexcept + { + return lhs.m_it == rhs.m_it; + } + + friend constexpr bool operator<(primitive_iterator_t lhs, primitive_iterator_t rhs) noexcept + { + return lhs.m_it < rhs.m_it; + } + + primitive_iterator_t operator+(difference_type n) noexcept + { + auto result = *this; + result += n; + return result; + } + + friend constexpr difference_type operator-(primitive_iterator_t lhs, primitive_iterator_t rhs) noexcept + { + return lhs.m_it - rhs.m_it; + } + + primitive_iterator_t& operator++() noexcept + { + ++m_it; + return *this; + } + + primitive_iterator_t operator++(int)& noexcept // NOLINT(cert-dcl21-cpp) + { + auto result = *this; + ++m_it; + return result; + } + + primitive_iterator_t& operator--() noexcept + { + --m_it; + return *this; + } + + primitive_iterator_t operator--(int)& noexcept // NOLINT(cert-dcl21-cpp) + { + auto result = *this; + --m_it; + return result; + } + + primitive_iterator_t& operator+=(difference_type n) noexcept + { + m_it += n; + return *this; + } + + primitive_iterator_t& operator-=(difference_type n) noexcept + { + m_it -= n; + return *this; + } +}; +} // namespace detail +} // namespace nlohmann + + +namespace nlohmann +{ +namespace detail +{ +/*! +@brief an iterator value + +@note This structure could easily be a union, but MSVC currently does not allow +unions members with complex constructors, see https://github.com/nlohmann/json/pull/105. 
+*/ +template<typename BasicJsonType> struct internal_iterator +{ + /// iterator for JSON objects + typename BasicJsonType::object_t::iterator object_iterator {}; + /// iterator for JSON arrays + typename BasicJsonType::array_t::iterator array_iterator {}; + /// generic iterator for all other types + primitive_iterator_t primitive_iterator {}; +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/iterators/iter_impl.hpp> + + +#include <iterator> // iterator, random_access_iterator_tag, bidirectional_iterator_tag, advance, next +#include <type_traits> // conditional, is_const, remove_const + +// #include <nlohmann/detail/exceptions.hpp> + +// #include <nlohmann/detail/iterators/internal_iterator.hpp> + +// #include <nlohmann/detail/iterators/primitive_iterator.hpp> + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/meta/cpp_future.hpp> + +// #include <nlohmann/detail/meta/type_traits.hpp> + +// #include <nlohmann/detail/value_t.hpp> + + +namespace nlohmann +{ +namespace detail +{ +// forward declare, to be able to friend it later on +template<typename IteratorType> class iteration_proxy; +template<typename IteratorType> class iteration_proxy_value; + +/*! +@brief a template for a bidirectional iterator for the @ref basic_json class +This class implements a both iterators (iterator and const_iterator) for the +@ref basic_json class. +@note An iterator is called *initialized* when a pointer to a JSON value has + been set (e.g., by a constructor or a copy assignment). If the iterator is + default-constructed, it is *uninitialized* and most methods are undefined. + **The library uses assertions to detect calls on uninitialized iterators.** +@requirement The class satisfies the following concept requirements: +- +[BidirectionalIterator](https://en.cppreference.com/w/cpp/named_req/BidirectionalIterator): + The iterator that can be moved can be moved in both directions (i.e. + incremented and decremented). +@since version 1.0.0, simplified in version 2.0.9, change to bidirectional + iterators in version 3.0.0 (see https://github.com/nlohmann/json/issues/593) +*/ +template<typename BasicJsonType> +class iter_impl // NOLINT(cppcoreguidelines-special-member-functions,hicpp-special-member-functions) +{ + /// the iterator with BasicJsonType of different const-ness + using other_iter_impl = iter_impl<typename std::conditional<std::is_const<BasicJsonType>::value, typename std::remove_const<BasicJsonType>::type, const BasicJsonType>::type>; + /// allow basic_json to access private members + friend other_iter_impl; + friend BasicJsonType; + friend iteration_proxy<iter_impl>; + friend iteration_proxy_value<iter_impl>; + + using object_t = typename BasicJsonType::object_t; + using array_t = typename BasicJsonType::array_t; + // make sure BasicJsonType is basic_json or const basic_json + static_assert(is_basic_json<typename std::remove_const<BasicJsonType>::type>::value, + "iter_impl only accepts (const) basic_json"); + // superficial check for the LegacyBidirectionalIterator named requirement + static_assert(std::is_base_of<std::bidirectional_iterator_tag, std::bidirectional_iterator_tag>::value + && std::is_base_of<std::bidirectional_iterator_tag, typename array_t::iterator::iterator_category>::value, + "basic_json iterator assumes array and object type iterators satisfy the LegacyBidirectionalIterator named requirement."); + + public: + /// The std::iterator class template (used as a base class to provide typedefs) is deprecated in C++17. 
+ /// The C++ Standard has never required user-defined iterators to derive from std::iterator. + /// A user-defined iterator should provide publicly accessible typedefs named + /// iterator_category, value_type, difference_type, pointer, and reference. + /// Note that value_type is required to be non-const, even for constant iterators. + using iterator_category = std::bidirectional_iterator_tag; + + /// the type of the values when the iterator is dereferenced + using value_type = typename BasicJsonType::value_type; + /// a type to represent differences between iterators + using difference_type = typename BasicJsonType::difference_type; + /// defines a pointer to the type iterated over (value_type) + using pointer = typename std::conditional<std::is_const<BasicJsonType>::value, + typename BasicJsonType::const_pointer, + typename BasicJsonType::pointer>::type; + /// defines a reference to the type iterated over (value_type) + using reference = + typename std::conditional<std::is_const<BasicJsonType>::value, + typename BasicJsonType::const_reference, + typename BasicJsonType::reference>::type; + + iter_impl() = default; + ~iter_impl() = default; + iter_impl(iter_impl&&) noexcept = default; + iter_impl& operator=(iter_impl&&) noexcept = default; + + /*! + @brief constructor for a given JSON instance + @param[in] object pointer to a JSON object for this iterator + @pre object != nullptr + @post The iterator is initialized; i.e. `m_object != nullptr`. + */ + explicit iter_impl(pointer object) noexcept : m_object(object) + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + { + m_it.object_iterator = typename object_t::iterator(); + break; + } + + case value_t::array: + { + m_it.array_iterator = typename array_t::iterator(); + break; + } + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + m_it.primitive_iterator = primitive_iterator_t(); + break; + } + } + } + + /*! + @note The conventional copy constructor and copy assignment are implicitly + defined. Combined with the following converting constructor and + assignment, they support: (1) copy from iterator to iterator, (2) + copy from const iterator to const iterator, and (3) conversion from + iterator to const iterator. However conversion from const iterator + to iterator is not defined. + */ + + /*! + @brief const copy constructor + @param[in] other const iterator to copy from + @note This copy constructor had to be defined explicitly to circumvent a bug + occurring on msvc v19.0 compiler (VS 2015) debug build. For more + information refer to: https://github.com/nlohmann/json/issues/1608 + */ + iter_impl(const iter_impl<const BasicJsonType>& other) noexcept + : m_object(other.m_object), m_it(other.m_it) + {} + + /*! + @brief converting assignment + @param[in] other const iterator to copy from + @return const/non-const iterator + @note It is not checked whether @a other is initialized. + */ + iter_impl& operator=(const iter_impl<const BasicJsonType>& other) noexcept + { + if (&other != this) + { + m_object = other.m_object; + m_it = other.m_it; + } + return *this; + } + + /*! + @brief converting constructor + @param[in] other non-const iterator to copy from + @note It is not checked whether @a other is initialized. 
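+    @note In practice this is the constructor that makes a non-const iterator
+          convertible to a const_iterator, e.g. (illustration only)
+          `json::const_iterator cit = j.begin();`. The opposite direction,
+          const_iterator to iterator, is intentionally not provided.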
+ */ + iter_impl(const iter_impl<typename std::remove_const<BasicJsonType>::type>& other) noexcept + : m_object(other.m_object), m_it(other.m_it) + {} + + /*! + @brief converting assignment + @param[in] other non-const iterator to copy from + @return const/non-const iterator + @note It is not checked whether @a other is initialized. + */ + iter_impl& operator=(const iter_impl<typename std::remove_const<BasicJsonType>::type>& other) noexcept // NOLINT(cert-oop54-cpp) + { + m_object = other.m_object; + m_it = other.m_it; + return *this; + } + + JSON_PRIVATE_UNLESS_TESTED: + /*! + @brief set the iterator to the first value + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + void set_begin() noexcept + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + { + m_it.object_iterator = m_object->m_value.object->begin(); + break; + } + + case value_t::array: + { + m_it.array_iterator = m_object->m_value.array->begin(); + break; + } + + case value_t::null: + { + // set to end so begin()==end() is true: null is empty + m_it.primitive_iterator.set_end(); + break; + } + + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + m_it.primitive_iterator.set_begin(); + break; + } + } + } + + /*! + @brief set the iterator past the last value + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + void set_end() noexcept + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + { + m_it.object_iterator = m_object->m_value.object->end(); + break; + } + + case value_t::array: + { + m_it.array_iterator = m_object->m_value.array->end(); + break; + } + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + m_it.primitive_iterator.set_end(); + break; + } + } + } + + public: + /*! + @brief return a reference to the value pointed to by the iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + reference operator*() const + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + { + JSON_ASSERT(m_it.object_iterator != m_object->m_value.object->end()); + return m_it.object_iterator->second; + } + + case value_t::array: + { + JSON_ASSERT(m_it.array_iterator != m_object->m_value.array->end()); + return *m_it.array_iterator; + } + + case value_t::null: + JSON_THROW(invalid_iterator::create(214, "cannot get value", m_object)); + + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + if (JSON_HEDLEY_LIKELY(m_it.primitive_iterator.is_begin())) + { + return *m_object; + } + + JSON_THROW(invalid_iterator::create(214, "cannot get value", m_object)); + } + } + } + + /*! + @brief dereference the iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. 
+ */ + pointer operator->() const + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + { + JSON_ASSERT(m_it.object_iterator != m_object->m_value.object->end()); + return &(m_it.object_iterator->second); + } + + case value_t::array: + { + JSON_ASSERT(m_it.array_iterator != m_object->m_value.array->end()); + return &*m_it.array_iterator; + } + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + if (JSON_HEDLEY_LIKELY(m_it.primitive_iterator.is_begin())) + { + return m_object; + } + + JSON_THROW(invalid_iterator::create(214, "cannot get value", m_object)); + } + } + } + + /*! + @brief post-increment (it++) + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + iter_impl operator++(int)& // NOLINT(cert-dcl21-cpp) + { + auto result = *this; + ++(*this); + return result; + } + + /*! + @brief pre-increment (++it) + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + iter_impl& operator++() + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + { + std::advance(m_it.object_iterator, 1); + break; + } + + case value_t::array: + { + std::advance(m_it.array_iterator, 1); + break; + } + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + ++m_it.primitive_iterator; + break; + } + } + + return *this; + } + + /*! + @brief post-decrement (it--) + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + iter_impl operator--(int)& // NOLINT(cert-dcl21-cpp) + { + auto result = *this; + --(*this); + return result; + } + + /*! + @brief pre-decrement (--it) + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + iter_impl& operator--() + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + { + std::advance(m_it.object_iterator, -1); + break; + } + + case value_t::array: + { + std::advance(m_it.array_iterator, -1); + break; + } + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + --m_it.primitive_iterator; + break; + } + } + + return *this; + } + + /*! + @brief comparison: equal + @pre The iterator is initialized; i.e. `m_object != nullptr`. 
+ */ + template < typename IterImpl, detail::enable_if_t < (std::is_same<IterImpl, iter_impl>::value || std::is_same<IterImpl, other_iter_impl>::value), std::nullptr_t > = nullptr > + bool operator==(const IterImpl& other) const + { + // if objects are not the same, the comparison is undefined + if (JSON_HEDLEY_UNLIKELY(m_object != other.m_object)) + { + JSON_THROW(invalid_iterator::create(212, "cannot compare iterators of different containers", m_object)); + } + + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + return (m_it.object_iterator == other.m_it.object_iterator); + + case value_t::array: + return (m_it.array_iterator == other.m_it.array_iterator); + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + return (m_it.primitive_iterator == other.m_it.primitive_iterator); + } + } + + /*! + @brief comparison: not equal + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + template < typename IterImpl, detail::enable_if_t < (std::is_same<IterImpl, iter_impl>::value || std::is_same<IterImpl, other_iter_impl>::value), std::nullptr_t > = nullptr > + bool operator!=(const IterImpl& other) const + { + return !operator==(other); + } + + /*! + @brief comparison: smaller + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + bool operator<(const iter_impl& other) const + { + // if objects are not the same, the comparison is undefined + if (JSON_HEDLEY_UNLIKELY(m_object != other.m_object)) + { + JSON_THROW(invalid_iterator::create(212, "cannot compare iterators of different containers", m_object)); + } + + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + JSON_THROW(invalid_iterator::create(213, "cannot compare order of object iterators", m_object)); + + case value_t::array: + return (m_it.array_iterator < other.m_it.array_iterator); + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + return (m_it.primitive_iterator < other.m_it.primitive_iterator); + } + } + + /*! + @brief comparison: less than or equal + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + bool operator<=(const iter_impl& other) const + { + return !other.operator < (*this); + } + + /*! + @brief comparison: greater than + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + bool operator>(const iter_impl& other) const + { + return !operator<=(other); + } + + /*! + @brief comparison: greater than or equal + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + bool operator>=(const iter_impl& other) const + { + return !operator<(other); + } + + /*! + @brief add to iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. 
+ */ + iter_impl& operator+=(difference_type i) + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + JSON_THROW(invalid_iterator::create(209, "cannot use offsets with object iterators", m_object)); + + case value_t::array: + { + std::advance(m_it.array_iterator, i); + break; + } + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + m_it.primitive_iterator += i; + break; + } + } + + return *this; + } + + /*! + @brief subtract from iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + iter_impl& operator-=(difference_type i) + { + return operator+=(-i); + } + + /*! + @brief add to iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + iter_impl operator+(difference_type i) const + { + auto result = *this; + result += i; + return result; + } + + /*! + @brief addition of distance and iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + friend iter_impl operator+(difference_type i, const iter_impl& it) + { + auto result = it; + result += i; + return result; + } + + /*! + @brief subtract from iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + iter_impl operator-(difference_type i) const + { + auto result = *this; + result -= i; + return result; + } + + /*! + @brief return difference + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + difference_type operator-(const iter_impl& other) const + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + JSON_THROW(invalid_iterator::create(209, "cannot use offsets with object iterators", m_object)); + + case value_t::array: + return m_it.array_iterator - other.m_it.array_iterator; + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + return m_it.primitive_iterator - other.m_it.primitive_iterator; + } + } + + /*! + @brief access to successor + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + reference operator[](difference_type n) const + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + JSON_THROW(invalid_iterator::create(208, "cannot use operator[] for object iterators", m_object)); + + case value_t::array: + return *std::next(m_it.array_iterator, n); + + case value_t::null: + JSON_THROW(invalid_iterator::create(214, "cannot get value", m_object)); + + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + if (JSON_HEDLEY_LIKELY(m_it.primitive_iterator.get_value() == -n)) + { + return *m_object; + } + + JSON_THROW(invalid_iterator::create(214, "cannot get value", m_object)); + } + } + } + + /*! + @brief return the key of an object iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. 
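+    @note A typical use is manual iteration over an object, reading the key and
+          the mapped value from the same iterator. A hedged sketch (illustration
+          only, for an object value `j`):
+    @code{.cpp}
+    for (auto it = j.begin(); it != j.end(); ++it)
+    {
+        std::cout << it.key() << " : " << it.value() << '\n';
+    }
+    @endcode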
+ */ + const typename object_t::key_type& key() const + { + JSON_ASSERT(m_object != nullptr); + + if (JSON_HEDLEY_LIKELY(m_object->is_object())) + { + return m_it.object_iterator->first; + } + + JSON_THROW(invalid_iterator::create(207, "cannot use key() for non-object iterators", m_object)); + } + + /*! + @brief return the value of an iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + reference value() const + { + return operator*(); + } + + JSON_PRIVATE_UNLESS_TESTED: + /// associated JSON instance + pointer m_object = nullptr; + /// the actual iterator of the associated instance + internal_iterator<typename std::remove_const<BasicJsonType>::type> m_it {}; +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/iterators/iteration_proxy.hpp> + +// #include <nlohmann/detail/iterators/json_reverse_iterator.hpp> + + +#include <cstddef> // ptrdiff_t +#include <iterator> // reverse_iterator +#include <utility> // declval + +namespace nlohmann +{ +namespace detail +{ +////////////////////// +// reverse_iterator // +////////////////////// + +/*! +@brief a template for a reverse iterator class + +@tparam Base the base iterator type to reverse. Valid types are @ref +iterator (to create @ref reverse_iterator) and @ref const_iterator (to +create @ref const_reverse_iterator). + +@requirement The class satisfies the following concept requirements: +- +[BidirectionalIterator](https://en.cppreference.com/w/cpp/named_req/BidirectionalIterator): + The iterator that can be moved can be moved in both directions (i.e. + incremented and decremented). +- [OutputIterator](https://en.cppreference.com/w/cpp/named_req/OutputIterator): + It is possible to write to the pointed-to element (only if @a Base is + @ref iterator). 
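+
+In user code this type is normally obtained via @ref basic_json::rbegin() and
+@ref basic_json::rend(). A minimal sketch, for illustration only:
+
+@code{.cpp}
+nlohmann::json arr = {1, 2, 3};
+for (auto rit = arr.rbegin(); rit != arr.rend(); ++rit)
+{
+    // visits 3, then 2, then 1
+}
+@endcode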
+ +@since version 1.0.0 +*/ +template<typename Base> +class json_reverse_iterator : public std::reverse_iterator<Base> +{ + public: + using difference_type = std::ptrdiff_t; + /// shortcut to the reverse iterator adapter + using base_iterator = std::reverse_iterator<Base>; + /// the reference type for the pointed-to element + using reference = typename Base::reference; + + /// create reverse iterator from iterator + explicit json_reverse_iterator(const typename base_iterator::iterator_type& it) noexcept + : base_iterator(it) {} + + /// create reverse iterator from base class + explicit json_reverse_iterator(const base_iterator& it) noexcept : base_iterator(it) {} + + /// post-increment (it++) + json_reverse_iterator operator++(int)& // NOLINT(cert-dcl21-cpp) + { + return static_cast<json_reverse_iterator>(base_iterator::operator++(1)); + } + + /// pre-increment (++it) + json_reverse_iterator& operator++() + { + return static_cast<json_reverse_iterator&>(base_iterator::operator++()); + } + + /// post-decrement (it--) + json_reverse_iterator operator--(int)& // NOLINT(cert-dcl21-cpp) + { + return static_cast<json_reverse_iterator>(base_iterator::operator--(1)); + } + + /// pre-decrement (--it) + json_reverse_iterator& operator--() + { + return static_cast<json_reverse_iterator&>(base_iterator::operator--()); + } + + /// add to iterator + json_reverse_iterator& operator+=(difference_type i) + { + return static_cast<json_reverse_iterator&>(base_iterator::operator+=(i)); + } + + /// add to iterator + json_reverse_iterator operator+(difference_type i) const + { + return static_cast<json_reverse_iterator>(base_iterator::operator+(i)); + } + + /// subtract from iterator + json_reverse_iterator operator-(difference_type i) const + { + return static_cast<json_reverse_iterator>(base_iterator::operator-(i)); + } + + /// return difference + difference_type operator-(const json_reverse_iterator& other) const + { + return base_iterator(*this) - base_iterator(other); + } + + /// access to successor + reference operator[](difference_type n) const + { + return *(this->operator+(n)); + } + + /// return the key of an object iterator + auto key() const -> decltype(std::declval<Base>().key()) + { + auto it = --this->base(); + return it.key(); + } + + /// return the value of an iterator + reference value() const + { + auto it = --this->base(); + return it.operator * (); + } +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/iterators/primitive_iterator.hpp> + +// #include <nlohmann/detail/json_pointer.hpp> + + +#include <algorithm> // all_of +#include <cctype> // isdigit +#include <cerrno> // errno, ERANGE +#include <cstdlib> // strtoull +#include <limits> // max +#include <numeric> // accumulate +#include <string> // string +#include <utility> // move +#include <vector> // vector + +// #include <nlohmann/detail/exceptions.hpp> + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/string_concat.hpp> + +// #include <nlohmann/detail/string_escape.hpp> + +// #include <nlohmann/detail/value_t.hpp> + + +namespace nlohmann +{ + +/// @brief JSON Pointer defines a string syntax for identifying a specific value within a JSON document +/// @sa https://json.nlohmann.me/api/json_pointer/ +template<typename RefStringType> +class json_pointer +{ + // allow basic_json to access private members + NLOHMANN_BASIC_JSON_TPL_DECLARATION + friend class basic_json; + + template<typename> + friend class json_pointer; + + template<typename T> + struct string_t_helper + { + 
using type = T; + }; + + NLOHMANN_BASIC_JSON_TPL_DECLARATION + struct string_t_helper<NLOHMANN_BASIC_JSON_TPL> + { + using type = StringType; + }; + + public: + // for backwards compatibility accept BasicJsonType + using string_t = typename string_t_helper<RefStringType>::type; + + /// @brief create JSON pointer + /// @sa https://json.nlohmann.me/api/json_pointer/json_pointer/ + explicit json_pointer(const string_t& s = "") + : reference_tokens(split(s)) + {} + + /// @brief return a string representation of the JSON pointer + /// @sa https://json.nlohmann.me/api/json_pointer/to_string/ + string_t to_string() const + { + return std::accumulate(reference_tokens.begin(), reference_tokens.end(), + string_t{}, + [](const string_t& a, const string_t& b) + { + return detail::concat(a, '/', detail::escape(b)); + }); + } + + /// @brief return a string representation of the JSON pointer + /// @sa https://json.nlohmann.me/api/json_pointer/operator_string/ + operator string_t() const + { + return to_string(); + } + + /// @brief append another JSON pointer at the end of this JSON pointer + /// @sa https://json.nlohmann.me/api/json_pointer/operator_slasheq/ + json_pointer& operator/=(const json_pointer& ptr) + { + reference_tokens.insert(reference_tokens.end(), + ptr.reference_tokens.begin(), + ptr.reference_tokens.end()); + return *this; + } + + /// @brief append an unescaped reference token at the end of this JSON pointer + /// @sa https://json.nlohmann.me/api/json_pointer/operator_slasheq/ + json_pointer& operator/=(string_t token) + { + push_back(std::move(token)); + return *this; + } + + /// @brief append an array index at the end of this JSON pointer + /// @sa https://json.nlohmann.me/api/json_pointer/operator_slasheq/ + json_pointer& operator/=(std::size_t array_idx) + { + return *this /= std::to_string(array_idx); + } + + /// @brief create a new JSON pointer by appending the right JSON pointer at the end of the left JSON pointer + /// @sa https://json.nlohmann.me/api/json_pointer/operator_slash/ + friend json_pointer operator/(const json_pointer& lhs, + const json_pointer& rhs) + { + return json_pointer(lhs) /= rhs; + } + + /// @brief create a new JSON pointer by appending the unescaped token at the end of the JSON pointer + /// @sa https://json.nlohmann.me/api/json_pointer/operator_slash/ + friend json_pointer operator/(const json_pointer& lhs, string_t token) // NOLINT(performance-unnecessary-value-param) + { + return json_pointer(lhs) /= std::move(token); + } + + /// @brief create a new JSON pointer by appending the array-index-token at the end of the JSON pointer + /// @sa https://json.nlohmann.me/api/json_pointer/operator_slash/ + friend json_pointer operator/(const json_pointer& lhs, std::size_t array_idx) + { + return json_pointer(lhs) /= array_idx; + } + + /// @brief returns the parent of this JSON pointer + /// @sa https://json.nlohmann.me/api/json_pointer/parent_pointer/ + json_pointer parent_pointer() const + { + if (empty()) + { + return *this; + } + + json_pointer res = *this; + res.pop_back(); + return res; + } + + /// @brief remove last reference token + /// @sa https://json.nlohmann.me/api/json_pointer/pop_back/ + void pop_back() + { + if (JSON_HEDLEY_UNLIKELY(empty())) + { + JSON_THROW(detail::out_of_range::create(405, "JSON pointer has no parent", nullptr)); + } + + reference_tokens.pop_back(); + } + + /// @brief return last reference token + /// @sa https://json.nlohmann.me/api/json_pointer/back/ + const string_t& back() const + { + if (JSON_HEDLEY_UNLIKELY(empty())) + { + 
JSON_THROW(detail::out_of_range::create(405, "JSON pointer has no parent", nullptr)); + } + + return reference_tokens.back(); + } + + /// @brief append an unescaped token at the end of the reference pointer + /// @sa https://json.nlohmann.me/api/json_pointer/push_back/ + void push_back(const string_t& token) + { + reference_tokens.push_back(token); + } + + /// @brief append an unescaped token at the end of the reference pointer + /// @sa https://json.nlohmann.me/api/json_pointer/push_back/ + void push_back(string_t&& token) + { + reference_tokens.push_back(std::move(token)); + } + + /// @brief return whether pointer points to the root document + /// @sa https://json.nlohmann.me/api/json_pointer/empty/ + bool empty() const noexcept + { + return reference_tokens.empty(); + } + + private: + /*! + @param[in] s reference token to be converted into an array index + + @return integer representation of @a s + + @throw parse_error.106 if an array index begins with '0' + @throw parse_error.109 if an array index begins not with a digit + @throw out_of_range.404 if string @a s could not be converted to an integer + @throw out_of_range.410 if an array index exceeds size_type + */ + template<typename BasicJsonType> + static typename BasicJsonType::size_type array_index(const string_t& s) + { + using size_type = typename BasicJsonType::size_type; + + // error condition (cf. RFC 6901, Sect. 4) + if (JSON_HEDLEY_UNLIKELY(s.size() > 1 && s[0] == '0')) + { + JSON_THROW(detail::parse_error::create(106, 0, detail::concat("array index '", s, "' must not begin with '0'"), nullptr)); + } + + // error condition (cf. RFC 6901, Sect. 4) + if (JSON_HEDLEY_UNLIKELY(s.size() > 1 && !(s[0] >= '1' && s[0] <= '9'))) + { + JSON_THROW(detail::parse_error::create(109, 0, detail::concat("array index '", s, "' is not a number"), nullptr)); + } + + const char* p = s.c_str(); + char* p_end = nullptr; + errno = 0; // strtoull doesn't reset errno + unsigned long long res = std::strtoull(p, &p_end, 10); // NOLINT(runtime/int) + if (p == p_end // invalid input or empty string + || errno == ERANGE // out of range + || JSON_HEDLEY_UNLIKELY(static_cast<std::size_t>(p_end - p) != s.size())) // incomplete read + { + JSON_THROW(detail::out_of_range::create(404, detail::concat("unresolved reference token '", s, "'"), nullptr)); + } + + // only triggered on special platforms (like 32bit), see also + // https://github.com/nlohmann/json/pull/2203 + if (res >= static_cast<unsigned long long>((std::numeric_limits<size_type>::max)())) // NOLINT(runtime/int) + { + JSON_THROW(detail::out_of_range::create(410, detail::concat("array index ", s, " exceeds size_type"), nullptr)); // LCOV_EXCL_LINE + } + + return static_cast<size_type>(res); + } + + JSON_PRIVATE_UNLESS_TESTED: + json_pointer top() const + { + if (JSON_HEDLEY_UNLIKELY(empty())) + { + JSON_THROW(detail::out_of_range::create(405, "JSON pointer has no parent", nullptr)); + } + + json_pointer result = *this; + result.reference_tokens = {reference_tokens[0]}; + return result; + } + + private: + /*! + @brief create and return a reference to the pointed to value + + @complexity Linear in the number of reference tokens. 
+ + @throw parse_error.109 if array index is not a number + @throw type_error.313 if value cannot be unflattened + */ + template<typename BasicJsonType> + BasicJsonType& get_and_create(BasicJsonType& j) const + { + auto* result = &j; + + // in case no reference tokens exist, return a reference to the JSON value + // j which will be overwritten by a primitive value + for (const auto& reference_token : reference_tokens) + { + switch (result->type()) + { + case detail::value_t::null: + { + if (reference_token == "0") + { + // start a new array if reference token is 0 + result = &result->operator[](0); + } + else + { + // start a new object otherwise + result = &result->operator[](reference_token); + } + break; + } + + case detail::value_t::object: + { + // create an entry in the object + result = &result->operator[](reference_token); + break; + } + + case detail::value_t::array: + { + // create an entry in the array + result = &result->operator[](array_index<BasicJsonType>(reference_token)); + break; + } + + /* + The following code is only reached if there exists a reference + token _and_ the current value is primitive. In this case, we have + an error situation, because primitive values may only occur as + single value; that is, with an empty list of reference tokens. + */ + case detail::value_t::string: + case detail::value_t::boolean: + case detail::value_t::number_integer: + case detail::value_t::number_unsigned: + case detail::value_t::number_float: + case detail::value_t::binary: + case detail::value_t::discarded: + default: + JSON_THROW(detail::type_error::create(313, "invalid value to unflatten", &j)); + } + } + + return *result; + } + + /*! + @brief return a reference to the pointed to value + + @note This version does not throw if a value is not present, but tries to + create nested values instead. For instance, calling this function + with pointer `"/this/that"` on a null value is equivalent to calling + `operator[]("this").operator[]("that")` on that value, effectively + changing the null value to an object. + + @param[in] ptr a JSON value + + @return reference to the JSON value pointed to by the JSON pointer + + @complexity Linear in the length of the JSON pointer. + + @throw parse_error.106 if an array index begins with '0' + @throw parse_error.109 if an array index was not a number + @throw out_of_range.404 if the JSON pointer can not be resolved + */ + template<typename BasicJsonType> + BasicJsonType& get_unchecked(BasicJsonType* ptr) const + { + for (const auto& reference_token : reference_tokens) + { + // convert null values to arrays or objects before continuing + if (ptr->is_null()) + { + // check if reference token is a number + const bool nums = + std::all_of(reference_token.begin(), reference_token.end(), + [](const unsigned char x) + { + return std::isdigit(x); + }); + + // change value to array for numbers or "-" or to object otherwise + *ptr = (nums || reference_token == "-") + ? 
detail::value_t::array + : detail::value_t::object; + } + + switch (ptr->type()) + { + case detail::value_t::object: + { + // use unchecked object access + ptr = &ptr->operator[](reference_token); + break; + } + + case detail::value_t::array: + { + if (reference_token == "-") + { + // explicitly treat "-" as index beyond the end + ptr = &ptr->operator[](ptr->m_value.array->size()); + } + else + { + // convert array index to number; unchecked access + ptr = &ptr->operator[](array_index<BasicJsonType>(reference_token)); + } + break; + } + + case detail::value_t::null: + case detail::value_t::string: + case detail::value_t::boolean: + case detail::value_t::number_integer: + case detail::value_t::number_unsigned: + case detail::value_t::number_float: + case detail::value_t::binary: + case detail::value_t::discarded: + default: + JSON_THROW(detail::out_of_range::create(404, detail::concat("unresolved reference token '", reference_token, "'"), ptr)); + } + } + + return *ptr; + } + + /*! + @throw parse_error.106 if an array index begins with '0' + @throw parse_error.109 if an array index was not a number + @throw out_of_range.402 if the array index '-' is used + @throw out_of_range.404 if the JSON pointer can not be resolved + */ + template<typename BasicJsonType> + BasicJsonType& get_checked(BasicJsonType* ptr) const + { + for (const auto& reference_token : reference_tokens) + { + switch (ptr->type()) + { + case detail::value_t::object: + { + // note: at performs range check + ptr = &ptr->at(reference_token); + break; + } + + case detail::value_t::array: + { + if (JSON_HEDLEY_UNLIKELY(reference_token == "-")) + { + // "-" always fails the range check + JSON_THROW(detail::out_of_range::create(402, detail::concat( + "array index '-' (", std::to_string(ptr->m_value.array->size()), + ") is out of range"), ptr)); + } + + // note: at performs range check + ptr = &ptr->at(array_index<BasicJsonType>(reference_token)); + break; + } + + case detail::value_t::null: + case detail::value_t::string: + case detail::value_t::boolean: + case detail::value_t::number_integer: + case detail::value_t::number_unsigned: + case detail::value_t::number_float: + case detail::value_t::binary: + case detail::value_t::discarded: + default: + JSON_THROW(detail::out_of_range::create(404, detail::concat("unresolved reference token '", reference_token, "'"), ptr)); + } + } + + return *ptr; + } + + /*! 
+ @brief return a const reference to the pointed to value + + @param[in] ptr a JSON value + + @return const reference to the JSON value pointed to by the JSON + pointer + + @throw parse_error.106 if an array index begins with '0' + @throw parse_error.109 if an array index was not a number + @throw out_of_range.402 if the array index '-' is used + @throw out_of_range.404 if the JSON pointer can not be resolved + */ + template<typename BasicJsonType> + const BasicJsonType& get_unchecked(const BasicJsonType* ptr) const + { + for (const auto& reference_token : reference_tokens) + { + switch (ptr->type()) + { + case detail::value_t::object: + { + // use unchecked object access + ptr = &ptr->operator[](reference_token); + break; + } + + case detail::value_t::array: + { + if (JSON_HEDLEY_UNLIKELY(reference_token == "-")) + { + // "-" cannot be used for const access + JSON_THROW(detail::out_of_range::create(402, detail::concat("array index '-' (", std::to_string(ptr->m_value.array->size()), ") is out of range"), ptr)); + } + + // use unchecked array access + ptr = &ptr->operator[](array_index<BasicJsonType>(reference_token)); + break; + } + + case detail::value_t::null: + case detail::value_t::string: + case detail::value_t::boolean: + case detail::value_t::number_integer: + case detail::value_t::number_unsigned: + case detail::value_t::number_float: + case detail::value_t::binary: + case detail::value_t::discarded: + default: + JSON_THROW(detail::out_of_range::create(404, detail::concat("unresolved reference token '", reference_token, "'"), ptr)); + } + } + + return *ptr; + } + + /*! + @throw parse_error.106 if an array index begins with '0' + @throw parse_error.109 if an array index was not a number + @throw out_of_range.402 if the array index '-' is used + @throw out_of_range.404 if the JSON pointer can not be resolved + */ + template<typename BasicJsonType> + const BasicJsonType& get_checked(const BasicJsonType* ptr) const + { + for (const auto& reference_token : reference_tokens) + { + switch (ptr->type()) + { + case detail::value_t::object: + { + // note: at performs range check + ptr = &ptr->at(reference_token); + break; + } + + case detail::value_t::array: + { + if (JSON_HEDLEY_UNLIKELY(reference_token == "-")) + { + // "-" always fails the range check + JSON_THROW(detail::out_of_range::create(402, detail::concat( + "array index '-' (", std::to_string(ptr->m_value.array->size()), + ") is out of range"), ptr)); + } + + // note: at performs range check + ptr = &ptr->at(array_index<BasicJsonType>(reference_token)); + break; + } + + case detail::value_t::null: + case detail::value_t::string: + case detail::value_t::boolean: + case detail::value_t::number_integer: + case detail::value_t::number_unsigned: + case detail::value_t::number_float: + case detail::value_t::binary: + case detail::value_t::discarded: + default: + JSON_THROW(detail::out_of_range::create(404, detail::concat("unresolved reference token '", reference_token, "'"), ptr)); + } + } + + return *ptr; + } + + /*! 
+ @throw parse_error.106 if an array index begins with '0' + @throw parse_error.109 if an array index was not a number + */ + template<typename BasicJsonType> + bool contains(const BasicJsonType* ptr) const + { + for (const auto& reference_token : reference_tokens) + { + switch (ptr->type()) + { + case detail::value_t::object: + { + if (!ptr->contains(reference_token)) + { + // we did not find the key in the object + return false; + } + + ptr = &ptr->operator[](reference_token); + break; + } + + case detail::value_t::array: + { + if (JSON_HEDLEY_UNLIKELY(reference_token == "-")) + { + // "-" always fails the range check + return false; + } + if (JSON_HEDLEY_UNLIKELY(reference_token.size() == 1 && !("0" <= reference_token && reference_token <= "9"))) + { + // invalid char + return false; + } + if (JSON_HEDLEY_UNLIKELY(reference_token.size() > 1)) + { + if (JSON_HEDLEY_UNLIKELY(!('1' <= reference_token[0] && reference_token[0] <= '9'))) + { + // first char should be between '1' and '9' + return false; + } + for (std::size_t i = 1; i < reference_token.size(); i++) + { + if (JSON_HEDLEY_UNLIKELY(!('0' <= reference_token[i] && reference_token[i] <= '9'))) + { + // other char should be between '0' and '9' + return false; + } + } + } + + const auto idx = array_index<BasicJsonType>(reference_token); + if (idx >= ptr->size()) + { + // index out of range + return false; + } + + ptr = &ptr->operator[](idx); + break; + } + + case detail::value_t::null: + case detail::value_t::string: + case detail::value_t::boolean: + case detail::value_t::number_integer: + case detail::value_t::number_unsigned: + case detail::value_t::number_float: + case detail::value_t::binary: + case detail::value_t::discarded: + default: + { + // we do not expect primitive values if there is still a + // reference token to process + return false; + } + } + } + + // no reference token left means we found a primitive value + return true; + } + + /*! + @brief split the string input to reference tokens + + @note This function is only called by the json_pointer constructor. + All exceptions below are documented there. + + @throw parse_error.107 if the pointer is not empty or begins with '/' + @throw parse_error.108 if character '~' is not followed by '0' or '1' + */ + static std::vector<string_t> split(const string_t& reference_string) + { + std::vector<string_t> result; + + // special case: empty reference string -> no reference tokens + if (reference_string.empty()) + { + return result; + } + + // check if nonempty reference string begins with slash + if (JSON_HEDLEY_UNLIKELY(reference_string[0] != '/')) + { + JSON_THROW(detail::parse_error::create(107, 1, detail::concat("JSON pointer must be empty or begin with '/' - was: '", reference_string, "'"), nullptr)); + } + + // extract the reference tokens: + // - slash: position of the last read slash (or end of string) + // - start: position after the previous slash + for ( + // search for the first slash after the first character + std::size_t slash = reference_string.find_first_of('/', 1), + // set the beginning of the first reference token + start = 1; + // we can stop if start == 0 (if slash == string_t::npos) + start != 0; + // set the beginning of the next reference token + // (will eventually be 0 if slash == string_t::npos) + start = (slash == string_t::npos) ? 0 : slash + 1, + // find next slash + slash = reference_string.find_first_of('/', start)) + { + // use the text between the beginning of the reference token + // (start) and the last slash (slash). 
+ auto reference_token = reference_string.substr(start, slash - start); + + // check reference tokens are properly escaped + for (std::size_t pos = reference_token.find_first_of('~'); + pos != string_t::npos; + pos = reference_token.find_first_of('~', pos + 1)) + { + JSON_ASSERT(reference_token[pos] == '~'); + + // ~ must be followed by 0 or 1 + if (JSON_HEDLEY_UNLIKELY(pos == reference_token.size() - 1 || + (reference_token[pos + 1] != '0' && + reference_token[pos + 1] != '1'))) + { + JSON_THROW(detail::parse_error::create(108, 0, "escape character '~' must be followed with '0' or '1'", nullptr)); + } + } + + // finally, store the reference token + detail::unescape(reference_token); + result.push_back(reference_token); + } + + return result; + } + + private: + /*! + @param[in] reference_string the reference string to the current value + @param[in] value the value to consider + @param[in,out] result the result object to insert values to + + @note Empty objects or arrays are flattened to `null`. + */ + template<typename BasicJsonType> + static void flatten(const string_t& reference_string, + const BasicJsonType& value, + BasicJsonType& result) + { + switch (value.type()) + { + case detail::value_t::array: + { + if (value.m_value.array->empty()) + { + // flatten empty array as null + result[reference_string] = nullptr; + } + else + { + // iterate array and use index as reference string + for (std::size_t i = 0; i < value.m_value.array->size(); ++i) + { + flatten(detail::concat(reference_string, '/', std::to_string(i)), + value.m_value.array->operator[](i), result); + } + } + break; + } + + case detail::value_t::object: + { + if (value.m_value.object->empty()) + { + // flatten empty object as null + result[reference_string] = nullptr; + } + else + { + // iterate object and use keys as reference string + for (const auto& element : *value.m_value.object) + { + flatten(detail::concat(reference_string, '/', detail::escape(element.first)), element.second, result); + } + } + break; + } + + case detail::value_t::null: + case detail::value_t::string: + case detail::value_t::boolean: + case detail::value_t::number_integer: + case detail::value_t::number_unsigned: + case detail::value_t::number_float: + case detail::value_t::binary: + case detail::value_t::discarded: + default: + { + // add primitive value with its reference string + result[reference_string] = value; + break; + } + } + } + + /*! + @param[in] value flattened JSON + + @return unflattened JSON + + @throw parse_error.109 if array index is not a number + @throw type_error.314 if value is not an object + @throw type_error.315 if object values are not primitive + @throw type_error.313 if value cannot be unflattened + */ + template<typename BasicJsonType> + static BasicJsonType + unflatten(const BasicJsonType& value) + { + if (JSON_HEDLEY_UNLIKELY(!value.is_object())) + { + JSON_THROW(detail::type_error::create(314, "only objects can be unflattened", &value)); + } + + BasicJsonType result; + + // iterate the JSON object values + for (const auto& element : *value.m_value.object) + { + if (JSON_HEDLEY_UNLIKELY(!element.second.is_primitive())) + { + JSON_THROW(detail::type_error::create(315, "values in object must be primitive", &element.second)); + } + + // assign value to reference pointed to by JSON pointer; Note that if + // the JSON pointer is "" (i.e., points to the whole value), function + // get_and_create returns a reference to result itself. An assignment + // will then create a primitive value. 
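+            // e.g. the flattened entry {"/answer/everything": 42} recreates result["answer"]["everything"] = 42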
+ json_pointer(element.first).get_and_create(result) = element.second; + } + + return result; + } + + // can't use conversion operator because of ambiguity + json_pointer<string_t> convert() const& + { + json_pointer<string_t> result; + result.reference_tokens = reference_tokens; + return result; + } + + json_pointer<string_t> convert()&& + { + json_pointer<string_t> result; + result.reference_tokens = std::move(reference_tokens); + return result; + } + + /*! + @brief compares two JSON pointers for equality + + @param[in] lhs JSON pointer to compare + @param[in] rhs JSON pointer to compare + @return whether @a lhs is equal to @a rhs + + @complexity Linear in the length of the JSON pointer + + @exceptionsafety No-throw guarantee: this function never throws exceptions. + */ + template<typename RefStringTypeLhs, typename RefStringTypeRhs> + // NOLINTNEXTLINE(readability-redundant-declaration) + friend bool operator==(json_pointer<RefStringTypeLhs> const& lhs, + json_pointer<RefStringTypeRhs> const& rhs) noexcept; + + /*! + @brief compares two JSON pointers for inequality + + @param[in] lhs JSON pointer to compare + @param[in] rhs JSON pointer to compare + @return whether @a lhs is not equal @a rhs + + @complexity Linear in the length of the JSON pointer + + @exceptionsafety No-throw guarantee: this function never throws exceptions. + */ + template<typename RefStringTypeLhs, typename RefStringTypeRhs> + // NOLINTNEXTLINE(readability-redundant-declaration) + friend bool operator!=(json_pointer<RefStringTypeLhs> const& lhs, + json_pointer<RefStringTypeRhs> const& rhs) noexcept; + + /// the reference tokens + std::vector<string_t> reference_tokens; +}; + +// functions cannot be defined inside class due to ODR violations +template<typename RefStringTypeLhs, typename RefStringTypeRhs> +inline bool operator==(json_pointer<RefStringTypeLhs> const& lhs, + json_pointer<RefStringTypeRhs> const& rhs) noexcept +{ + return lhs.reference_tokens == rhs.reference_tokens; +} + +template<typename RefStringTypeLhs, typename RefStringTypeRhs> +inline bool operator!=(json_pointer<RefStringTypeLhs> const& lhs, + json_pointer<RefStringTypeRhs> const& rhs) noexcept +{ + return !(lhs == rhs); +} +} // namespace nlohmann + +// #include <nlohmann/detail/json_ref.hpp> + + +#include <initializer_list> +#include <utility> + +// #include <nlohmann/detail/meta/type_traits.hpp> + + +namespace nlohmann +{ +namespace detail +{ +template<typename BasicJsonType> +class json_ref +{ + public: + using value_type = BasicJsonType; + + json_ref(value_type&& value) + : owned_value(std::move(value)) + {} + + json_ref(const value_type& value) + : value_ref(&value) + {} + + json_ref(std::initializer_list<json_ref> init) + : owned_value(init) + {} + + template < + class... Args, + enable_if_t<std::is_constructible<value_type, Args...>::value, int> = 0 > + json_ref(Args && ... args) + : owned_value(std::forward<Args>(args)...) + {} + + // class should be movable only + json_ref(json_ref&&) noexcept = default; + json_ref(const json_ref&) = delete; + json_ref& operator=(const json_ref&) = delete; + json_ref& operator=(json_ref&&) = delete; + ~json_ref() = default; + + value_type moved_or_copied() const + { + if (value_ref == nullptr) + { + return std::move(owned_value); + } + return *value_ref; + } + + value_type const& operator*() const + { + return value_ref ? 
*value_ref : owned_value; + } + + value_type const* operator->() const + { + return &** this; + } + + private: + mutable value_type owned_value = nullptr; + value_type const* value_ref = nullptr; +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/string_concat.hpp> + +// #include <nlohmann/detail/string_escape.hpp> + +// #include <nlohmann/detail/meta/cpp_future.hpp> + +// #include <nlohmann/detail/meta/type_traits.hpp> + +// #include <nlohmann/detail/output/binary_writer.hpp> + + +#include <algorithm> // reverse +#include <array> // array +#include <map> // map +#include <cmath> // isnan, isinf +#include <cstdint> // uint8_t, uint16_t, uint32_t, uint64_t +#include <cstring> // memcpy +#include <limits> // numeric_limits +#include <string> // string +#include <utility> // move +#include <vector> // vector + +// #include <nlohmann/detail/input/binary_reader.hpp> + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/output/output_adapters.hpp> + + +#include <algorithm> // copy +#include <cstddef> // size_t +#include <iterator> // back_inserter +#include <memory> // shared_ptr, make_shared +#include <string> // basic_string +#include <vector> // vector + +#ifndef JSON_NO_IO + #include <ios> // streamsize + #include <ostream> // basic_ostream +#endif // JSON_NO_IO + +// #include <nlohmann/detail/macro_scope.hpp> + + +namespace nlohmann +{ +namespace detail +{ +/// abstract output adapter interface +template<typename CharType> struct output_adapter_protocol +{ + virtual void write_character(CharType c) = 0; + virtual void write_characters(const CharType* s, std::size_t length) = 0; + virtual ~output_adapter_protocol() = default; + + output_adapter_protocol() = default; + output_adapter_protocol(const output_adapter_protocol&) = default; + output_adapter_protocol(output_adapter_protocol&&) noexcept = default; + output_adapter_protocol& operator=(const output_adapter_protocol&) = default; + output_adapter_protocol& operator=(output_adapter_protocol&&) noexcept = default; +}; + +/// a type to simplify interfaces +template<typename CharType> +using output_adapter_t = std::shared_ptr<output_adapter_protocol<CharType>>; + +/// output adapter for byte vectors +template<typename CharType, typename AllocatorType = std::allocator<CharType>> +class output_vector_adapter : public output_adapter_protocol<CharType> +{ + public: + explicit output_vector_adapter(std::vector<CharType, AllocatorType>& vec) noexcept + : v(vec) + {} + + void write_character(CharType c) override + { + v.push_back(c); + } + + JSON_HEDLEY_NON_NULL(2) + void write_characters(const CharType* s, std::size_t length) override + { + std::copy(s, s + length, std::back_inserter(v)); + } + + private: + std::vector<CharType, AllocatorType>& v; +}; + +#ifndef JSON_NO_IO +/// output adapter for output streams +template<typename CharType> +class output_stream_adapter : public output_adapter_protocol<CharType> +{ + public: + explicit output_stream_adapter(std::basic_ostream<CharType>& s) noexcept + : stream(s) + {} + + void write_character(CharType c) override + { + stream.put(c); + } + + JSON_HEDLEY_NON_NULL(2) + void write_characters(const CharType* s, std::size_t length) override + { + stream.write(s, static_cast<std::streamsize>(length)); + } + + private: + std::basic_ostream<CharType>& stream; +}; +#endif // JSON_NO_IO + +/// output adapter for basic_string +template<typename CharType, typename StringType = std::basic_string<CharType>> 
+class output_string_adapter : public output_adapter_protocol<CharType> +{ + public: + explicit output_string_adapter(StringType& s) noexcept + : str(s) + {} + + void write_character(CharType c) override + { + str.push_back(c); + } + + JSON_HEDLEY_NON_NULL(2) + void write_characters(const CharType* s, std::size_t length) override + { + str.append(s, length); + } + + private: + StringType& str; +}; + +template<typename CharType, typename StringType = std::basic_string<CharType>> +class output_adapter +{ + public: + template<typename AllocatorType = std::allocator<CharType>> + output_adapter(std::vector<CharType, AllocatorType>& vec) + : oa(std::make_shared<output_vector_adapter<CharType, AllocatorType>>(vec)) {} + +#ifndef JSON_NO_IO + output_adapter(std::basic_ostream<CharType>& s) + : oa(std::make_shared<output_stream_adapter<CharType>>(s)) {} +#endif // JSON_NO_IO + + output_adapter(StringType& s) + : oa(std::make_shared<output_string_adapter<CharType, StringType>>(s)) {} + + operator output_adapter_t<CharType>() + { + return oa; + } + + private: + output_adapter_t<CharType> oa = nullptr; +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/string_concat.hpp> + + +namespace nlohmann +{ +namespace detail +{ +/////////////////// +// binary writer // +/////////////////// + +/*! +@brief serialization to CBOR and MessagePack values +*/ +template<typename BasicJsonType, typename CharType> +class binary_writer +{ + using string_t = typename BasicJsonType::string_t; + using binary_t = typename BasicJsonType::binary_t; + using number_float_t = typename BasicJsonType::number_float_t; + + public: + /*! + @brief create a binary writer + + @param[in] adapter output adapter to write to + */ + explicit binary_writer(output_adapter_t<CharType> adapter) : oa(std::move(adapter)) + { + JSON_ASSERT(oa); + } + + /*! + @param[in] j JSON value to serialize + @pre j.type() == value_t::object + */ + void write_bson(const BasicJsonType& j) + { + switch (j.type()) + { + case value_t::object: + { + write_bson_object(*j.m_value.object); + break; + } + + case value_t::null: + case value_t::array: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + JSON_THROW(type_error::create(317, concat("to serialize to BSON, top-level type must be object, but is ", j.type_name()), &j)); + } + } + } + + /*! + @param[in] j JSON value to serialize + */ + void write_cbor(const BasicJsonType& j) + { + switch (j.type()) + { + case value_t::null: + { + oa->write_character(to_char_type(0xF6)); + break; + } + + case value_t::boolean: + { + oa->write_character(j.m_value.boolean + ? to_char_type(0xF5) + : to_char_type(0xF4)); + break; + } + + case value_t::number_integer: + { + if (j.m_value.number_integer >= 0) + { + // CBOR does not differentiate between positive signed + // integers and unsigned integers. Therefore, we used the + // code from the value_t::number_unsigned case here. 
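+                    // CBOR major type 0: values 0..23 are encoded in the initial byte itself; larger
+                    // values follow the 1/2/4/8 byte forms introduced by 0x18, 0x19, 0x1A and 0x1B (RFC 8949)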
+ if (j.m_value.number_integer <= 0x17) + { + write_number(static_cast<std::uint8_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_integer <= (std::numeric_limits<std::uint8_t>::max)()) + { + oa->write_character(to_char_type(0x18)); + write_number(static_cast<std::uint8_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_integer <= (std::numeric_limits<std::uint16_t>::max)()) + { + oa->write_character(to_char_type(0x19)); + write_number(static_cast<std::uint16_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_integer <= (std::numeric_limits<std::uint32_t>::max)()) + { + oa->write_character(to_char_type(0x1A)); + write_number(static_cast<std::uint32_t>(j.m_value.number_integer)); + } + else + { + oa->write_character(to_char_type(0x1B)); + write_number(static_cast<std::uint64_t>(j.m_value.number_integer)); + } + } + else + { + // The conversions below encode the sign in the first + // byte, and the value is converted to a positive number. + const auto positive_number = -1 - j.m_value.number_integer; + if (j.m_value.number_integer >= -24) + { + write_number(static_cast<std::uint8_t>(0x20 + positive_number)); + } + else if (positive_number <= (std::numeric_limits<std::uint8_t>::max)()) + { + oa->write_character(to_char_type(0x38)); + write_number(static_cast<std::uint8_t>(positive_number)); + } + else if (positive_number <= (std::numeric_limits<std::uint16_t>::max)()) + { + oa->write_character(to_char_type(0x39)); + write_number(static_cast<std::uint16_t>(positive_number)); + } + else if (positive_number <= (std::numeric_limits<std::uint32_t>::max)()) + { + oa->write_character(to_char_type(0x3A)); + write_number(static_cast<std::uint32_t>(positive_number)); + } + else + { + oa->write_character(to_char_type(0x3B)); + write_number(static_cast<std::uint64_t>(positive_number)); + } + } + break; + } + + case value_t::number_unsigned: + { + if (j.m_value.number_unsigned <= 0x17) + { + write_number(static_cast<std::uint8_t>(j.m_value.number_unsigned)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits<std::uint8_t>::max)()) + { + oa->write_character(to_char_type(0x18)); + write_number(static_cast<std::uint8_t>(j.m_value.number_unsigned)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits<std::uint16_t>::max)()) + { + oa->write_character(to_char_type(0x19)); + write_number(static_cast<std::uint16_t>(j.m_value.number_unsigned)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits<std::uint32_t>::max)()) + { + oa->write_character(to_char_type(0x1A)); + write_number(static_cast<std::uint32_t>(j.m_value.number_unsigned)); + } + else + { + oa->write_character(to_char_type(0x1B)); + write_number(static_cast<std::uint64_t>(j.m_value.number_unsigned)); + } + break; + } + + case value_t::number_float: + { + if (std::isnan(j.m_value.number_float)) + { + // NaN is 0xf97e00 in CBOR + oa->write_character(to_char_type(0xF9)); + oa->write_character(to_char_type(0x7E)); + oa->write_character(to_char_type(0x00)); + } + else if (std::isinf(j.m_value.number_float)) + { + // Infinity is 0xf97c00, -Infinity is 0xf9fc00 + oa->write_character(to_char_type(0xf9)); + oa->write_character(j.m_value.number_float > 0 ? 
to_char_type(0x7C) : to_char_type(0xFC)); + oa->write_character(to_char_type(0x00)); + } + else + { + write_compact_float(j.m_value.number_float, detail::input_format_t::cbor); + } + break; + } + + case value_t::string: + { + // step 1: write control byte and the string length + const auto N = j.m_value.string->size(); + if (N <= 0x17) + { + write_number(static_cast<std::uint8_t>(0x60 + N)); + } + else if (N <= (std::numeric_limits<std::uint8_t>::max)()) + { + oa->write_character(to_char_type(0x78)); + write_number(static_cast<std::uint8_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint16_t>::max)()) + { + oa->write_character(to_char_type(0x79)); + write_number(static_cast<std::uint16_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint32_t>::max)()) + { + oa->write_character(to_char_type(0x7A)); + write_number(static_cast<std::uint32_t>(N)); + } + // LCOV_EXCL_START + else if (N <= (std::numeric_limits<std::uint64_t>::max)()) + { + oa->write_character(to_char_type(0x7B)); + write_number(static_cast<std::uint64_t>(N)); + } + // LCOV_EXCL_STOP + + // step 2: write the string + oa->write_characters( + reinterpret_cast<const CharType*>(j.m_value.string->c_str()), + j.m_value.string->size()); + break; + } + + case value_t::array: + { + // step 1: write control byte and the array size + const auto N = j.m_value.array->size(); + if (N <= 0x17) + { + write_number(static_cast<std::uint8_t>(0x80 + N)); + } + else if (N <= (std::numeric_limits<std::uint8_t>::max)()) + { + oa->write_character(to_char_type(0x98)); + write_number(static_cast<std::uint8_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint16_t>::max)()) + { + oa->write_character(to_char_type(0x99)); + write_number(static_cast<std::uint16_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint32_t>::max)()) + { + oa->write_character(to_char_type(0x9A)); + write_number(static_cast<std::uint32_t>(N)); + } + // LCOV_EXCL_START + else if (N <= (std::numeric_limits<std::uint64_t>::max)()) + { + oa->write_character(to_char_type(0x9B)); + write_number(static_cast<std::uint64_t>(N)); + } + // LCOV_EXCL_STOP + + // step 2: write each element + for (const auto& el : *j.m_value.array) + { + write_cbor(el); + } + break; + } + + case value_t::binary: + { + if (j.m_value.binary->has_subtype()) + { + if (j.m_value.binary->subtype() <= (std::numeric_limits<std::uint8_t>::max)()) + { + write_number(static_cast<std::uint8_t>(0xd8)); + write_number(static_cast<std::uint8_t>(j.m_value.binary->subtype())); + } + else if (j.m_value.binary->subtype() <= (std::numeric_limits<std::uint16_t>::max)()) + { + write_number(static_cast<std::uint8_t>(0xd9)); + write_number(static_cast<std::uint16_t>(j.m_value.binary->subtype())); + } + else if (j.m_value.binary->subtype() <= (std::numeric_limits<std::uint32_t>::max)()) + { + write_number(static_cast<std::uint8_t>(0xda)); + write_number(static_cast<std::uint32_t>(j.m_value.binary->subtype())); + } + else if (j.m_value.binary->subtype() <= (std::numeric_limits<std::uint64_t>::max)()) + { + write_number(static_cast<std::uint8_t>(0xdb)); + write_number(static_cast<std::uint64_t>(j.m_value.binary->subtype())); + } + } + + // step 1: write control byte and the binary array size + const auto N = j.m_value.binary->size(); + if (N <= 0x17) + { + write_number(static_cast<std::uint8_t>(0x40 + N)); + } + else if (N <= (std::numeric_limits<std::uint8_t>::max)()) + { + oa->write_character(to_char_type(0x58)); + write_number(static_cast<std::uint8_t>(N)); + } + else if (N <= 
(std::numeric_limits<std::uint16_t>::max)()) + { + oa->write_character(to_char_type(0x59)); + write_number(static_cast<std::uint16_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint32_t>::max)()) + { + oa->write_character(to_char_type(0x5A)); + write_number(static_cast<std::uint32_t>(N)); + } + // LCOV_EXCL_START + else if (N <= (std::numeric_limits<std::uint64_t>::max)()) + { + oa->write_character(to_char_type(0x5B)); + write_number(static_cast<std::uint64_t>(N)); + } + // LCOV_EXCL_STOP + + // step 2: write each element + oa->write_characters( + reinterpret_cast<const CharType*>(j.m_value.binary->data()), + N); + + break; + } + + case value_t::object: + { + // step 1: write control byte and the object size + const auto N = j.m_value.object->size(); + if (N <= 0x17) + { + write_number(static_cast<std::uint8_t>(0xA0 + N)); + } + else if (N <= (std::numeric_limits<std::uint8_t>::max)()) + { + oa->write_character(to_char_type(0xB8)); + write_number(static_cast<std::uint8_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint16_t>::max)()) + { + oa->write_character(to_char_type(0xB9)); + write_number(static_cast<std::uint16_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint32_t>::max)()) + { + oa->write_character(to_char_type(0xBA)); + write_number(static_cast<std::uint32_t>(N)); + } + // LCOV_EXCL_START + else if (N <= (std::numeric_limits<std::uint64_t>::max)()) + { + oa->write_character(to_char_type(0xBB)); + write_number(static_cast<std::uint64_t>(N)); + } + // LCOV_EXCL_STOP + + // step 2: write each element + for (const auto& el : *j.m_value.object) + { + write_cbor(el.first); + write_cbor(el.second); + } + break; + } + + case value_t::discarded: + default: + break; + } + } + + /*! + @param[in] j JSON value to serialize + */ + void write_msgpack(const BasicJsonType& j) + { + switch (j.type()) + { + case value_t::null: // nil + { + oa->write_character(to_char_type(0xC0)); + break; + } + + case value_t::boolean: // true and false + { + oa->write_character(j.m_value.boolean + ? to_char_type(0xC3) + : to_char_type(0xC2)); + break; + } + + case value_t::number_integer: + { + if (j.m_value.number_integer >= 0) + { + // MessagePack does not differentiate between positive + // signed integers and unsigned integers. Therefore, we used + // the code from the value_t::number_unsigned case here. 
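+                    // MessagePack: 0..127 is a one-byte positive fixint; larger values use the
+                    // uint 8/16/32/64 formats introduced by 0xCC, 0xCD, 0xCE and 0xCF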
+ if (j.m_value.number_unsigned < 128) + { + // positive fixnum + write_number(static_cast<std::uint8_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits<std::uint8_t>::max)()) + { + // uint 8 + oa->write_character(to_char_type(0xCC)); + write_number(static_cast<std::uint8_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits<std::uint16_t>::max)()) + { + // uint 16 + oa->write_character(to_char_type(0xCD)); + write_number(static_cast<std::uint16_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits<std::uint32_t>::max)()) + { + // uint 32 + oa->write_character(to_char_type(0xCE)); + write_number(static_cast<std::uint32_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits<std::uint64_t>::max)()) + { + // uint 64 + oa->write_character(to_char_type(0xCF)); + write_number(static_cast<std::uint64_t>(j.m_value.number_integer)); + } + } + else + { + if (j.m_value.number_integer >= -32) + { + // negative fixnum + write_number(static_cast<std::int8_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_integer >= (std::numeric_limits<std::int8_t>::min)() && + j.m_value.number_integer <= (std::numeric_limits<std::int8_t>::max)()) + { + // int 8 + oa->write_character(to_char_type(0xD0)); + write_number(static_cast<std::int8_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_integer >= (std::numeric_limits<std::int16_t>::min)() && + j.m_value.number_integer <= (std::numeric_limits<std::int16_t>::max)()) + { + // int 16 + oa->write_character(to_char_type(0xD1)); + write_number(static_cast<std::int16_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_integer >= (std::numeric_limits<std::int32_t>::min)() && + j.m_value.number_integer <= (std::numeric_limits<std::int32_t>::max)()) + { + // int 32 + oa->write_character(to_char_type(0xD2)); + write_number(static_cast<std::int32_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_integer >= (std::numeric_limits<std::int64_t>::min)() && + j.m_value.number_integer <= (std::numeric_limits<std::int64_t>::max)()) + { + // int 64 + oa->write_character(to_char_type(0xD3)); + write_number(static_cast<std::int64_t>(j.m_value.number_integer)); + } + } + break; + } + + case value_t::number_unsigned: + { + if (j.m_value.number_unsigned < 128) + { + // positive fixnum + write_number(static_cast<std::uint8_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits<std::uint8_t>::max)()) + { + // uint 8 + oa->write_character(to_char_type(0xCC)); + write_number(static_cast<std::uint8_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits<std::uint16_t>::max)()) + { + // uint 16 + oa->write_character(to_char_type(0xCD)); + write_number(static_cast<std::uint16_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits<std::uint32_t>::max)()) + { + // uint 32 + oa->write_character(to_char_type(0xCE)); + write_number(static_cast<std::uint32_t>(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits<std::uint64_t>::max)()) + { + // uint 64 + oa->write_character(to_char_type(0xCF)); + write_number(static_cast<std::uint64_t>(j.m_value.number_integer)); + } + break; + } + + case value_t::number_float: + { + write_compact_float(j.m_value.number_float, detail::input_format_t::msgpack); + break; + } + + case value_t::string: 
+ { + // step 1: write control byte and the string length + const auto N = j.m_value.string->size(); + if (N <= 31) + { + // fixstr + write_number(static_cast<std::uint8_t>(0xA0 | N)); + } + else if (N <= (std::numeric_limits<std::uint8_t>::max)()) + { + // str 8 + oa->write_character(to_char_type(0xD9)); + write_number(static_cast<std::uint8_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint16_t>::max)()) + { + // str 16 + oa->write_character(to_char_type(0xDA)); + write_number(static_cast<std::uint16_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint32_t>::max)()) + { + // str 32 + oa->write_character(to_char_type(0xDB)); + write_number(static_cast<std::uint32_t>(N)); + } + + // step 2: write the string + oa->write_characters( + reinterpret_cast<const CharType*>(j.m_value.string->c_str()), + j.m_value.string->size()); + break; + } + + case value_t::array: + { + // step 1: write control byte and the array size + const auto N = j.m_value.array->size(); + if (N <= 15) + { + // fixarray + write_number(static_cast<std::uint8_t>(0x90 | N)); + } + else if (N <= (std::numeric_limits<std::uint16_t>::max)()) + { + // array 16 + oa->write_character(to_char_type(0xDC)); + write_number(static_cast<std::uint16_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint32_t>::max)()) + { + // array 32 + oa->write_character(to_char_type(0xDD)); + write_number(static_cast<std::uint32_t>(N)); + } + + // step 2: write each element + for (const auto& el : *j.m_value.array) + { + write_msgpack(el); + } + break; + } + + case value_t::binary: + { + // step 0: determine if the binary type has a set subtype to + // determine whether or not to use the ext or fixext types + const bool use_ext = j.m_value.binary->has_subtype(); + + // step 1: write control byte and the byte string length + const auto N = j.m_value.binary->size(); + if (N <= (std::numeric_limits<std::uint8_t>::max)()) + { + std::uint8_t output_type{}; + bool fixed = true; + if (use_ext) + { + switch (N) + { + case 1: + output_type = 0xD4; // fixext 1 + break; + case 2: + output_type = 0xD5; // fixext 2 + break; + case 4: + output_type = 0xD6; // fixext 4 + break; + case 8: + output_type = 0xD7; // fixext 8 + break; + case 16: + output_type = 0xD8; // fixext 16 + break; + default: + output_type = 0xC7; // ext 8 + fixed = false; + break; + } + + } + else + { + output_type = 0xC4; // bin 8 + fixed = false; + } + + oa->write_character(to_char_type(output_type)); + if (!fixed) + { + write_number(static_cast<std::uint8_t>(N)); + } + } + else if (N <= (std::numeric_limits<std::uint16_t>::max)()) + { + std::uint8_t output_type = use_ext + ? 0xC8 // ext 16 + : 0xC5; // bin 16 + + oa->write_character(to_char_type(output_type)); + write_number(static_cast<std::uint16_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint32_t>::max)()) + { + std::uint8_t output_type = use_ext + ? 
0xC9 // ext 32 + : 0xC6; // bin 32 + + oa->write_character(to_char_type(output_type)); + write_number(static_cast<std::uint32_t>(N)); + } + + // step 1.5: if this is an ext type, write the subtype + if (use_ext) + { + write_number(static_cast<std::int8_t>(j.m_value.binary->subtype())); + } + + // step 2: write the byte string + oa->write_characters( + reinterpret_cast<const CharType*>(j.m_value.binary->data()), + N); + + break; + } + + case value_t::object: + { + // step 1: write control byte and the object size + const auto N = j.m_value.object->size(); + if (N <= 15) + { + // fixmap + write_number(static_cast<std::uint8_t>(0x80 | (N & 0xF))); + } + else if (N <= (std::numeric_limits<std::uint16_t>::max)()) + { + // map 16 + oa->write_character(to_char_type(0xDE)); + write_number(static_cast<std::uint16_t>(N)); + } + else if (N <= (std::numeric_limits<std::uint32_t>::max)()) + { + // map 32 + oa->write_character(to_char_type(0xDF)); + write_number(static_cast<std::uint32_t>(N)); + } + + // step 2: write each element + for (const auto& el : *j.m_value.object) + { + write_msgpack(el.first); + write_msgpack(el.second); + } + break; + } + + case value_t::discarded: + default: + break; + } + } + + /*! + @param[in] j JSON value to serialize + @param[in] use_count whether to use '#' prefixes (optimized format) + @param[in] use_type whether to use '$' prefixes (optimized format) + @param[in] add_prefix whether prefixes need to be used for this value + @param[in] use_bjdata whether write in BJData format, default is false + */ + void write_ubjson(const BasicJsonType& j, const bool use_count, + const bool use_type, const bool add_prefix = true, + const bool use_bjdata = false) + { + switch (j.type()) + { + case value_t::null: + { + if (add_prefix) + { + oa->write_character(to_char_type('Z')); + } + break; + } + + case value_t::boolean: + { + if (add_prefix) + { + oa->write_character(j.m_value.boolean + ? 
to_char_type('T') + : to_char_type('F')); + } + break; + } + + case value_t::number_integer: + { + write_number_with_ubjson_prefix(j.m_value.number_integer, add_prefix, use_bjdata); + break; + } + + case value_t::number_unsigned: + { + write_number_with_ubjson_prefix(j.m_value.number_unsigned, add_prefix, use_bjdata); + break; + } + + case value_t::number_float: + { + write_number_with_ubjson_prefix(j.m_value.number_float, add_prefix, use_bjdata); + break; + } + + case value_t::string: + { + if (add_prefix) + { + oa->write_character(to_char_type('S')); + } + write_number_with_ubjson_prefix(j.m_value.string->size(), true, use_bjdata); + oa->write_characters( + reinterpret_cast<const CharType*>(j.m_value.string->c_str()), + j.m_value.string->size()); + break; + } + + case value_t::array: + { + if (add_prefix) + { + oa->write_character(to_char_type('[')); + } + + bool prefix_required = true; + if (use_type && !j.m_value.array->empty()) + { + JSON_ASSERT(use_count); + const CharType first_prefix = ubjson_prefix(j.front(), use_bjdata); + const bool same_prefix = std::all_of(j.begin() + 1, j.end(), + [this, first_prefix, use_bjdata](const BasicJsonType & v) + { + return ubjson_prefix(v, use_bjdata) == first_prefix; + }); + + std::vector<CharType> bjdx = {'[', '{', 'S', 'H', 'T', 'F', 'N', 'Z'}; // excluded markers in bjdata optimized type + + if (same_prefix && !(use_bjdata && std::find(bjdx.begin(), bjdx.end(), first_prefix) != bjdx.end())) + { + prefix_required = false; + oa->write_character(to_char_type('$')); + oa->write_character(first_prefix); + } + } + + if (use_count) + { + oa->write_character(to_char_type('#')); + write_number_with_ubjson_prefix(j.m_value.array->size(), true, use_bjdata); + } + + for (const auto& el : *j.m_value.array) + { + write_ubjson(el, use_count, use_type, prefix_required, use_bjdata); + } + + if (!use_count) + { + oa->write_character(to_char_type(']')); + } + + break; + } + + case value_t::binary: + { + if (add_prefix) + { + oa->write_character(to_char_type('[')); + } + + if (use_type && !j.m_value.binary->empty()) + { + JSON_ASSERT(use_count); + oa->write_character(to_char_type('$')); + oa->write_character('U'); + } + + if (use_count) + { + oa->write_character(to_char_type('#')); + write_number_with_ubjson_prefix(j.m_value.binary->size(), true, use_bjdata); + } + + if (use_type) + { + oa->write_characters( + reinterpret_cast<const CharType*>(j.m_value.binary->data()), + j.m_value.binary->size()); + } + else + { + for (size_t i = 0; i < j.m_value.binary->size(); ++i) + { + oa->write_character(to_char_type('U')); + oa->write_character(j.m_value.binary->data()[i]); + } + } + + if (!use_count) + { + oa->write_character(to_char_type(']')); + } + + break; + } + + case value_t::object: + { + if (use_bjdata && j.m_value.object->size() == 3 && j.m_value.object->find("_ArrayType_") != j.m_value.object->end() && j.m_value.object->find("_ArraySize_") != j.m_value.object->end() && j.m_value.object->find("_ArrayData_") != j.m_value.object->end()) + { + if (!write_bjdata_ndarray(*j.m_value.object, use_count, use_type)) // decode bjdata ndarray in the JData format (https://github.com/NeuroJSON/jdata) + { + break; + } + } + + if (add_prefix) + { + oa->write_character(to_char_type('{')); + } + + bool prefix_required = true; + if (use_type && !j.m_value.object->empty()) + { + JSON_ASSERT(use_count); + const CharType first_prefix = ubjson_prefix(j.front(), use_bjdata); + const bool same_prefix = std::all_of(j.begin(), j.end(), + [this, first_prefix, use_bjdata](const BasicJsonType 
& v) + { + return ubjson_prefix(v, use_bjdata) == first_prefix; + }); + + std::vector<CharType> bjdx = {'[', '{', 'S', 'H', 'T', 'F', 'N', 'Z'}; // excluded markers in bjdata optimized type + + if (same_prefix && !(use_bjdata && std::find(bjdx.begin(), bjdx.end(), first_prefix) != bjdx.end())) + { + prefix_required = false; + oa->write_character(to_char_type('$')); + oa->write_character(first_prefix); + } + } + + if (use_count) + { + oa->write_character(to_char_type('#')); + write_number_with_ubjson_prefix(j.m_value.object->size(), true, use_bjdata); + } + + for (const auto& el : *j.m_value.object) + { + write_number_with_ubjson_prefix(el.first.size(), true, use_bjdata); + oa->write_characters( + reinterpret_cast<const CharType*>(el.first.c_str()), + el.first.size()); + write_ubjson(el.second, use_count, use_type, prefix_required, use_bjdata); + } + + if (!use_count) + { + oa->write_character(to_char_type('}')); + } + + break; + } + + case value_t::discarded: + default: + break; + } + } + + private: + ////////// + // BSON // + ////////// + + /*! + @return The size of a BSON document entry header, including the id marker + and the entry name size (and its null-terminator). + */ + static std::size_t calc_bson_entry_header_size(const string_t& name, const BasicJsonType& j) + { + const auto it = name.find(static_cast<typename string_t::value_type>(0)); + if (JSON_HEDLEY_UNLIKELY(it != BasicJsonType::string_t::npos)) + { + JSON_THROW(out_of_range::create(409, concat("BSON key cannot contain code point U+0000 (at byte ", std::to_string(it), ")"), &j)); + static_cast<void>(j); + } + + return /*id*/ 1ul + name.size() + /*zero-terminator*/1u; + } + + /*! + @brief Writes the given @a element_type and @a name to the output adapter + */ + void write_bson_entry_header(const string_t& name, + const std::uint8_t element_type) + { + oa->write_character(to_char_type(element_type)); // boolean + oa->write_characters( + reinterpret_cast<const CharType*>(name.c_str()), + name.size() + 1u); + } + + /*! + @brief Writes a BSON element with key @a name and boolean value @a value + */ + void write_bson_boolean(const string_t& name, + const bool value) + { + write_bson_entry_header(name, 0x08); + oa->write_character(value ? to_char_type(0x01) : to_char_type(0x00)); + } + + /*! + @brief Writes a BSON element with key @a name and double value @a value + */ + void write_bson_double(const string_t& name, + const double value) + { + write_bson_entry_header(name, 0x01); + write_number<double>(value, true); + } + + /*! + @return The size of the BSON-encoded string in @a value + */ + static std::size_t calc_bson_string_size(const string_t& value) + { + return sizeof(std::int32_t) + value.size() + 1ul; + } + + /*! + @brief Writes a BSON element with key @a name and string value @a value + */ + void write_bson_string(const string_t& name, + const string_t& value) + { + write_bson_entry_header(name, 0x02); + + write_number<std::int32_t>(static_cast<std::int32_t>(value.size() + 1ul), true); + oa->write_characters( + reinterpret_cast<const CharType*>(value.c_str()), + value.size() + 1); + } + + /*! + @brief Writes a BSON element with key @a name and null value + */ + void write_bson_null(const string_t& name) + { + write_bson_entry_header(name, 0x0A); + } + + /*! + @return The size of the BSON-encoded integer @a value + */ + static std::size_t calc_bson_integer_size(const std::int64_t value) + { + return (std::numeric_limits<std::int32_t>::min)() <= value && value <= (std::numeric_limits<std::int32_t>::max)() + ? 
sizeof(std::int32_t) + : sizeof(std::int64_t); + } + + /*! + @brief Writes a BSON element with key @a name and integer @a value + */ + void write_bson_integer(const string_t& name, + const std::int64_t value) + { + if ((std::numeric_limits<std::int32_t>::min)() <= value && value <= (std::numeric_limits<std::int32_t>::max)()) + { + write_bson_entry_header(name, 0x10); // int32 + write_number<std::int32_t>(static_cast<std::int32_t>(value), true); + } + else + { + write_bson_entry_header(name, 0x12); // int64 + write_number<std::int64_t>(static_cast<std::int64_t>(value), true); + } + } + + /*! + @return The size of the BSON-encoded unsigned integer in @a j + */ + static constexpr std::size_t calc_bson_unsigned_size(const std::uint64_t value) noexcept + { + return (value <= static_cast<std::uint64_t>((std::numeric_limits<std::int32_t>::max)())) + ? sizeof(std::int32_t) + : sizeof(std::int64_t); + } + + /*! + @brief Writes a BSON element with key @a name and unsigned @a value + */ + void write_bson_unsigned(const string_t& name, + const BasicJsonType& j) + { + if (j.m_value.number_unsigned <= static_cast<std::uint64_t>((std::numeric_limits<std::int32_t>::max)())) + { + write_bson_entry_header(name, 0x10 /* int32 */); + write_number<std::int32_t>(static_cast<std::int32_t>(j.m_value.number_unsigned), true); + } + else if (j.m_value.number_unsigned <= static_cast<std::uint64_t>((std::numeric_limits<std::int64_t>::max)())) + { + write_bson_entry_header(name, 0x12 /* int64 */); + write_number<std::int64_t>(static_cast<std::int64_t>(j.m_value.number_unsigned), true); + } + else + { + JSON_THROW(out_of_range::create(407, concat("integer number ", std::to_string(j.m_value.number_unsigned), " cannot be represented by BSON as it does not fit int64"), &j)); + } + } + + /*! + @brief Writes a BSON element with key @a name and object @a value + */ + void write_bson_object_entry(const string_t& name, + const typename BasicJsonType::object_t& value) + { + write_bson_entry_header(name, 0x03); // object + write_bson_object(value); + } + + /*! + @return The size of the BSON-encoded array @a value + */ + static std::size_t calc_bson_array_size(const typename BasicJsonType::array_t& value) + { + std::size_t array_index = 0ul; + + const std::size_t embedded_document_size = std::accumulate(std::begin(value), std::end(value), static_cast<std::size_t>(0), [&array_index](std::size_t result, const typename BasicJsonType::array_t::value_type & el) + { + return result + calc_bson_element_size(std::to_string(array_index++), el); + }); + + return sizeof(std::int32_t) + embedded_document_size + 1ul; + } + + /*! + @return The size of the BSON-encoded binary array @a value + */ + static std::size_t calc_bson_binary_size(const typename BasicJsonType::binary_t& value) + { + return sizeof(std::int32_t) + value.size() + 1ul; + } + + /*! + @brief Writes a BSON element with key @a name and array @a value + */ + void write_bson_array(const string_t& name, + const typename BasicJsonType::array_t& value) + { + write_bson_entry_header(name, 0x04); // array + write_number<std::int32_t>(static_cast<std::int32_t>(calc_bson_array_size(value)), true); + + std::size_t array_index = 0ul; + + for (const auto& el : value) + { + write_bson_element(std::to_string(array_index++), el); + } + + oa->write_character(to_char_type(0x00)); + } + + /*! 
+ @brief Writes a BSON element with key @a name and binary value @a value + */ + void write_bson_binary(const string_t& name, + const binary_t& value) + { + write_bson_entry_header(name, 0x05); + + write_number<std::int32_t>(static_cast<std::int32_t>(value.size()), true); + write_number(value.has_subtype() ? static_cast<std::uint8_t>(value.subtype()) : static_cast<std::uint8_t>(0x00)); + + oa->write_characters(reinterpret_cast<const CharType*>(value.data()), value.size()); + } + + /*! + @brief Calculates the size necessary to serialize the JSON value @a j with its @a name + @return The calculated size for the BSON document entry for @a j with the given @a name. + */ + static std::size_t calc_bson_element_size(const string_t& name, + const BasicJsonType& j) + { + const auto header_size = calc_bson_entry_header_size(name, j); + switch (j.type()) + { + case value_t::object: + return header_size + calc_bson_object_size(*j.m_value.object); + + case value_t::array: + return header_size + calc_bson_array_size(*j.m_value.array); + + case value_t::binary: + return header_size + calc_bson_binary_size(*j.m_value.binary); + + case value_t::boolean: + return header_size + 1ul; + + case value_t::number_float: + return header_size + 8ul; + + case value_t::number_integer: + return header_size + calc_bson_integer_size(j.m_value.number_integer); + + case value_t::number_unsigned: + return header_size + calc_bson_unsigned_size(j.m_value.number_unsigned); + + case value_t::string: + return header_size + calc_bson_string_size(*j.m_value.string); + + case value_t::null: + return header_size + 0ul; + + // LCOV_EXCL_START + case value_t::discarded: + default: + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) + return 0ul; + // LCOV_EXCL_STOP + } + } + + /*! + @brief Serializes the JSON value @a j to BSON and associates it with the + key @a name. + @param name The name to associate with the JSON entity @a j within the + current BSON document + */ + void write_bson_element(const string_t& name, + const BasicJsonType& j) + { + switch (j.type()) + { + case value_t::object: + return write_bson_object_entry(name, *j.m_value.object); + + case value_t::array: + return write_bson_array(name, *j.m_value.array); + + case value_t::binary: + return write_bson_binary(name, *j.m_value.binary); + + case value_t::boolean: + return write_bson_boolean(name, j.m_value.boolean); + + case value_t::number_float: + return write_bson_double(name, j.m_value.number_float); + + case value_t::number_integer: + return write_bson_integer(name, j.m_value.number_integer); + + case value_t::number_unsigned: + return write_bson_unsigned(name, j); + + case value_t::string: + return write_bson_string(name, *j.m_value.string); + + case value_t::null: + return write_bson_null(name); + + // LCOV_EXCL_START + case value_t::discarded: + default: + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) + return; + // LCOV_EXCL_STOP + } + } + + /*! + @brief Calculates the size of the BSON serialization of the given + JSON-object @a j. 
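+    A BSON document is serialized as an int32 byte count, the encoded elements, and a trailing 0x00 byte,
+    which is what the size computed here accounts for.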
+ @param[in] value JSON value to serialize + @pre value.type() == value_t::object + */ + static std::size_t calc_bson_object_size(const typename BasicJsonType::object_t& value) + { + std::size_t document_size = std::accumulate(value.begin(), value.end(), static_cast<std::size_t>(0), + [](size_t result, const typename BasicJsonType::object_t::value_type & el) + { + return result += calc_bson_element_size(el.first, el.second); + }); + + return sizeof(std::int32_t) + document_size + 1ul; + } + + /*! + @param[in] value JSON value to serialize + @pre value.type() == value_t::object + */ + void write_bson_object(const typename BasicJsonType::object_t& value) + { + write_number<std::int32_t>(static_cast<std::int32_t>(calc_bson_object_size(value)), true); + + for (const auto& el : value) + { + write_bson_element(el.first, el.second); + } + + oa->write_character(to_char_type(0x00)); + } + + ////////// + // CBOR // + ////////// + + static constexpr CharType get_cbor_float_prefix(float /*unused*/) + { + return to_char_type(0xFA); // Single-Precision Float + } + + static constexpr CharType get_cbor_float_prefix(double /*unused*/) + { + return to_char_type(0xFB); // Double-Precision Float + } + + ///////////// + // MsgPack // + ///////////// + + static constexpr CharType get_msgpack_float_prefix(float /*unused*/) + { + return to_char_type(0xCA); // float 32 + } + + static constexpr CharType get_msgpack_float_prefix(double /*unused*/) + { + return to_char_type(0xCB); // float 64 + } + + //////////// + // UBJSON // + //////////// + + // UBJSON: write number (floating point) + template<typename NumberType, typename std::enable_if< + std::is_floating_point<NumberType>::value, int>::type = 0> + void write_number_with_ubjson_prefix(const NumberType n, + const bool add_prefix, + const bool use_bjdata) + { + if (add_prefix) + { + oa->write_character(get_ubjson_float_prefix(n)); + } + write_number(n, use_bjdata); + } + + // UBJSON: write number (unsigned integer) + template<typename NumberType, typename std::enable_if< + std::is_unsigned<NumberType>::value, int>::type = 0> + void write_number_with_ubjson_prefix(const NumberType n, + const bool add_prefix, + const bool use_bjdata) + { + if (n <= static_cast<std::uint64_t>((std::numeric_limits<std::int8_t>::max)())) + { + if (add_prefix) + { + oa->write_character(to_char_type('i')); // int8 + } + write_number(static_cast<std::uint8_t>(n), use_bjdata); + } + else if (n <= (std::numeric_limits<std::uint8_t>::max)()) + { + if (add_prefix) + { + oa->write_character(to_char_type('U')); // uint8 + } + write_number(static_cast<std::uint8_t>(n), use_bjdata); + } + else if (n <= static_cast<std::uint64_t>((std::numeric_limits<std::int16_t>::max)())) + { + if (add_prefix) + { + oa->write_character(to_char_type('I')); // int16 + } + write_number(static_cast<std::int16_t>(n), use_bjdata); + } + else if (use_bjdata && n <= static_cast<uint64_t>((std::numeric_limits<uint16_t>::max)())) + { + if (add_prefix) + { + oa->write_character(to_char_type('u')); // uint16 - bjdata only + } + write_number(static_cast<std::uint16_t>(n), use_bjdata); + } + else if (n <= static_cast<std::uint64_t>((std::numeric_limits<std::int32_t>::max)())) + { + if (add_prefix) + { + oa->write_character(to_char_type('l')); // int32 + } + write_number(static_cast<std::int32_t>(n), use_bjdata); + } + else if (use_bjdata && n <= static_cast<uint64_t>((std::numeric_limits<uint32_t>::max)())) + { + if (add_prefix) + { + oa->write_character(to_char_type('m')); // uint32 - bjdata only + } + 
write_number(static_cast<std::uint32_t>(n), use_bjdata); + } + else if (n <= static_cast<std::uint64_t>((std::numeric_limits<std::int64_t>::max)())) + { + if (add_prefix) + { + oa->write_character(to_char_type('L')); // int64 + } + write_number(static_cast<std::int64_t>(n), use_bjdata); + } + else if (use_bjdata && n <= (std::numeric_limits<uint64_t>::max)()) + { + if (add_prefix) + { + oa->write_character(to_char_type('M')); // uint64 - bjdata only + } + write_number(static_cast<std::uint64_t>(n), use_bjdata); + } + else + { + if (add_prefix) + { + oa->write_character(to_char_type('H')); // high-precision number + } + + const auto number = BasicJsonType(n).dump(); + write_number_with_ubjson_prefix(number.size(), true, use_bjdata); + for (std::size_t i = 0; i < number.size(); ++i) + { + oa->write_character(to_char_type(static_cast<std::uint8_t>(number[i]))); + } + } + } + + // UBJSON: write number (signed integer) + template < typename NumberType, typename std::enable_if < + std::is_signed<NumberType>::value&& + !std::is_floating_point<NumberType>::value, int >::type = 0 > + void write_number_with_ubjson_prefix(const NumberType n, + const bool add_prefix, + const bool use_bjdata) + { + if ((std::numeric_limits<std::int8_t>::min)() <= n && n <= (std::numeric_limits<std::int8_t>::max)()) + { + if (add_prefix) + { + oa->write_character(to_char_type('i')); // int8 + } + write_number(static_cast<std::int8_t>(n), use_bjdata); + } + else if (static_cast<std::int64_t>((std::numeric_limits<std::uint8_t>::min)()) <= n && n <= static_cast<std::int64_t>((std::numeric_limits<std::uint8_t>::max)())) + { + if (add_prefix) + { + oa->write_character(to_char_type('U')); // uint8 + } + write_number(static_cast<std::uint8_t>(n), use_bjdata); + } + else if ((std::numeric_limits<std::int16_t>::min)() <= n && n <= (std::numeric_limits<std::int16_t>::max)()) + { + if (add_prefix) + { + oa->write_character(to_char_type('I')); // int16 + } + write_number(static_cast<std::int16_t>(n), use_bjdata); + } + else if (use_bjdata && (static_cast<std::int64_t>((std::numeric_limits<std::uint16_t>::min)()) <= n && n <= static_cast<std::int64_t>((std::numeric_limits<std::uint16_t>::max)()))) + { + if (add_prefix) + { + oa->write_character(to_char_type('u')); // uint16 - bjdata only + } + write_number(static_cast<uint16_t>(n), use_bjdata); + } + else if ((std::numeric_limits<std::int32_t>::min)() <= n && n <= (std::numeric_limits<std::int32_t>::max)()) + { + if (add_prefix) + { + oa->write_character(to_char_type('l')); // int32 + } + write_number(static_cast<std::int32_t>(n), use_bjdata); + } + else if (use_bjdata && (static_cast<std::int64_t>((std::numeric_limits<std::uint32_t>::min)()) <= n && n <= static_cast<std::int64_t>((std::numeric_limits<std::uint32_t>::max)()))) + { + if (add_prefix) + { + oa->write_character(to_char_type('m')); // uint32 - bjdata only + } + write_number(static_cast<uint32_t>(n), use_bjdata); + } + else if ((std::numeric_limits<std::int64_t>::min)() <= n && n <= (std::numeric_limits<std::int64_t>::max)()) + { + if (add_prefix) + { + oa->write_character(to_char_type('L')); // int64 + } + write_number(static_cast<std::int64_t>(n), use_bjdata); + } + // LCOV_EXCL_START + else + { + if (add_prefix) + { + oa->write_character(to_char_type('H')); // high-precision number + } + + const auto number = BasicJsonType(n).dump(); + write_number_with_ubjson_prefix(number.size(), true, use_bjdata); + for (std::size_t i = 0; i < number.size(); ++i) + { + 
oa->write_character(to_char_type(static_cast<std::uint8_t>(number[i]))); + } + } + // LCOV_EXCL_STOP + } + + /*! + @brief determine the type prefix of container values + */ + CharType ubjson_prefix(const BasicJsonType& j, const bool use_bjdata) const noexcept + { + switch (j.type()) + { + case value_t::null: + return 'Z'; + + case value_t::boolean: + return j.m_value.boolean ? 'T' : 'F'; + + case value_t::number_integer: + { + if ((std::numeric_limits<std::int8_t>::min)() <= j.m_value.number_integer && j.m_value.number_integer <= (std::numeric_limits<std::int8_t>::max)()) + { + return 'i'; + } + if ((std::numeric_limits<std::uint8_t>::min)() <= j.m_value.number_integer && j.m_value.number_integer <= (std::numeric_limits<std::uint8_t>::max)()) + { + return 'U'; + } + if ((std::numeric_limits<std::int16_t>::min)() <= j.m_value.number_integer && j.m_value.number_integer <= (std::numeric_limits<std::int16_t>::max)()) + { + return 'I'; + } + if (use_bjdata && ((std::numeric_limits<std::uint16_t>::min)() <= j.m_value.number_integer && j.m_value.number_integer <= (std::numeric_limits<std::uint16_t>::max)())) + { + return 'u'; + } + if ((std::numeric_limits<std::int32_t>::min)() <= j.m_value.number_integer && j.m_value.number_integer <= (std::numeric_limits<std::int32_t>::max)()) + { + return 'l'; + } + if (use_bjdata && ((std::numeric_limits<std::uint32_t>::min)() <= j.m_value.number_integer && j.m_value.number_integer <= (std::numeric_limits<std::uint32_t>::max)())) + { + return 'm'; + } + if ((std::numeric_limits<std::int64_t>::min)() <= j.m_value.number_integer && j.m_value.number_integer <= (std::numeric_limits<std::int64_t>::max)()) + { + return 'L'; + } + // anything else is treated as high-precision number + return 'H'; // LCOV_EXCL_LINE + } + + case value_t::number_unsigned: + { + if (j.m_value.number_unsigned <= static_cast<std::uint64_t>((std::numeric_limits<std::int8_t>::max)())) + { + return 'i'; + } + if (j.m_value.number_unsigned <= static_cast<std::uint64_t>((std::numeric_limits<std::uint8_t>::max)())) + { + return 'U'; + } + if (j.m_value.number_unsigned <= static_cast<std::uint64_t>((std::numeric_limits<std::int16_t>::max)())) + { + return 'I'; + } + if (use_bjdata && j.m_value.number_unsigned <= static_cast<std::uint64_t>((std::numeric_limits<std::uint16_t>::max)())) + { + return 'u'; + } + if (j.m_value.number_unsigned <= static_cast<std::uint64_t>((std::numeric_limits<std::int32_t>::max)())) + { + return 'l'; + } + if (use_bjdata && j.m_value.number_unsigned <= static_cast<std::uint64_t>((std::numeric_limits<std::uint32_t>::max)())) + { + return 'm'; + } + if (j.m_value.number_unsigned <= static_cast<std::uint64_t>((std::numeric_limits<std::int64_t>::max)())) + { + return 'L'; + } + if (use_bjdata && j.m_value.number_unsigned <= (std::numeric_limits<std::uint64_t>::max)()) + { + return 'M'; + } + // anything else is treated as high-precision number + return 'H'; // LCOV_EXCL_LINE + } + + case value_t::number_float: + return get_ubjson_float_prefix(j.m_value.number_float); + + case value_t::string: + return 'S'; + + case value_t::array: // fallthrough + case value_t::binary: + return '['; + + case value_t::object: + return '{'; + + case value_t::discarded: + default: // discarded values + return 'N'; + } + } + + static constexpr CharType get_ubjson_float_prefix(float /*unused*/) + { + return 'd'; // float 32 + } + + static constexpr CharType get_ubjson_float_prefix(double /*unused*/) + { + return 'D'; // float 64 + } + + /*! 
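+    @brief write a JData-style annotated object (_ArrayType_/_ArraySize_/_ArrayData_) as a BJData optimized ND-array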
+ @return false if the object is successfully converted to a bjdata ndarray, true if the type or size is invalid + */ + bool write_bjdata_ndarray(const typename BasicJsonType::object_t& value, const bool use_count, const bool use_type) + { + std::map<string_t, CharType> bjdtype = {{"uint8", 'U'}, {"int8", 'i'}, {"uint16", 'u'}, {"int16", 'I'}, + {"uint32", 'm'}, {"int32", 'l'}, {"uint64", 'M'}, {"int64", 'L'}, {"single", 'd'}, {"double", 'D'}, {"char", 'C'} + }; + + string_t key = "_ArrayType_"; + auto it = bjdtype.find(static_cast<string_t>(value.at(key))); + if (it == bjdtype.end()) + { + return true; + } + CharType dtype = it->second; + + key = "_ArraySize_"; + std::size_t len = (value.at(key).empty() ? 0 : 1); + for (const auto& el : value.at(key)) + { + len *= static_cast<std::size_t>(el.m_value.number_unsigned); + } + + key = "_ArrayData_"; + if (value.at(key).size() != len) + { + return true; + } + + oa->write_character('['); + oa->write_character('$'); + oa->write_character(dtype); + oa->write_character('#'); + + key = "_ArraySize_"; + write_ubjson(value.at(key), use_count, use_type, true, true); + + key = "_ArrayData_"; + if (dtype == 'U' || dtype == 'C') + { + for (const auto& el : value.at(key)) + { + write_number(static_cast<std::uint8_t>(el.m_value.number_unsigned), true); + } + } + else if (dtype == 'i') + { + for (const auto& el : value.at(key)) + { + write_number(static_cast<std::int8_t>(el.m_value.number_integer), true); + } + } + else if (dtype == 'u') + { + for (const auto& el : value.at(key)) + { + write_number(static_cast<std::uint16_t>(el.m_value.number_unsigned), true); + } + } + else if (dtype == 'I') + { + for (const auto& el : value.at(key)) + { + write_number(static_cast<std::int16_t>(el.m_value.number_integer), true); + } + } + else if (dtype == 'm') + { + for (const auto& el : value.at(key)) + { + write_number(static_cast<std::uint32_t>(el.m_value.number_unsigned), true); + } + } + else if (dtype == 'l') + { + for (const auto& el : value.at(key)) + { + write_number(static_cast<std::int32_t>(el.m_value.number_integer), true); + } + } + else if (dtype == 'M') + { + for (const auto& el : value.at(key)) + { + write_number(static_cast<std::uint64_t>(el.m_value.number_unsigned), true); + } + } + else if (dtype == 'L') + { + for (const auto& el : value.at(key)) + { + write_number(static_cast<std::int64_t>(el.m_value.number_integer), true); + } + } + else if (dtype == 'd') + { + for (const auto& el : value.at(key)) + { + write_number(static_cast<float>(el.m_value.number_float), true); + } + } + else if (dtype == 'D') + { + for (const auto& el : value.at(key)) + { + write_number(static_cast<double>(el.m_value.number_float), true); + } + } + return false; + } + + /////////////////////// + // Utility functions // + /////////////////////// + + /* + @brief write a number to output input + @param[in] n number of type @a NumberType + @param[in] OutputIsLittleEndian Set to true if output data is + required to be little endian + @tparam NumberType the type of the number + + @note This function needs to respect the system's endianness, because bytes + in CBOR, MessagePack, and UBJSON are stored in network order (big + endian) and therefore need reordering on little endian systems. + On the other hand, BSON and BJData use little endian and should reorder + on big endian systems. 
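+    For example, on a little endian host write_number<std::uint16_t>(0x1234) emits the bytes
+    0x12 0x34 with the default (big endian) output and 0x34 0x12 when OutputIsLittleEndian is true.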
+ */ + template<typename NumberType> + void write_number(const NumberType n, const bool OutputIsLittleEndian = false) + { + // step 1: write number to array of length NumberType + std::array<CharType, sizeof(NumberType)> vec{}; + std::memcpy(vec.data(), &n, sizeof(NumberType)); + + // step 2: write array to output (with possible reordering) + if (is_little_endian != OutputIsLittleEndian) + { + // reverse byte order prior to conversion if necessary + std::reverse(vec.begin(), vec.end()); + } + + oa->write_characters(vec.data(), sizeof(NumberType)); + } + + void write_compact_float(const number_float_t n, detail::input_format_t format) + { +#ifdef __GNUC__ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wfloat-equal" +#endif + if (static_cast<double>(n) >= static_cast<double>(std::numeric_limits<float>::lowest()) && + static_cast<double>(n) <= static_cast<double>((std::numeric_limits<float>::max)()) && + static_cast<double>(static_cast<float>(n)) == static_cast<double>(n)) + { + oa->write_character(format == detail::input_format_t::cbor + ? get_cbor_float_prefix(static_cast<float>(n)) + : get_msgpack_float_prefix(static_cast<float>(n))); + write_number(static_cast<float>(n)); + } + else + { + oa->write_character(format == detail::input_format_t::cbor + ? get_cbor_float_prefix(n) + : get_msgpack_float_prefix(n)); + write_number(n); + } +#ifdef __GNUC__ +#pragma GCC diagnostic pop +#endif + } + + public: + // The following to_char_type functions are implement the conversion + // between uint8_t and CharType. In case CharType is not unsigned, + // such a conversion is required to allow values greater than 128. + // See <https://github.com/nlohmann/json/issues/1286> for a discussion. + template < typename C = CharType, + enable_if_t < std::is_signed<C>::value && std::is_signed<char>::value > * = nullptr > + static constexpr CharType to_char_type(std::uint8_t x) noexcept + { + return *reinterpret_cast<char*>(&x); + } + + template < typename C = CharType, + enable_if_t < std::is_signed<C>::value && std::is_unsigned<char>::value > * = nullptr > + static CharType to_char_type(std::uint8_t x) noexcept + { + static_assert(sizeof(std::uint8_t) == sizeof(CharType), "size of CharType must be equal to std::uint8_t"); + static_assert(std::is_trivial<CharType>::value, "CharType must be trivial"); + CharType result; + std::memcpy(&result, &x, sizeof(x)); + return result; + } + + template<typename C = CharType, + enable_if_t<std::is_unsigned<C>::value>* = nullptr> + static constexpr CharType to_char_type(std::uint8_t x) noexcept + { + return x; + } + + template < typename InputCharType, typename C = CharType, + enable_if_t < + std::is_signed<C>::value && + std::is_signed<char>::value && + std::is_same<char, typename std::remove_cv<InputCharType>::type>::value + > * = nullptr > + static constexpr CharType to_char_type(InputCharType x) noexcept + { + return x; + } + + private: + /// whether we can assume little endianness + const bool is_little_endian = little_endianness(); + + /// the output + output_adapter_t<CharType> oa = nullptr; +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/output/output_adapters.hpp> + +// #include <nlohmann/detail/output/serializer.hpp> + + +#include <algorithm> // reverse, remove, fill, find, none_of +#include <array> // array +#include <clocale> // localeconv, lconv +#include <cmath> // labs, isfinite, isnan, signbit +#include <cstddef> // size_t, ptrdiff_t +#include <cstdint> // uint8_t +#include <cstdio> // snprintf +#include 
<limits> // numeric_limits +#include <string> // string, char_traits +#include <iomanip> // setfill, setw +#include <type_traits> // is_same +#include <utility> // move + +// #include <nlohmann/detail/conversions/to_chars.hpp> + + +#include <array> // array +#include <cmath> // signbit, isfinite +#include <cstdint> // intN_t, uintN_t +#include <cstring> // memcpy, memmove +#include <limits> // numeric_limits +#include <type_traits> // conditional + +// #include <nlohmann/detail/macro_scope.hpp> + + +namespace nlohmann +{ +namespace detail +{ + +/*! +@brief implements the Grisu2 algorithm for binary to decimal floating-point +conversion. + +This implementation is a slightly modified version of the reference +implementation which may be obtained from +http://florian.loitsch.com/publications (bench.tar.gz). + +The code is distributed under the MIT license, Copyright (c) 2009 Florian Loitsch. + +For a detailed description of the algorithm see: + +[1] Loitsch, "Printing Floating-Point Numbers Quickly and Accurately with + Integers", Proceedings of the ACM SIGPLAN 2010 Conference on Programming + Language Design and Implementation, PLDI 2010 +[2] Burger, Dybvig, "Printing Floating-Point Numbers Quickly and Accurately", + Proceedings of the ACM SIGPLAN 1996 Conference on Programming Language + Design and Implementation, PLDI 1996 +*/ +namespace dtoa_impl +{ + +template<typename Target, typename Source> +Target reinterpret_bits(const Source source) +{ + static_assert(sizeof(Target) == sizeof(Source), "size mismatch"); + + Target target; + std::memcpy(&target, &source, sizeof(Source)); + return target; +} + +struct diyfp // f * 2^e +{ + static constexpr int kPrecision = 64; // = q + + std::uint64_t f = 0; + int e = 0; + + constexpr diyfp(std::uint64_t f_, int e_) noexcept : f(f_), e(e_) {} + + /*! + @brief returns x - y + @pre x.e == y.e and x.f >= y.f + */ + static diyfp sub(const diyfp& x, const diyfp& y) noexcept + { + JSON_ASSERT(x.e == y.e); + JSON_ASSERT(x.f >= y.f); + + return {x.f - y.f, x.e}; + } + + /*! + @brief returns x * y + @note The result is rounded. (Only the upper q bits are returned.) 
+ */ + static diyfp mul(const diyfp& x, const diyfp& y) noexcept + { + static_assert(kPrecision == 64, "internal error"); + + // Computes: + // f = round((x.f * y.f) / 2^q) + // e = x.e + y.e + q + + // Emulate the 64-bit * 64-bit multiplication: + // + // p = u * v + // = (u_lo + 2^32 u_hi) (v_lo + 2^32 v_hi) + // = (u_lo v_lo ) + 2^32 ((u_lo v_hi ) + (u_hi v_lo )) + 2^64 (u_hi v_hi ) + // = (p0 ) + 2^32 ((p1 ) + (p2 )) + 2^64 (p3 ) + // = (p0_lo + 2^32 p0_hi) + 2^32 ((p1_lo + 2^32 p1_hi) + (p2_lo + 2^32 p2_hi)) + 2^64 (p3 ) + // = (p0_lo ) + 2^32 (p0_hi + p1_lo + p2_lo ) + 2^64 (p1_hi + p2_hi + p3) + // = (p0_lo ) + 2^32 (Q ) + 2^64 (H ) + // = (p0_lo ) + 2^32 (Q_lo + 2^32 Q_hi ) + 2^64 (H ) + // + // (Since Q might be larger than 2^32 - 1) + // + // = (p0_lo + 2^32 Q_lo) + 2^64 (Q_hi + H) + // + // (Q_hi + H does not overflow a 64-bit int) + // + // = p_lo + 2^64 p_hi + + const std::uint64_t u_lo = x.f & 0xFFFFFFFFu; + const std::uint64_t u_hi = x.f >> 32u; + const std::uint64_t v_lo = y.f & 0xFFFFFFFFu; + const std::uint64_t v_hi = y.f >> 32u; + + const std::uint64_t p0 = u_lo * v_lo; + const std::uint64_t p1 = u_lo * v_hi; + const std::uint64_t p2 = u_hi * v_lo; + const std::uint64_t p3 = u_hi * v_hi; + + const std::uint64_t p0_hi = p0 >> 32u; + const std::uint64_t p1_lo = p1 & 0xFFFFFFFFu; + const std::uint64_t p1_hi = p1 >> 32u; + const std::uint64_t p2_lo = p2 & 0xFFFFFFFFu; + const std::uint64_t p2_hi = p2 >> 32u; + + std::uint64_t Q = p0_hi + p1_lo + p2_lo; + + // The full product might now be computed as + // + // p_hi = p3 + p2_hi + p1_hi + (Q >> 32) + // p_lo = p0_lo + (Q << 32) + // + // But in this particular case here, the full p_lo is not required. + // Effectively we only need to add the highest bit in p_lo to p_hi (and + // Q_hi + 1 does not overflow). + + Q += std::uint64_t{1} << (64u - 32u - 1u); // round, ties up + + const std::uint64_t h = p3 + p2_hi + p1_hi + (Q >> 32u); + + return {h, x.e + y.e + 64}; + } + + /*! + @brief normalize x such that the significand is >= 2^(q-1) + @pre x.f != 0 + */ + static diyfp normalize(diyfp x) noexcept + { + JSON_ASSERT(x.f != 0); + + while ((x.f >> 63u) == 0) + { + x.f <<= 1u; + x.e--; + } + + return x; + } + + /*! + @brief normalize x such that the result has the exponent E + @pre e >= x.e and the upper e - x.e bits of x.f must be zero. + */ + static diyfp normalize_to(const diyfp& x, const int target_exponent) noexcept + { + const int delta = x.e - target_exponent; + + JSON_ASSERT(delta >= 0); + JSON_ASSERT(((x.f << delta) >> delta) == x.f); + + return {x.f << delta, target_exponent}; + } +}; + +struct boundaries +{ + diyfp w; + diyfp minus; + diyfp plus; +}; + +/*! +Compute the (normalized) diyfp representing the input number 'value' and its +boundaries. + +@pre value must be finite and positive +*/ +template<typename FloatType> +boundaries compute_boundaries(FloatType value) +{ + JSON_ASSERT(std::isfinite(value)); + JSON_ASSERT(value > 0); + + // Convert the IEEE representation into a diyfp. 
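+    // (Worked example, binary64: value = 1.0 has bits = 0x3FF0000000000000,
+    //  hence E = 0x3FF = 1023, F = 0, kBias = 1075, and
+    //  v = diyfp(2^52, 1023 - 1075), i.e. 2^52 * 2^-52 = 1.)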
+ // + // If v is denormal: + // value = 0.F * 2^(1 - bias) = ( F) * 2^(1 - bias - (p-1)) + // If v is normalized: + // value = 1.F * 2^(E - bias) = (2^(p-1) + F) * 2^(E - bias - (p-1)) + + static_assert(std::numeric_limits<FloatType>::is_iec559, + "internal error: dtoa_short requires an IEEE-754 floating-point implementation"); + + constexpr int kPrecision = std::numeric_limits<FloatType>::digits; // = p (includes the hidden bit) + constexpr int kBias = std::numeric_limits<FloatType>::max_exponent - 1 + (kPrecision - 1); + constexpr int kMinExp = 1 - kBias; + constexpr std::uint64_t kHiddenBit = std::uint64_t{1} << (kPrecision - 1); // = 2^(p-1) + + using bits_type = typename std::conditional<kPrecision == 24, std::uint32_t, std::uint64_t >::type; + + const auto bits = static_cast<std::uint64_t>(reinterpret_bits<bits_type>(value)); + const std::uint64_t E = bits >> (kPrecision - 1); + const std::uint64_t F = bits & (kHiddenBit - 1); + + const bool is_denormal = E == 0; + const diyfp v = is_denormal + ? diyfp(F, kMinExp) + : diyfp(F + kHiddenBit, static_cast<int>(E) - kBias); + + // Compute the boundaries m- and m+ of the floating-point value + // v = f * 2^e. + // + // Determine v- and v+, the floating-point predecessor and successor if v, + // respectively. + // + // v- = v - 2^e if f != 2^(p-1) or e == e_min (A) + // = v - 2^(e-1) if f == 2^(p-1) and e > e_min (B) + // + // v+ = v + 2^e + // + // Let m- = (v- + v) / 2 and m+ = (v + v+) / 2. All real numbers _strictly_ + // between m- and m+ round to v, regardless of how the input rounding + // algorithm breaks ties. + // + // ---+-------------+-------------+-------------+-------------+--- (A) + // v- m- v m+ v+ + // + // -----------------+------+------+-------------+-------------+--- (B) + // v- m- v m+ v+ + + const bool lower_boundary_is_closer = F == 0 && E > 1; + const diyfp m_plus = diyfp(2 * v.f + 1, v.e - 1); + const diyfp m_minus = lower_boundary_is_closer + ? diyfp(4 * v.f - 1, v.e - 2) // (B) + : diyfp(2 * v.f - 1, v.e - 1); // (A) + + // Determine the normalized w+ = m+. + const diyfp w_plus = diyfp::normalize(m_plus); + + // Determine w- = m- such that e_(w-) = e_(w+). + const diyfp w_minus = diyfp::normalize_to(m_minus, w_plus.e); + + return {diyfp::normalize(v), w_minus, w_plus}; +} + +// Given normalized diyfp w, Grisu needs to find a (normalized) cached +// power-of-ten c, such that the exponent of the product c * w = f * 2^e lies +// within a certain range [alpha, gamma] (Definition 3.2 from [1]) +// +// alpha <= e = e_c + e_w + q <= gamma +// +// or +// +// f_c * f_w * 2^alpha <= f_c 2^(e_c) * f_w 2^(e_w) * 2^q +// <= f_c * f_w * 2^gamma +// +// Since c and w are normalized, i.e. 2^(q-1) <= f < 2^q, this implies +// +// 2^(q-1) * 2^(q-1) * 2^alpha <= c * w * 2^q < 2^q * 2^q * 2^gamma +// +// or +// +// 2^(q - 2 + alpha) <= c * w < 2^(q + gamma) +// +// The choice of (alpha,gamma) determines the size of the table and the form of +// the digit generation procedure. Using (alpha,gamma)=(-60,-32) works out well +// in practice: +// +// The idea is to cut the number c * w = f * 2^e into two parts, which can be +// processed independently: An integral part p1, and a fractional part p2: +// +// f * 2^e = ( (f div 2^-e) * 2^-e + (f mod 2^-e) ) * 2^e +// = (f div 2^-e) + (f mod 2^-e) * 2^e +// = p1 + p2 * 2^e +// +// The conversion of p1 into decimal form requires a series of divisions and +// modulos by (a power of) 10. 
These operations are faster for 32-bit than for +// 64-bit integers, so p1 should ideally fit into a 32-bit integer. This can be +// achieved by choosing +// +// -e >= 32 or e <= -32 := gamma +// +// In order to convert the fractional part +// +// p2 * 2^e = p2 / 2^-e = d[-1] / 10^1 + d[-2] / 10^2 + ... +// +// into decimal form, the fraction is repeatedly multiplied by 10 and the digits +// d[-i] are extracted in order: +// +// (10 * p2) div 2^-e = d[-1] +// (10 * p2) mod 2^-e = d[-2] / 10^1 + ... +// +// The multiplication by 10 must not overflow. It is sufficient to choose +// +// 10 * p2 < 16 * p2 = 2^4 * p2 <= 2^64. +// +// Since p2 = f mod 2^-e < 2^-e, +// +// -e <= 60 or e >= -60 := alpha + +constexpr int kAlpha = -60; +constexpr int kGamma = -32; + +struct cached_power // c = f * 2^e ~= 10^k +{ + std::uint64_t f; + int e; + int k; +}; + +/*! +For a normalized diyfp w = f * 2^e, this function returns a (normalized) cached +power-of-ten c = f_c * 2^e_c, such that the exponent of the product w * c +satisfies (Definition 3.2 from [1]) + + alpha <= e_c + e + q <= gamma. +*/ +inline cached_power get_cached_power_for_binary_exponent(int e) +{ + // Now + // + // alpha <= e_c + e + q <= gamma (1) + // ==> f_c * 2^alpha <= c * 2^e * 2^q + // + // and since the c's are normalized, 2^(q-1) <= f_c, + // + // ==> 2^(q - 1 + alpha) <= c * 2^(e + q) + // ==> 2^(alpha - e - 1) <= c + // + // If c were an exact power of ten, i.e. c = 10^k, one may determine k as + // + // k = ceil( log_10( 2^(alpha - e - 1) ) ) + // = ceil( (alpha - e - 1) * log_10(2) ) + // + // From the paper: + // "In theory the result of the procedure could be wrong since c is rounded, + // and the computation itself is approximated [...]. In practice, however, + // this simple function is sufficient." + // + // For IEEE double precision floating-point numbers converted into + // normalized diyfp's w = f * 2^e, with q = 64, + // + // e >= -1022 (min IEEE exponent) + // -52 (p - 1) + // -52 (p - 1, possibly normalize denormal IEEE numbers) + // -11 (normalize the diyfp) + // = -1137 + // + // and + // + // e <= +1023 (max IEEE exponent) + // -52 (p - 1) + // -11 (normalize the diyfp) + // = 960 + // + // This binary exponent range [-1137,960] results in a decimal exponent + // range [-307,324]. One does not need to store a cached power for each + // k in this range. For each such k it suffices to find a cached power + // such that the exponent of the product lies in [alpha,gamma]. + // This implies that the difference of the decimal exponents of adjacent + // table entries must be less than or equal to + // + // floor( (gamma - alpha) * log_10(2) ) = 8. + // + // (A smaller distance gamma-alpha would require a larger table.) + + // NB: + // Actually this function returns c, such that -60 <= e_c + e + 64 <= -34. 
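+    // Worked example for the lookup below: e.g. for e = -63 (the exponent of
+    // the normalized boundaries of a double in [1,2)), k = (2 * 78913) / 2^18 + 1 = 1
+    // and index = (300 + 1 + 7) / 8 = 38, which selects the cached entry
+    // { 0x9C40000000000000, -50, 4 }, i.e. c ~= 10^4; indeed
+    // -50 + (-63) + 64 = -49 lies in [kAlpha, kGamma].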
+ + constexpr int kCachedPowersMinDecExp = -300; + constexpr int kCachedPowersDecStep = 8; + + static constexpr std::array<cached_power, 79> kCachedPowers = + { + { + { 0xAB70FE17C79AC6CA, -1060, -300 }, + { 0xFF77B1FCBEBCDC4F, -1034, -292 }, + { 0xBE5691EF416BD60C, -1007, -284 }, + { 0x8DD01FAD907FFC3C, -980, -276 }, + { 0xD3515C2831559A83, -954, -268 }, + { 0x9D71AC8FADA6C9B5, -927, -260 }, + { 0xEA9C227723EE8BCB, -901, -252 }, + { 0xAECC49914078536D, -874, -244 }, + { 0x823C12795DB6CE57, -847, -236 }, + { 0xC21094364DFB5637, -821, -228 }, + { 0x9096EA6F3848984F, -794, -220 }, + { 0xD77485CB25823AC7, -768, -212 }, + { 0xA086CFCD97BF97F4, -741, -204 }, + { 0xEF340A98172AACE5, -715, -196 }, + { 0xB23867FB2A35B28E, -688, -188 }, + { 0x84C8D4DFD2C63F3B, -661, -180 }, + { 0xC5DD44271AD3CDBA, -635, -172 }, + { 0x936B9FCEBB25C996, -608, -164 }, + { 0xDBAC6C247D62A584, -582, -156 }, + { 0xA3AB66580D5FDAF6, -555, -148 }, + { 0xF3E2F893DEC3F126, -529, -140 }, + { 0xB5B5ADA8AAFF80B8, -502, -132 }, + { 0x87625F056C7C4A8B, -475, -124 }, + { 0xC9BCFF6034C13053, -449, -116 }, + { 0x964E858C91BA2655, -422, -108 }, + { 0xDFF9772470297EBD, -396, -100 }, + { 0xA6DFBD9FB8E5B88F, -369, -92 }, + { 0xF8A95FCF88747D94, -343, -84 }, + { 0xB94470938FA89BCF, -316, -76 }, + { 0x8A08F0F8BF0F156B, -289, -68 }, + { 0xCDB02555653131B6, -263, -60 }, + { 0x993FE2C6D07B7FAC, -236, -52 }, + { 0xE45C10C42A2B3B06, -210, -44 }, + { 0xAA242499697392D3, -183, -36 }, + { 0xFD87B5F28300CA0E, -157, -28 }, + { 0xBCE5086492111AEB, -130, -20 }, + { 0x8CBCCC096F5088CC, -103, -12 }, + { 0xD1B71758E219652C, -77, -4 }, + { 0x9C40000000000000, -50, 4 }, + { 0xE8D4A51000000000, -24, 12 }, + { 0xAD78EBC5AC620000, 3, 20 }, + { 0x813F3978F8940984, 30, 28 }, + { 0xC097CE7BC90715B3, 56, 36 }, + { 0x8F7E32CE7BEA5C70, 83, 44 }, + { 0xD5D238A4ABE98068, 109, 52 }, + { 0x9F4F2726179A2245, 136, 60 }, + { 0xED63A231D4C4FB27, 162, 68 }, + { 0xB0DE65388CC8ADA8, 189, 76 }, + { 0x83C7088E1AAB65DB, 216, 84 }, + { 0xC45D1DF942711D9A, 242, 92 }, + { 0x924D692CA61BE758, 269, 100 }, + { 0xDA01EE641A708DEA, 295, 108 }, + { 0xA26DA3999AEF774A, 322, 116 }, + { 0xF209787BB47D6B85, 348, 124 }, + { 0xB454E4A179DD1877, 375, 132 }, + { 0x865B86925B9BC5C2, 402, 140 }, + { 0xC83553C5C8965D3D, 428, 148 }, + { 0x952AB45CFA97A0B3, 455, 156 }, + { 0xDE469FBD99A05FE3, 481, 164 }, + { 0xA59BC234DB398C25, 508, 172 }, + { 0xF6C69A72A3989F5C, 534, 180 }, + { 0xB7DCBF5354E9BECE, 561, 188 }, + { 0x88FCF317F22241E2, 588, 196 }, + { 0xCC20CE9BD35C78A5, 614, 204 }, + { 0x98165AF37B2153DF, 641, 212 }, + { 0xE2A0B5DC971F303A, 667, 220 }, + { 0xA8D9D1535CE3B396, 694, 228 }, + { 0xFB9B7CD9A4A7443C, 720, 236 }, + { 0xBB764C4CA7A44410, 747, 244 }, + { 0x8BAB8EEFB6409C1A, 774, 252 }, + { 0xD01FEF10A657842C, 800, 260 }, + { 0x9B10A4E5E9913129, 827, 268 }, + { 0xE7109BFBA19C0C9D, 853, 276 }, + { 0xAC2820D9623BF429, 880, 284 }, + { 0x80444B5E7AA7CF85, 907, 292 }, + { 0xBF21E44003ACDD2D, 933, 300 }, + { 0x8E679C2F5E44FF8F, 960, 308 }, + { 0xD433179D9C8CB841, 986, 316 }, + { 0x9E19DB92B4E31BA9, 1013, 324 }, + } + }; + + // This computation gives exactly the same results for k as + // k = ceil((kAlpha - e - 1) * 0.30102999566398114) + // for |e| <= 1500, but doesn't require floating-point operations. 
+ // NB: log_10(2) ~= 78913 / 2^18 + JSON_ASSERT(e >= -1500); + JSON_ASSERT(e <= 1500); + const int f = kAlpha - e - 1; + const int k = (f * 78913) / (1 << 18) + static_cast<int>(f > 0); + + const int index = (-kCachedPowersMinDecExp + k + (kCachedPowersDecStep - 1)) / kCachedPowersDecStep; + JSON_ASSERT(index >= 0); + JSON_ASSERT(static_cast<std::size_t>(index) < kCachedPowers.size()); + + const cached_power cached = kCachedPowers[static_cast<std::size_t>(index)]; + JSON_ASSERT(kAlpha <= cached.e + e + 64); + JSON_ASSERT(kGamma >= cached.e + e + 64); + + return cached; +} + +/*! +For n != 0, returns k, such that pow10 := 10^(k-1) <= n < 10^k. +For n == 0, returns 1 and sets pow10 := 1. +*/ +inline int find_largest_pow10(const std::uint32_t n, std::uint32_t& pow10) +{ + // LCOV_EXCL_START + if (n >= 1000000000) + { + pow10 = 1000000000; + return 10; + } + // LCOV_EXCL_STOP + if (n >= 100000000) + { + pow10 = 100000000; + return 9; + } + if (n >= 10000000) + { + pow10 = 10000000; + return 8; + } + if (n >= 1000000) + { + pow10 = 1000000; + return 7; + } + if (n >= 100000) + { + pow10 = 100000; + return 6; + } + if (n >= 10000) + { + pow10 = 10000; + return 5; + } + if (n >= 1000) + { + pow10 = 1000; + return 4; + } + if (n >= 100) + { + pow10 = 100; + return 3; + } + if (n >= 10) + { + pow10 = 10; + return 2; + } + + pow10 = 1; + return 1; +} + +inline void grisu2_round(char* buf, int len, std::uint64_t dist, std::uint64_t delta, + std::uint64_t rest, std::uint64_t ten_k) +{ + JSON_ASSERT(len >= 1); + JSON_ASSERT(dist <= delta); + JSON_ASSERT(rest <= delta); + JSON_ASSERT(ten_k > 0); + + // <--------------------------- delta ----> + // <---- dist ---------> + // --------------[------------------+-------------------]-------------- + // M- w M+ + // + // ten_k + // <------> + // <---- rest ----> + // --------------[------------------+----+--------------]-------------- + // w V + // = buf * 10^k + // + // ten_k represents a unit-in-the-last-place in the decimal representation + // stored in buf. + // Decrement buf by ten_k while this takes buf closer to w. + + // The tests are written in this order to avoid overflow in unsigned + // integer arithmetic. + + while (rest < dist + && delta - rest >= ten_k + && (rest + ten_k < dist || dist - rest > rest + ten_k - dist)) + { + JSON_ASSERT(buf[len - 1] != '0'); + buf[len - 1]--; + rest += ten_k; + } +} + +/*! +Generates V = buffer * 10^decimal_exponent, such that M- <= V <= M+. +M- and M+ must be normalized and share the same exponent -60 <= e <= -32. +*/ +inline void grisu2_digit_gen(char* buffer, int& length, int& decimal_exponent, + diyfp M_minus, diyfp w, diyfp M_plus) +{ + static_assert(kAlpha >= -60, "internal error"); + static_assert(kGamma <= -32, "internal error"); + + // Generates the digits (and the exponent) of a decimal floating-point + // number V = buffer * 10^decimal_exponent in the range [M-, M+]. The diyfp's + // w, M- and M+ share the same exponent e, which satisfies alpha <= e <= gamma. + // + // <--------------------------- delta ----> + // <---- dist ---------> + // --------------[------------------+-------------------]-------------- + // M- w M+ + // + // Grisu2 generates the digits of M+ from left to right and stops as soon as + // V is in [M-,M+]. 
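+    // In the code below, delta = (M+ - M-) is the width of the interval of
+    // values that still round back to w, and dist = (M+ - w) is what
+    // grisu2_round later uses to pull the last emitted digit towards w.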
+ + JSON_ASSERT(M_plus.e >= kAlpha); + JSON_ASSERT(M_plus.e <= kGamma); + + std::uint64_t delta = diyfp::sub(M_plus, M_minus).f; // (significand of (M+ - M-), implicit exponent is e) + std::uint64_t dist = diyfp::sub(M_plus, w ).f; // (significand of (M+ - w ), implicit exponent is e) + + // Split M+ = f * 2^e into two parts p1 and p2 (note: e < 0): + // + // M+ = f * 2^e + // = ((f div 2^-e) * 2^-e + (f mod 2^-e)) * 2^e + // = ((p1 ) * 2^-e + (p2 )) * 2^e + // = p1 + p2 * 2^e + + const diyfp one(std::uint64_t{1} << -M_plus.e, M_plus.e); + + auto p1 = static_cast<std::uint32_t>(M_plus.f >> -one.e); // p1 = f div 2^-e (Since -e >= 32, p1 fits into a 32-bit int.) + std::uint64_t p2 = M_plus.f & (one.f - 1); // p2 = f mod 2^-e + + // 1) + // + // Generate the digits of the integral part p1 = d[n-1]...d[1]d[0] + + JSON_ASSERT(p1 > 0); + + std::uint32_t pow10{}; + const int k = find_largest_pow10(p1, pow10); + + // 10^(k-1) <= p1 < 10^k, pow10 = 10^(k-1) + // + // p1 = (p1 div 10^(k-1)) * 10^(k-1) + (p1 mod 10^(k-1)) + // = (d[k-1] ) * 10^(k-1) + (p1 mod 10^(k-1)) + // + // M+ = p1 + p2 * 2^e + // = d[k-1] * 10^(k-1) + (p1 mod 10^(k-1)) + p2 * 2^e + // = d[k-1] * 10^(k-1) + ((p1 mod 10^(k-1)) * 2^-e + p2) * 2^e + // = d[k-1] * 10^(k-1) + ( rest) * 2^e + // + // Now generate the digits d[n] of p1 from left to right (n = k-1,...,0) + // + // p1 = d[k-1]...d[n] * 10^n + d[n-1]...d[0] + // + // but stop as soon as + // + // rest * 2^e = (d[n-1]...d[0] * 2^-e + p2) * 2^e <= delta * 2^e + + int n = k; + while (n > 0) + { + // Invariants: + // M+ = buffer * 10^n + (p1 + p2 * 2^e) (buffer = 0 for n = k) + // pow10 = 10^(n-1) <= p1 < 10^n + // + const std::uint32_t d = p1 / pow10; // d = p1 div 10^(n-1) + const std::uint32_t r = p1 % pow10; // r = p1 mod 10^(n-1) + // + // M+ = buffer * 10^n + (d * 10^(n-1) + r) + p2 * 2^e + // = (buffer * 10 + d) * 10^(n-1) + (r + p2 * 2^e) + // + JSON_ASSERT(d <= 9); + buffer[length++] = static_cast<char>('0' + d); // buffer := buffer * 10 + d + // + // M+ = buffer * 10^(n-1) + (r + p2 * 2^e) + // + p1 = r; + n--; + // + // M+ = buffer * 10^n + (p1 + p2 * 2^e) + // pow10 = 10^n + // + + // Now check if enough digits have been generated. + // Compute + // + // p1 + p2 * 2^e = (p1 * 2^-e + p2) * 2^e = rest * 2^e + // + // Note: + // Since rest and delta share the same exponent e, it suffices to + // compare the significands. + const std::uint64_t rest = (std::uint64_t{p1} << -one.e) + p2; + if (rest <= delta) + { + // V = buffer * 10^n, with M- <= V <= M+. + + decimal_exponent += n; + + // We may now just stop. But instead look if the buffer could be + // decremented to bring V closer to w. + // + // pow10 = 10^n is now 1 ulp in the decimal representation V. + // The rounding procedure works with diyfp's with an implicit + // exponent of e. + // + // 10^n = (10^n * 2^-e) * 2^e = ulp * 2^e + // + const std::uint64_t ten_n = std::uint64_t{pow10} << -one.e; + grisu2_round(buffer, length, dist, delta, rest, ten_n); + + return; + } + + pow10 /= 10; + // + // pow10 = 10^(n-1) <= p1 < 10^n + // Invariants restored. + } + + // 2) + // + // The digits of the integral part have been generated: + // + // M+ = d[k-1]...d[1]d[0] + p2 * 2^e + // = buffer + p2 * 2^e + // + // Now generate the digits of the fractional part p2 * 2^e. + // + // Note: + // No decimal point is generated: the exponent is adjusted instead. + // + // p2 actually represents the fraction + // + // p2 * 2^e + // = p2 / 2^-e + // = d[-1] / 10^1 + d[-2] / 10^2 + ... 
+ // + // Now generate the digits d[-m] of p1 from left to right (m = 1,2,...) + // + // p2 * 2^e = d[-1]d[-2]...d[-m] * 10^-m + // + 10^-m * (d[-m-1] / 10^1 + d[-m-2] / 10^2 + ...) + // + // using + // + // 10^m * p2 = ((10^m * p2) div 2^-e) * 2^-e + ((10^m * p2) mod 2^-e) + // = ( d) * 2^-e + ( r) + // + // or + // 10^m * p2 * 2^e = d + r * 2^e + // + // i.e. + // + // M+ = buffer + p2 * 2^e + // = buffer + 10^-m * (d + r * 2^e) + // = (buffer * 10^m + d) * 10^-m + 10^-m * r * 2^e + // + // and stop as soon as 10^-m * r * 2^e <= delta * 2^e + + JSON_ASSERT(p2 > delta); + + int m = 0; + for (;;) + { + // Invariant: + // M+ = buffer * 10^-m + 10^-m * (d[-m-1] / 10 + d[-m-2] / 10^2 + ...) * 2^e + // = buffer * 10^-m + 10^-m * (p2 ) * 2^e + // = buffer * 10^-m + 10^-m * (1/10 * (10 * p2) ) * 2^e + // = buffer * 10^-m + 10^-m * (1/10 * ((10*p2 div 2^-e) * 2^-e + (10*p2 mod 2^-e)) * 2^e + // + JSON_ASSERT(p2 <= (std::numeric_limits<std::uint64_t>::max)() / 10); + p2 *= 10; + const std::uint64_t d = p2 >> -one.e; // d = (10 * p2) div 2^-e + const std::uint64_t r = p2 & (one.f - 1); // r = (10 * p2) mod 2^-e + // + // M+ = buffer * 10^-m + 10^-m * (1/10 * (d * 2^-e + r) * 2^e + // = buffer * 10^-m + 10^-m * (1/10 * (d + r * 2^e)) + // = (buffer * 10 + d) * 10^(-m-1) + 10^(-m-1) * r * 2^e + // + JSON_ASSERT(d <= 9); + buffer[length++] = static_cast<char>('0' + d); // buffer := buffer * 10 + d + // + // M+ = buffer * 10^(-m-1) + 10^(-m-1) * r * 2^e + // + p2 = r; + m++; + // + // M+ = buffer * 10^-m + 10^-m * p2 * 2^e + // Invariant restored. + + // Check if enough digits have been generated. + // + // 10^-m * p2 * 2^e <= delta * 2^e + // p2 * 2^e <= 10^m * delta * 2^e + // p2 <= 10^m * delta + delta *= 10; + dist *= 10; + if (p2 <= delta) + { + break; + } + } + + // V = buffer * 10^-m, with M- <= V <= M+. + + decimal_exponent -= m; + + // 1 ulp in the decimal representation is now 10^-m. + // Since delta and dist are now scaled by 10^m, we need to do the + // same with ulp in order to keep the units in sync. + // + // 10^m * 10^-m = 1 = 2^-e * 2^e = ten_m * 2^e + // + const std::uint64_t ten_m = one.f; + grisu2_round(buffer, length, dist, delta, p2, ten_m); + + // By construction this algorithm generates the shortest possible decimal + // number (Loitsch, Theorem 6.2) which rounds back to w. + // For an input number of precision p, at least + // + // N = 1 + ceil(p * log_10(2)) + // + // decimal digits are sufficient to identify all binary floating-point + // numbers (Matula, "In-and-Out conversions"). + // This implies that the algorithm does not produce more than N decimal + // digits. + // + // N = 17 for p = 53 (IEEE double precision) + // N = 9 for p = 24 (IEEE single precision) +} + +/*! +v = buf * 10^decimal_exponent +len is the length of the buffer (number of decimal digits) +The buffer must be large enough, i.e. >= max_digits10. +*/ +JSON_HEDLEY_NON_NULL(1) +inline void grisu2(char* buf, int& len, int& decimal_exponent, + diyfp m_minus, diyfp v, diyfp m_plus) +{ + JSON_ASSERT(m_plus.e == m_minus.e); + JSON_ASSERT(m_plus.e == v.e); + + // --------(-----------------------+-----------------------)-------- (A) + // m- v m+ + // + // --------------------(-----------+-----------------------)-------- (B) + // m- v m+ + // + // First scale v (and m- and m+) such that the exponent is in the range + // [alpha, gamma]. 
+ + const cached_power cached = get_cached_power_for_binary_exponent(m_plus.e); + + const diyfp c_minus_k(cached.f, cached.e); // = c ~= 10^-k + + // The exponent of the products is = v.e + c_minus_k.e + q and is in the range [alpha,gamma] + const diyfp w = diyfp::mul(v, c_minus_k); + const diyfp w_minus = diyfp::mul(m_minus, c_minus_k); + const diyfp w_plus = diyfp::mul(m_plus, c_minus_k); + + // ----(---+---)---------------(---+---)---------------(---+---)---- + // w- w w+ + // = c*m- = c*v = c*m+ + // + // diyfp::mul rounds its result and c_minus_k is approximated too. w, w- and + // w+ are now off by a small amount. + // In fact: + // + // w - v * 10^k < 1 ulp + // + // To account for this inaccuracy, add resp. subtract 1 ulp. + // + // --------+---[---------------(---+---)---------------]---+-------- + // w- M- w M+ w+ + // + // Now any number in [M-, M+] (bounds included) will round to w when input, + // regardless of how the input rounding algorithm breaks ties. + // + // And digit_gen generates the shortest possible such number in [M-, M+]. + // Note that this does not mean that Grisu2 always generates the shortest + // possible number in the interval (m-, m+). + const diyfp M_minus(w_minus.f + 1, w_minus.e); + const diyfp M_plus (w_plus.f - 1, w_plus.e ); + + decimal_exponent = -cached.k; // = -(-k) = k + + grisu2_digit_gen(buf, len, decimal_exponent, M_minus, w, M_plus); +} + +/*! +v = buf * 10^decimal_exponent +len is the length of the buffer (number of decimal digits) +The buffer must be large enough, i.e. >= max_digits10. +*/ +template<typename FloatType> +JSON_HEDLEY_NON_NULL(1) +void grisu2(char* buf, int& len, int& decimal_exponent, FloatType value) +{ + static_assert(diyfp::kPrecision >= std::numeric_limits<FloatType>::digits + 3, + "internal error: not enough precision"); + + JSON_ASSERT(std::isfinite(value)); + JSON_ASSERT(value > 0); + + // If the neighbors (and boundaries) of 'value' are always computed for double-precision + // numbers, all float's can be recovered using strtod (and strtof). However, the resulting + // decimal representations are not exactly "short". + // + // The documentation for 'std::to_chars' (https://en.cppreference.com/w/cpp/utility/to_chars) + // says "value is converted to a string as if by std::sprintf in the default ("C") locale" + // and since sprintf promotes floats to doubles, I think this is exactly what 'std::to_chars' + // does. + // On the other hand, the documentation for 'std::to_chars' requires that "parsing the + // representation using the corresponding std::from_chars function recovers value exactly". That + // indicates that single precision floating-point numbers should be recovered using + // 'std::strtof'. + // + // NB: If the neighbors are computed for single-precision numbers, there is a single float + // (7.0385307e-26f) which can't be recovered using strtod. The resulting double precision + // value is off by 1 ulp. +#if 0 + const boundaries w = compute_boundaries(static_cast<double>(value)); +#else + const boundaries w = compute_boundaries(value); +#endif + + grisu2(buf, len, decimal_exponent, w.minus, w.w, w.plus); +} + +/*! +@brief appends a decimal representation of e to buf +@return a pointer to the element following the exponent. 
+@pre -1000 < e < 1000 +*/ +JSON_HEDLEY_NON_NULL(1) +JSON_HEDLEY_RETURNS_NON_NULL +inline char* append_exponent(char* buf, int e) +{ + JSON_ASSERT(e > -1000); + JSON_ASSERT(e < 1000); + + if (e < 0) + { + e = -e; + *buf++ = '-'; + } + else + { + *buf++ = '+'; + } + + auto k = static_cast<std::uint32_t>(e); + if (k < 10) + { + // Always print at least two digits in the exponent. + // This is for compatibility with printf("%g"). + *buf++ = '0'; + *buf++ = static_cast<char>('0' + k); + } + else if (k < 100) + { + *buf++ = static_cast<char>('0' + k / 10); + k %= 10; + *buf++ = static_cast<char>('0' + k); + } + else + { + *buf++ = static_cast<char>('0' + k / 100); + k %= 100; + *buf++ = static_cast<char>('0' + k / 10); + k %= 10; + *buf++ = static_cast<char>('0' + k); + } + + return buf; +} + +/*! +@brief prettify v = buf * 10^decimal_exponent + +If v is in the range [10^min_exp, 10^max_exp) it will be printed in fixed-point +notation. Otherwise it will be printed in exponential notation. + +@pre min_exp < 0 +@pre max_exp > 0 +*/ +JSON_HEDLEY_NON_NULL(1) +JSON_HEDLEY_RETURNS_NON_NULL +inline char* format_buffer(char* buf, int len, int decimal_exponent, + int min_exp, int max_exp) +{ + JSON_ASSERT(min_exp < 0); + JSON_ASSERT(max_exp > 0); + + const int k = len; + const int n = len + decimal_exponent; + + // v = buf * 10^(n-k) + // k is the length of the buffer (number of decimal digits) + // n is the position of the decimal point relative to the start of the buffer. + + if (k <= n && n <= max_exp) + { + // digits[000] + // len <= max_exp + 2 + + std::memset(buf + k, '0', static_cast<size_t>(n) - static_cast<size_t>(k)); + // Make it look like a floating-point number (#362, #378) + buf[n + 0] = '.'; + buf[n + 1] = '0'; + return buf + (static_cast<size_t>(n) + 2); + } + + if (0 < n && n <= max_exp) + { + // dig.its + // len <= max_digits10 + 1 + + JSON_ASSERT(k > n); + + std::memmove(buf + (static_cast<size_t>(n) + 1), buf + n, static_cast<size_t>(k) - static_cast<size_t>(n)); + buf[n] = '.'; + return buf + (static_cast<size_t>(k) + 1U); + } + + if (min_exp < n && n <= 0) + { + // 0.[000]digits + // len <= 2 + (-min_exp - 1) + max_digits10 + + std::memmove(buf + (2 + static_cast<size_t>(-n)), buf, static_cast<size_t>(k)); + buf[0] = '0'; + buf[1] = '.'; + std::memset(buf + 2, '0', static_cast<size_t>(-n)); + return buf + (2U + static_cast<size_t>(-n) + static_cast<size_t>(k)); + } + + if (k == 1) + { + // dE+123 + // len <= 1 + 5 + + buf += 1; + } + else + { + // d.igitsE+123 + // len <= max_digits10 + 1 + 5 + + std::memmove(buf + 2, buf + 1, static_cast<size_t>(k) - 1); + buf[1] = '.'; + buf += 1 + static_cast<size_t>(k); + } + + *buf++ = 'e'; + return append_exponent(buf, n - 1); +} + +} // namespace dtoa_impl + +/*! +@brief generates a decimal representation of the floating-point number value in [first, last). + +The format of the resulting decimal representation is similar to printf's %g +format. Returns an iterator pointing past-the-end of the decimal representation. + +@note The input number must be finite, i.e. NaN's and Inf's are not supported. +@note The buffer must be large enough. +@note The result is NOT null-terminated. +*/ +template<typename FloatType> +JSON_HEDLEY_NON_NULL(1, 2) +JSON_HEDLEY_RETURNS_NON_NULL +char* to_chars(char* first, const char* last, FloatType value) +{ + static_cast<void>(last); // maybe unused - fix warning + JSON_ASSERT(std::isfinite(value)); + + // Use signbit(value) instead of (value < 0) since signbit works for -0. 
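+    // The sign is written here and the value negated, so grisu2 below only
+    // ever sees positive finite inputs; in particular -0.0 serializes as
+    // "-0.0" via the value == 0 branch that follows.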
+ if (std::signbit(value)) + { + value = -value; + *first++ = '-'; + } + +#ifdef __GNUC__ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wfloat-equal" +#endif + if (value == 0) // +-0 + { + *first++ = '0'; + // Make it look like a floating-point number (#362, #378) + *first++ = '.'; + *first++ = '0'; + return first; + } +#ifdef __GNUC__ +#pragma GCC diagnostic pop +#endif + + JSON_ASSERT(last - first >= std::numeric_limits<FloatType>::max_digits10); + + // Compute v = buffer * 10^decimal_exponent. + // The decimal digits are stored in the buffer, which needs to be interpreted + // as an unsigned decimal integer. + // len is the length of the buffer, i.e. the number of decimal digits. + int len = 0; + int decimal_exponent = 0; + dtoa_impl::grisu2(first, len, decimal_exponent, value); + + JSON_ASSERT(len <= std::numeric_limits<FloatType>::max_digits10); + + // Format the buffer like printf("%.*g", prec, value) + constexpr int kMinExp = -4; + // Use digits10 here to increase compatibility with version 2. + constexpr int kMaxExp = std::numeric_limits<FloatType>::digits10; + + JSON_ASSERT(last - first >= kMaxExp + 2); + JSON_ASSERT(last - first >= 2 + (-kMinExp - 1) + std::numeric_limits<FloatType>::max_digits10); + JSON_ASSERT(last - first >= std::numeric_limits<FloatType>::max_digits10 + 6); + + return dtoa_impl::format_buffer(first, len, decimal_exponent, kMinExp, kMaxExp); +} + +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/exceptions.hpp> + +// #include <nlohmann/detail/macro_scope.hpp> + +// #include <nlohmann/detail/meta/cpp_future.hpp> + +// #include <nlohmann/detail/output/binary_writer.hpp> + +// #include <nlohmann/detail/output/output_adapters.hpp> + +// #include <nlohmann/detail/string_concat.hpp> + +// #include <nlohmann/detail/value_t.hpp> + + +namespace nlohmann +{ +namespace detail +{ +/////////////////// +// serialization // +/////////////////// + +/// how to treat decoding errors +enum class error_handler_t +{ + strict, ///< throw a type_error exception in case of invalid UTF-8 + replace, ///< replace invalid UTF-8 sequences with U+FFFD + ignore ///< ignore invalid UTF-8 sequences +}; + +template<typename BasicJsonType> +class serializer +{ + using string_t = typename BasicJsonType::string_t; + using number_float_t = typename BasicJsonType::number_float_t; + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using binary_char_t = typename BasicJsonType::binary_t::value_type; + static constexpr std::uint8_t UTF8_ACCEPT = 0; + static constexpr std::uint8_t UTF8_REJECT = 1; + + public: + /*! + @param[in] s output stream to serialize to + @param[in] ichar indentation character to use + @param[in] error_handler_ how to react on decoding errors + */ + serializer(output_adapter_t<char> s, const char ichar, + error_handler_t error_handler_ = error_handler_t::strict) + : o(std::move(s)) + , loc(std::localeconv()) + , thousands_sep(loc->thousands_sep == nullptr ? '\0' : std::char_traits<char>::to_char_type(* (loc->thousands_sep))) + , decimal_point(loc->decimal_point == nullptr ? 
'\0' : std::char_traits<char>::to_char_type(* (loc->decimal_point))) + , indent_char(ichar) + , indent_string(512, indent_char) + , error_handler(error_handler_) + {} + + // delete because of pointer members + serializer(const serializer&) = delete; + serializer& operator=(const serializer&) = delete; + serializer(serializer&&) = delete; + serializer& operator=(serializer&&) = delete; + ~serializer() = default; + + /*! + @brief internal implementation of the serialization function + + This function is called by the public member function dump and organizes + the serialization internally. The indentation level is propagated as + additional parameter. In case of arrays and objects, the function is + called recursively. + + - strings and object keys are escaped using `escape_string()` + - integer numbers are converted implicitly via `operator<<` + - floating-point numbers are converted to a string using `"%g"` format + - binary values are serialized as objects containing the subtype and the + byte array + + @param[in] val value to serialize + @param[in] pretty_print whether the output shall be pretty-printed + @param[in] ensure_ascii If @a ensure_ascii is true, all non-ASCII characters + in the output are escaped with `\uXXXX` sequences, and the result consists + of ASCII characters only. + @param[in] indent_step the indent level + @param[in] current_indent the current indent level (only used internally) + */ + void dump(const BasicJsonType& val, + const bool pretty_print, + const bool ensure_ascii, + const unsigned int indent_step, + const unsigned int current_indent = 0) + { + switch (val.m_type) + { + case value_t::object: + { + if (val.m_value.object->empty()) + { + o->write_characters("{}", 2); + return; + } + + if (pretty_print) + { + o->write_characters("{\n", 2); + + // variable to hold indentation for recursive calls + const auto new_indent = current_indent + indent_step; + if (JSON_HEDLEY_UNLIKELY(indent_string.size() < new_indent)) + { + indent_string.resize(indent_string.size() * 2, ' '); + } + + // first n-1 elements + auto i = val.m_value.object->cbegin(); + for (std::size_t cnt = 0; cnt < val.m_value.object->size() - 1; ++cnt, ++i) + { + o->write_characters(indent_string.c_str(), new_indent); + o->write_character('\"'); + dump_escaped(i->first, ensure_ascii); + o->write_characters("\": ", 3); + dump(i->second, true, ensure_ascii, indent_step, new_indent); + o->write_characters(",\n", 2); + } + + // last element + JSON_ASSERT(i != val.m_value.object->cend()); + JSON_ASSERT(std::next(i) == val.m_value.object->cend()); + o->write_characters(indent_string.c_str(), new_indent); + o->write_character('\"'); + dump_escaped(i->first, ensure_ascii); + o->write_characters("\": ", 3); + dump(i->second, true, ensure_ascii, indent_step, new_indent); + + o->write_character('\n'); + o->write_characters(indent_string.c_str(), current_indent); + o->write_character('}'); + } + else + { + o->write_character('{'); + + // first n-1 elements + auto i = val.m_value.object->cbegin(); + for (std::size_t cnt = 0; cnt < val.m_value.object->size() - 1; ++cnt, ++i) + { + o->write_character('\"'); + dump_escaped(i->first, ensure_ascii); + o->write_characters("\":", 2); + dump(i->second, false, ensure_ascii, indent_step, current_indent); + o->write_character(','); + } + + // last element + JSON_ASSERT(i != val.m_value.object->cend()); + JSON_ASSERT(std::next(i) == val.m_value.object->cend()); + o->write_character('\"'); + dump_escaped(i->first, ensure_ascii); + o->write_characters("\":", 2); + dump(i->second, 
false, ensure_ascii, indent_step, current_indent); + + o->write_character('}'); + } + + return; + } + + case value_t::array: + { + if (val.m_value.array->empty()) + { + o->write_characters("[]", 2); + return; + } + + if (pretty_print) + { + o->write_characters("[\n", 2); + + // variable to hold indentation for recursive calls + const auto new_indent = current_indent + indent_step; + if (JSON_HEDLEY_UNLIKELY(indent_string.size() < new_indent)) + { + indent_string.resize(indent_string.size() * 2, ' '); + } + + // first n-1 elements + for (auto i = val.m_value.array->cbegin(); + i != val.m_value.array->cend() - 1; ++i) + { + o->write_characters(indent_string.c_str(), new_indent); + dump(*i, true, ensure_ascii, indent_step, new_indent); + o->write_characters(",\n", 2); + } + + // last element + JSON_ASSERT(!val.m_value.array->empty()); + o->write_characters(indent_string.c_str(), new_indent); + dump(val.m_value.array->back(), true, ensure_ascii, indent_step, new_indent); + + o->write_character('\n'); + o->write_characters(indent_string.c_str(), current_indent); + o->write_character(']'); + } + else + { + o->write_character('['); + + // first n-1 elements + for (auto i = val.m_value.array->cbegin(); + i != val.m_value.array->cend() - 1; ++i) + { + dump(*i, false, ensure_ascii, indent_step, current_indent); + o->write_character(','); + } + + // last element + JSON_ASSERT(!val.m_value.array->empty()); + dump(val.m_value.array->back(), false, ensure_ascii, indent_step, current_indent); + + o->write_character(']'); + } + + return; + } + + case value_t::string: + { + o->write_character('\"'); + dump_escaped(*val.m_value.string, ensure_ascii); + o->write_character('\"'); + return; + } + + case value_t::binary: + { + if (pretty_print) + { + o->write_characters("{\n", 2); + + // variable to hold indentation for recursive calls + const auto new_indent = current_indent + indent_step; + if (JSON_HEDLEY_UNLIKELY(indent_string.size() < new_indent)) + { + indent_string.resize(indent_string.size() * 2, ' '); + } + + o->write_characters(indent_string.c_str(), new_indent); + + o->write_characters("\"bytes\": [", 10); + + if (!val.m_value.binary->empty()) + { + for (auto i = val.m_value.binary->cbegin(); + i != val.m_value.binary->cend() - 1; ++i) + { + dump_integer(*i); + o->write_characters(", ", 2); + } + dump_integer(val.m_value.binary->back()); + } + + o->write_characters("],\n", 3); + o->write_characters(indent_string.c_str(), new_indent); + + o->write_characters("\"subtype\": ", 11); + if (val.m_value.binary->has_subtype()) + { + dump_integer(val.m_value.binary->subtype()); + } + else + { + o->write_characters("null", 4); + } + o->write_character('\n'); + o->write_characters(indent_string.c_str(), current_indent); + o->write_character('}'); + } + else + { + o->write_characters("{\"bytes\":[", 10); + + if (!val.m_value.binary->empty()) + { + for (auto i = val.m_value.binary->cbegin(); + i != val.m_value.binary->cend() - 1; ++i) + { + dump_integer(*i); + o->write_character(','); + } + dump_integer(val.m_value.binary->back()); + } + + o->write_characters("],\"subtype\":", 12); + if (val.m_value.binary->has_subtype()) + { + dump_integer(val.m_value.binary->subtype()); + o->write_character('}'); + } + else + { + o->write_characters("null}", 5); + } + } + return; + } + + case value_t::boolean: + { + if (val.m_value.boolean) + { + o->write_characters("true", 4); + } + else + { + o->write_characters("false", 5); + } + return; + } + + case value_t::number_integer: + { + dump_integer(val.m_value.number_integer); + 
return; + } + + case value_t::number_unsigned: + { + dump_integer(val.m_value.number_unsigned); + return; + } + + case value_t::number_float: + { + dump_float(val.m_value.number_float); + return; + } + + case value_t::discarded: + { + o->write_characters("<discarded>", 11); + return; + } + + case value_t::null: + { + o->write_characters("null", 4); + return; + } + + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) LCOV_EXCL_LINE + } + } + + JSON_PRIVATE_UNLESS_TESTED: + /*! + @brief dump escaped string + + Escape a string by replacing certain special characters by a sequence of an + escape character (backslash) and another character and other control + characters by a sequence of "\u" followed by a four-digit hex + representation. The escaped string is written to output stream @a o. + + @param[in] s the string to escape + @param[in] ensure_ascii whether to escape non-ASCII characters with + \uXXXX sequences + + @complexity Linear in the length of string @a s. + */ + void dump_escaped(const string_t& s, const bool ensure_ascii) + { + std::uint32_t codepoint{}; + std::uint8_t state = UTF8_ACCEPT; + std::size_t bytes = 0; // number of bytes written to string_buffer + + // number of bytes written at the point of the last valid byte + std::size_t bytes_after_last_accept = 0; + std::size_t undumped_chars = 0; + + for (std::size_t i = 0; i < s.size(); ++i) + { + const auto byte = static_cast<std::uint8_t>(s[i]); + + switch (decode(state, codepoint, byte)) + { + case UTF8_ACCEPT: // decode found a new code point + { + switch (codepoint) + { + case 0x08: // backspace + { + string_buffer[bytes++] = '\\'; + string_buffer[bytes++] = 'b'; + break; + } + + case 0x09: // horizontal tab + { + string_buffer[bytes++] = '\\'; + string_buffer[bytes++] = 't'; + break; + } + + case 0x0A: // newline + { + string_buffer[bytes++] = '\\'; + string_buffer[bytes++] = 'n'; + break; + } + + case 0x0C: // formfeed + { + string_buffer[bytes++] = '\\'; + string_buffer[bytes++] = 'f'; + break; + } + + case 0x0D: // carriage return + { + string_buffer[bytes++] = '\\'; + string_buffer[bytes++] = 'r'; + break; + } + + case 0x22: // quotation mark + { + string_buffer[bytes++] = '\\'; + string_buffer[bytes++] = '\"'; + break; + } + + case 0x5C: // reverse solidus + { + string_buffer[bytes++] = '\\'; + string_buffer[bytes++] = '\\'; + break; + } + + default: + { + // escape control characters (0x00..0x1F) or, if + // ensure_ascii parameter is used, non-ASCII characters + if ((codepoint <= 0x1F) || (ensure_ascii && (codepoint >= 0x7F))) + { + if (codepoint <= 0xFFFF) + { + // NOLINTNEXTLINE(cppcoreguidelines-pro-type-vararg,hicpp-vararg) + static_cast<void>((std::snprintf)(string_buffer.data() + bytes, 7, "\\u%04x", + static_cast<std::uint16_t>(codepoint))); + bytes += 6; + } + else + { + // NOLINTNEXTLINE(cppcoreguidelines-pro-type-vararg,hicpp-vararg) + static_cast<void>((std::snprintf)(string_buffer.data() + bytes, 13, "\\u%04x\\u%04x", + static_cast<std::uint16_t>(0xD7C0u + (codepoint >> 10u)), + static_cast<std::uint16_t>(0xDC00u + (codepoint & 0x3FFu)))); + bytes += 12; + } + } + else + { + // copy byte to buffer (all previous bytes + // been copied have in default case above) + string_buffer[bytes++] = s[i]; + } + break; + } + } + + // write buffer and reset index; there must be 13 bytes + // left, as this is the maximal number of bytes to be + // written ("\uxxxx\uxxxx\0") for one code point + if (string_buffer.size() - bytes < 13) + { + 
o->write_characters(string_buffer.data(), bytes); + bytes = 0; + } + + // remember the byte position of this accept + bytes_after_last_accept = bytes; + undumped_chars = 0; + break; + } + + case UTF8_REJECT: // decode found invalid UTF-8 byte + { + switch (error_handler) + { + case error_handler_t::strict: + { + JSON_THROW(type_error::create(316, concat("invalid UTF-8 byte at index ", std::to_string(i), ": 0x", hex_bytes(byte | 0)), nullptr)); + } + + case error_handler_t::ignore: + case error_handler_t::replace: + { + // in case we saw this character the first time, we + // would like to read it again, because the byte + // may be OK for itself, but just not OK for the + // previous sequence + if (undumped_chars > 0) + { + --i; + } + + // reset length buffer to the last accepted index; + // thus removing/ignoring the invalid characters + bytes = bytes_after_last_accept; + + if (error_handler == error_handler_t::replace) + { + // add a replacement character + if (ensure_ascii) + { + string_buffer[bytes++] = '\\'; + string_buffer[bytes++] = 'u'; + string_buffer[bytes++] = 'f'; + string_buffer[bytes++] = 'f'; + string_buffer[bytes++] = 'f'; + string_buffer[bytes++] = 'd'; + } + else + { + string_buffer[bytes++] = detail::binary_writer<BasicJsonType, char>::to_char_type('\xEF'); + string_buffer[bytes++] = detail::binary_writer<BasicJsonType, char>::to_char_type('\xBF'); + string_buffer[bytes++] = detail::binary_writer<BasicJsonType, char>::to_char_type('\xBD'); + } + + // write buffer and reset index; there must be 13 bytes + // left, as this is the maximal number of bytes to be + // written ("\uxxxx\uxxxx\0") for one code point + if (string_buffer.size() - bytes < 13) + { + o->write_characters(string_buffer.data(), bytes); + bytes = 0; + } + + bytes_after_last_accept = bytes; + } + + undumped_chars = 0; + + // continue processing the string + state = UTF8_ACCEPT; + break; + } + + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) LCOV_EXCL_LINE + } + break; + } + + default: // decode found yet incomplete multi-byte code point + { + if (!ensure_ascii) + { + // code point will not be escaped - copy byte to buffer + string_buffer[bytes++] = s[i]; + } + ++undumped_chars; + break; + } + } + } + + // we finished processing the string + if (JSON_HEDLEY_LIKELY(state == UTF8_ACCEPT)) + { + // write buffer + if (bytes > 0) + { + o->write_characters(string_buffer.data(), bytes); + } + } + else + { + // we finish reading, but do not accept: string was incomplete + switch (error_handler) + { + case error_handler_t::strict: + { + JSON_THROW(type_error::create(316, concat("incomplete UTF-8 string; last byte: 0x", hex_bytes(static_cast<std::uint8_t>(s.back() | 0))), nullptr)); + } + + case error_handler_t::ignore: + { + // write all accepted bytes + o->write_characters(string_buffer.data(), bytes_after_last_accept); + break; + } + + case error_handler_t::replace: + { + // write all accepted bytes + o->write_characters(string_buffer.data(), bytes_after_last_accept); + // add a replacement character + if (ensure_ascii) + { + o->write_characters("\\ufffd", 6); + } + else + { + o->write_characters("\xEF\xBF\xBD", 3); + } + break; + } + + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) LCOV_EXCL_LINE + } + } + } + + private: + /*! + @brief count digits + + Count the number of decimal (base 10) digits for an input unsigned integer. 
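+    For example, count_digits(12345) divides once by 10000 and returns 5; the
+    loop handles up to four digits per iteration to keep the division count low.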
+ + @param[in] x unsigned integer number to count its digits + @return number of decimal digits + */ + inline unsigned int count_digits(number_unsigned_t x) noexcept + { + unsigned int n_digits = 1; + for (;;) + { + if (x < 10) + { + return n_digits; + } + if (x < 100) + { + return n_digits + 1; + } + if (x < 1000) + { + return n_digits + 2; + } + if (x < 10000) + { + return n_digits + 3; + } + x = x / 10000u; + n_digits += 4; + } + } + + /*! + * @brief convert a byte to a uppercase hex representation + * @param[in] byte byte to represent + * @return representation ("00".."FF") + */ + static std::string hex_bytes(std::uint8_t byte) + { + std::string result = "FF"; + constexpr const char* nibble_to_hex = "0123456789ABCDEF"; + result[0] = nibble_to_hex[byte / 16]; + result[1] = nibble_to_hex[byte % 16]; + return result; + } + + // templates to avoid warnings about useless casts + template <typename NumberType, enable_if_t<std::is_signed<NumberType>::value, int> = 0> + bool is_negative_number(NumberType x) + { + return x < 0; + } + + template < typename NumberType, enable_if_t <std::is_unsigned<NumberType>::value, int > = 0 > + bool is_negative_number(NumberType /*unused*/) + { + return false; + } + + /*! + @brief dump an integer + + Dump a given integer to output stream @a o. Works internally with + @a number_buffer. + + @param[in] x integer number (signed or unsigned) to dump + @tparam NumberType either @a number_integer_t or @a number_unsigned_t + */ + template < typename NumberType, detail::enable_if_t < + std::is_integral<NumberType>::value || + std::is_same<NumberType, number_unsigned_t>::value || + std::is_same<NumberType, number_integer_t>::value || + std::is_same<NumberType, binary_char_t>::value, + int > = 0 > + void dump_integer(NumberType x) + { + static constexpr std::array<std::array<char, 2>, 100> digits_to_99 + { + { + {{'0', '0'}}, {{'0', '1'}}, {{'0', '2'}}, {{'0', '3'}}, {{'0', '4'}}, {{'0', '5'}}, {{'0', '6'}}, {{'0', '7'}}, {{'0', '8'}}, {{'0', '9'}}, + {{'1', '0'}}, {{'1', '1'}}, {{'1', '2'}}, {{'1', '3'}}, {{'1', '4'}}, {{'1', '5'}}, {{'1', '6'}}, {{'1', '7'}}, {{'1', '8'}}, {{'1', '9'}}, + {{'2', '0'}}, {{'2', '1'}}, {{'2', '2'}}, {{'2', '3'}}, {{'2', '4'}}, {{'2', '5'}}, {{'2', '6'}}, {{'2', '7'}}, {{'2', '8'}}, {{'2', '9'}}, + {{'3', '0'}}, {{'3', '1'}}, {{'3', '2'}}, {{'3', '3'}}, {{'3', '4'}}, {{'3', '5'}}, {{'3', '6'}}, {{'3', '7'}}, {{'3', '8'}}, {{'3', '9'}}, + {{'4', '0'}}, {{'4', '1'}}, {{'4', '2'}}, {{'4', '3'}}, {{'4', '4'}}, {{'4', '5'}}, {{'4', '6'}}, {{'4', '7'}}, {{'4', '8'}}, {{'4', '9'}}, + {{'5', '0'}}, {{'5', '1'}}, {{'5', '2'}}, {{'5', '3'}}, {{'5', '4'}}, {{'5', '5'}}, {{'5', '6'}}, {{'5', '7'}}, {{'5', '8'}}, {{'5', '9'}}, + {{'6', '0'}}, {{'6', '1'}}, {{'6', '2'}}, {{'6', '3'}}, {{'6', '4'}}, {{'6', '5'}}, {{'6', '6'}}, {{'6', '7'}}, {{'6', '8'}}, {{'6', '9'}}, + {{'7', '0'}}, {{'7', '1'}}, {{'7', '2'}}, {{'7', '3'}}, {{'7', '4'}}, {{'7', '5'}}, {{'7', '6'}}, {{'7', '7'}}, {{'7', '8'}}, {{'7', '9'}}, + {{'8', '0'}}, {{'8', '1'}}, {{'8', '2'}}, {{'8', '3'}}, {{'8', '4'}}, {{'8', '5'}}, {{'8', '6'}}, {{'8', '7'}}, {{'8', '8'}}, {{'8', '9'}}, + {{'9', '0'}}, {{'9', '1'}}, {{'9', '2'}}, {{'9', '3'}}, {{'9', '4'}}, {{'9', '5'}}, {{'9', '6'}}, {{'9', '7'}}, {{'9', '8'}}, {{'9', '9'}}, + } + }; + + // special case for "0" + if (x == 0) + { + o->write_character('0'); + return; + } + + // use a pointer to fill the buffer + auto buffer_ptr = number_buffer.begin(); // 
NOLINT(llvm-qualified-auto,readability-qualified-auto,cppcoreguidelines-pro-type-vararg,hicpp-vararg) + + number_unsigned_t abs_value; + + unsigned int n_chars{}; + + if (is_negative_number(x)) + { + *buffer_ptr = '-'; + abs_value = remove_sign(static_cast<number_integer_t>(x)); + + // account one more byte for the minus sign + n_chars = 1 + count_digits(abs_value); + } + else + { + abs_value = static_cast<number_unsigned_t>(x); + n_chars = count_digits(abs_value); + } + + // spare 1 byte for '\0' + JSON_ASSERT(n_chars < number_buffer.size() - 1); + + // jump to the end to generate the string from backward, + // so we later avoid reversing the result + buffer_ptr += n_chars; + + // Fast int2ascii implementation inspired by "Fastware" talk by Andrei Alexandrescu + // See: https://www.youtube.com/watch?v=o4-CwDo2zpg + while (abs_value >= 100) + { + const auto digits_index = static_cast<unsigned>((abs_value % 100)); + abs_value /= 100; + *(--buffer_ptr) = digits_to_99[digits_index][1]; + *(--buffer_ptr) = digits_to_99[digits_index][0]; + } + + if (abs_value >= 10) + { + const auto digits_index = static_cast<unsigned>(abs_value); + *(--buffer_ptr) = digits_to_99[digits_index][1]; + *(--buffer_ptr) = digits_to_99[digits_index][0]; + } + else + { + *(--buffer_ptr) = static_cast<char>('0' + abs_value); + } + + o->write_characters(number_buffer.data(), n_chars); + } + + /*! + @brief dump a floating-point number + + Dump a given floating-point number to output stream @a o. Works internally + with @a number_buffer. + + @param[in] x floating-point number to dump + */ + void dump_float(number_float_t x) + { + // NaN / inf + if (!std::isfinite(x)) + { + o->write_characters("null", 4); + return; + } + + // If number_float_t is an IEEE-754 single or double precision number, + // use the Grisu2 algorithm to produce short numbers which are + // guaranteed to round-trip, using strtof and strtod, resp. + // + // NB: The test below works if <long double> == <double>. 
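+        // For instance, if number_float_t were an x86 80-bit long double
+        // (digits == 64, max_exponent == 16384), the test fails and the
+        // snprintf-based overload further below is used instead of Grisu2.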
+ static constexpr bool is_ieee_single_or_double + = (std::numeric_limits<number_float_t>::is_iec559 && std::numeric_limits<number_float_t>::digits == 24 && std::numeric_limits<number_float_t>::max_exponent == 128) || + (std::numeric_limits<number_float_t>::is_iec559 && std::numeric_limits<number_float_t>::digits == 53 && std::numeric_limits<number_float_t>::max_exponent == 1024); + + dump_float(x, std::integral_constant<bool, is_ieee_single_or_double>()); + } + + void dump_float(number_float_t x, std::true_type /*is_ieee_single_or_double*/) + { + auto* begin = number_buffer.data(); + auto* end = ::nlohmann::detail::to_chars(begin, begin + number_buffer.size(), x); + + o->write_characters(begin, static_cast<size_t>(end - begin)); + } + + void dump_float(number_float_t x, std::false_type /*is_ieee_single_or_double*/) + { + // get number of digits for a float -> text -> float round-trip + static constexpr auto d = std::numeric_limits<number_float_t>::max_digits10; + + // the actual conversion + // NOLINTNEXTLINE(cppcoreguidelines-pro-type-vararg,hicpp-vararg) + std::ptrdiff_t len = (std::snprintf)(number_buffer.data(), number_buffer.size(), "%.*g", d, x); + + // negative value indicates an error + JSON_ASSERT(len > 0); + // check if buffer was large enough + JSON_ASSERT(static_cast<std::size_t>(len) < number_buffer.size()); + + // erase thousands separator + if (thousands_sep != '\0') + { + // NOLINTNEXTLINE(readability-qualified-auto,llvm-qualified-auto): std::remove returns an iterator, see https://github.com/nlohmann/json/issues/3081 + const auto end = std::remove(number_buffer.begin(), number_buffer.begin() + len, thousands_sep); + std::fill(end, number_buffer.end(), '\0'); + JSON_ASSERT((end - number_buffer.begin()) <= len); + len = (end - number_buffer.begin()); + } + + // convert decimal point to '.' + if (decimal_point != '\0' && decimal_point != '.') + { + // NOLINTNEXTLINE(readability-qualified-auto,llvm-qualified-auto): std::find returns an iterator, see https://github.com/nlohmann/json/issues/3081 + const auto dec_pos = std::find(number_buffer.begin(), number_buffer.end(), decimal_point); + if (dec_pos != number_buffer.end()) + { + *dec_pos = '.'; + } + } + + o->write_characters(number_buffer.data(), static_cast<std::size_t>(len)); + + // determine if we need to append ".0" + const bool value_is_int_like = + std::none_of(number_buffer.begin(), number_buffer.begin() + len + 1, + [](char c) + { + return c == '.' || c == 'e'; + }); + + if (value_is_int_like) + { + o->write_characters(".0", 2); + } + } + + /*! + @brief check whether a string is UTF-8 encoded + + The function checks each byte of a string whether it is UTF-8 encoded. The + result of the check is stored in the @a state parameter. The function must + be called initially with state 0 (accept). State 1 means the string must + be rejected, because the current byte is not allowed. If the string is + completely processed, but the state is non-zero, the string ended + prematurely; that is, the last byte indicated more bytes should have + followed. + + @param[in,out] state the state of the decoding + @param[in,out] codep codepoint (valid only if resulting state is UTF8_ACCEPT) + @param[in] byte next byte to decode + @return new state + + @note The function has been edited: a std::array is used. 
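+    As an illustrative usage sketch (added note, not from the original
+    decoder documentation): feeding the two UTF-8 bytes of U+00E9 one by one
+    returns the state to UTF8_ACCEPT with the decoded code point in @a codep,
+    while a stray continuation byte such as 0x80 drives the state to 1
+    (reject).
+
+    @code {.cpp}
+    std::uint8_t state = UTF8_ACCEPT; // 0
+    std::uint32_t codep = 0;
+    for (const std::uint8_t byte : {std::uint8_t(0xC3), std::uint8_t(0xA9)})
+    {
+        decode(state, codep, byte);
+    }
+    // here: state == UTF8_ACCEPT and codep == 0x00E9
+    @endcode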
+ + @copyright Copyright (c) 2008-2009 Bjoern Hoehrmann <bjoern@hoehrmann.de> + @sa http://bjoern.hoehrmann.de/utf-8/decoder/dfa/ + */ + static std::uint8_t decode(std::uint8_t& state, std::uint32_t& codep, const std::uint8_t byte) noexcept + { + static const std::array<std::uint8_t, 400> utf8d = + { + { + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 00..1F + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 20..3F + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 40..5F + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 60..7F + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, // 80..9F + 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, // A0..BF + 8, 8, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, // C0..DF + 0xA, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x4, 0x3, 0x3, // E0..EF + 0xB, 0x6, 0x6, 0x6, 0x5, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, // F0..FF + 0x0, 0x1, 0x2, 0x3, 0x5, 0x8, 0x7, 0x1, 0x1, 0x1, 0x4, 0x6, 0x1, 0x1, 0x1, 0x1, // s0..s0 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, // s1..s2 + 1, 2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, // s3..s4 + 1, 2, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, // s5..s6 + 1, 3, 1, 1, 1, 1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 // s7..s8 + } + }; + + JSON_ASSERT(byte < utf8d.size()); + const std::uint8_t type = utf8d[byte]; + + codep = (state != UTF8_ACCEPT) + ? (byte & 0x3fu) | (codep << 6u) + : (0xFFu >> type) & (byte); + + std::size_t index = 256u + static_cast<size_t>(state) * 16u + static_cast<size_t>(type); + JSON_ASSERT(index < 400); + state = utf8d[index]; + return state; + } + + /* + * Overload to make the compiler happy while it is instantiating + * dump_integer for number_unsigned_t. + * Must never be called. + */ + number_unsigned_t remove_sign(number_unsigned_t x) + { + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) LCOV_EXCL_LINE + return x; // LCOV_EXCL_LINE + } + + /* + * Helper function for dump_integer + * + * This function takes a negative signed integer and returns its absolute + * value as unsigned integer. The plus/minus shuffling is necessary as we can + * not directly remove the sign of an arbitrary signed integer as the + * absolute values of INT_MIN and INT_MAX are usually not the same. See + * #1708 for details. 
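+     * As a worked example (added note): for x == -1 the result is
+     * static_cast<number_unsigned_t>(-(-1 + 1)) + 1 == 1; for x equal to the
+     * minimum of a 64-bit number_integer_t, -(x + 1) equals the maximum
+     * (2^63 - 1), and adding 1 after the cast yields 2^63, which fits in
+     * number_unsigned_t even though negating x directly would overflow.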
+ */ + inline number_unsigned_t remove_sign(number_integer_t x) noexcept + { + JSON_ASSERT(x < 0 && x < (std::numeric_limits<number_integer_t>::max)()); // NOLINT(misc-redundant-expression) + return static_cast<number_unsigned_t>(-(x + 1)) + 1; + } + + private: + /// the output of the serializer + output_adapter_t<char> o = nullptr; + + /// a (hopefully) large enough character buffer + std::array<char, 64> number_buffer{{}}; + + /// the locale + const std::lconv* loc = nullptr; + /// the locale's thousand separator character + const char thousands_sep = '\0'; + /// the locale's decimal point character + const char decimal_point = '\0'; + + /// string buffer + std::array<char, 512> string_buffer{{}}; + + /// the indentation character + const char indent_char; + /// the indentation string + string_t indent_string; + + /// error_handler how to react on decoding errors + const error_handler_t error_handler; +}; +} // namespace detail +} // namespace nlohmann + +// #include <nlohmann/detail/value_t.hpp> + +// #include <nlohmann/json_fwd.hpp> + +// #include <nlohmann/ordered_map.hpp> + + +#include <functional> // equal_to, less +#include <initializer_list> // initializer_list +#include <iterator> // input_iterator_tag, iterator_traits +#include <memory> // allocator +#include <stdexcept> // for out_of_range +#include <type_traits> // enable_if, is_convertible +#include <utility> // pair +#include <vector> // vector + +// #include <nlohmann/detail/macro_scope.hpp> + + +namespace nlohmann +{ + +/// ordered_map: a minimal map-like container that preserves insertion order +/// for use within nlohmann::basic_json<ordered_map> +template <class Key, class T, class IgnoredLess = std::less<Key>, + class Allocator = std::allocator<std::pair<const Key, T>>> + struct ordered_map : std::vector<std::pair<const Key, T>, Allocator> +{ + using key_type = Key; + using mapped_type = T; + using Container = std::vector<std::pair<const Key, T>, Allocator>; + using iterator = typename Container::iterator; + using const_iterator = typename Container::const_iterator; + using size_type = typename Container::size_type; + using value_type = typename Container::value_type; +#ifdef JSON_HAS_CPP_14 + using key_compare = std::equal_to<>; +#else + using key_compare = std::equal_to<Key>; +#endif + + // Explicit constructors instead of `using Container::Container` + // otherwise older compilers choke on it (GCC <= 5.5, xcode <= 9.4) + ordered_map() noexcept(noexcept(Container())) : Container{} {} + explicit ordered_map(const Allocator& alloc) noexcept(noexcept(Container(alloc))) : Container{alloc} {} + template <class It> + ordered_map(It first, It last, const Allocator& alloc = Allocator()) + : Container{first, last, alloc} {} + ordered_map(std::initializer_list<value_type> init, const Allocator& alloc = Allocator() ) + : Container{init, alloc} {} + + std::pair<iterator, bool> emplace(const key_type& key, T&& t) + { + for (auto it = this->begin(); it != this->end(); ++it) + { + if (m_compare(it->first, key)) + { + return {it, false}; + } + } + Container::emplace_back(key, t); + return {--this->end(), true}; + } + + T& operator[](const Key& key) + { + return emplace(key, T{}).first->second; + } + + const T& operator[](const Key& key) const + { + return at(key); + } + + T& at(const Key& key) + { + for (auto it = this->begin(); it != this->end(); ++it) + { + if (m_compare(it->first, key)) + { + return it->second; + } + } + + JSON_THROW(std::out_of_range("key not found")); + } + + const T& at(const Key& key) const + { + for (auto it 
= this->begin(); it != this->end(); ++it) + { + if (m_compare(it->first, key)) + { + return it->second; + } + } + + JSON_THROW(std::out_of_range("key not found")); + } + + size_type erase(const Key& key) + { + for (auto it = this->begin(); it != this->end(); ++it) + { + if (m_compare(it->first, key)) + { + // Since we cannot move const Keys, re-construct them in place + for (auto next = it; ++next != this->end(); ++it) + { + it->~value_type(); // Destroy but keep allocation + new (&*it) value_type{std::move(*next)}; + } + Container::pop_back(); + return 1; + } + } + return 0; + } + + iterator erase(iterator pos) + { + return erase(pos, std::next(pos)); + } + + iterator erase(iterator first, iterator last) + { + const auto elements_affected = std::distance(first, last); + const auto offset = std::distance(Container::begin(), first); + + // This is the start situation. We need to delete elements_affected + // elements (3 in this example: e, f, g), and need to return an + // iterator past the last deleted element (h in this example). + // Note that offset is the distance from the start of the vector + // to first. We will need this later. + + // [ a, b, c, d, e, f, g, h, i, j ] + // ^ ^ + // first last + + // Since we cannot move const Keys, we re-construct them in place. + // We start at first and re-construct (viz. copy) the elements from + // the back of the vector. Example for first iteration: + + // ,--------. + // v | destroy e and re-construct with h + // [ a, b, c, d, e, f, g, h, i, j ] + // ^ ^ + // it it + elements_affected + + for (auto it = first; std::next(it, elements_affected) != Container::end(); ++it) + { + it->~value_type(); // destroy but keep allocation + new (&*it) value_type{std::move(*std::next(it, elements_affected))}; // "move" next element to it + } + + // [ a, b, c, d, h, i, j, h, i, j ] + // ^ ^ + // first last + + // remove the unneeded elements at the end of the vector + Container::resize(this->size() - static_cast<size_type>(elements_affected)); + + // [ a, b, c, d, h, i, j ] + // ^ ^ + // first last + + // first is now pointing past the last deleted element, but we cannot + // use this iterator, because it may have been invalidated by the + // resize call. Instead, we can return begin() + offset. 
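+        // Illustrative usage sketch (added note, not part of the library):
+        // from the caller's side, erasing a middle range keeps the insertion
+        // order of the remaining pairs.
+        //
+        //     nlohmann::ordered_map<std::string, int> m;
+        //     m["a"] = 1; m["b"] = 2; m["c"] = 3; m["d"] = 4;
+        //     m.erase(std::next(m.begin()), std::next(m.begin(), 3));
+        //     // remaining iteration order: ("a", 1), ("d", 4)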
+ return Container::begin() + offset; + } + + size_type count(const Key& key) const + { + for (auto it = this->begin(); it != this->end(); ++it) + { + if (m_compare(it->first, key)) + { + return 1; + } + } + return 0; + } + + iterator find(const Key& key) + { + for (auto it = this->begin(); it != this->end(); ++it) + { + if (m_compare(it->first, key)) + { + return it; + } + } + return Container::end(); + } + + const_iterator find(const Key& key) const + { + for (auto it = this->begin(); it != this->end(); ++it) + { + if (m_compare(it->first, key)) + { + return it; + } + } + return Container::end(); + } + + std::pair<iterator, bool> insert( value_type&& value ) + { + return emplace(value.first, std::move(value.second)); + } + + std::pair<iterator, bool> insert( const value_type& value ) + { + for (auto it = this->begin(); it != this->end(); ++it) + { + if (m_compare(it->first, value.first)) + { + return {it, false}; + } + } + Container::push_back(value); + return {--this->end(), true}; + } + + template<typename InputIt> + using require_input_iter = typename std::enable_if<std::is_convertible<typename std::iterator_traits<InputIt>::iterator_category, + std::input_iterator_tag>::value>::type; + + template<typename InputIt, typename = require_input_iter<InputIt>> + void insert(InputIt first, InputIt last) + { + for (auto it = first; it != last; ++it) + { + insert(*it); + } + } + +private: + JSON_NO_UNIQUE_ADDRESS key_compare m_compare = key_compare(); +}; + +} // namespace nlohmann + + +#if defined(JSON_HAS_CPP_17) + #include <any> + #include <string_view> +#endif + +/*! +@brief namespace for Niels Lohmann +@see https://github.com/nlohmann +@since version 1.0.0 +*/ +namespace nlohmann +{ + +/*! +@brief a class to store JSON values + +@internal +@invariant The member variables @a m_value and @a m_type have the following +relationship: +- If `m_type == value_t::object`, then `m_value.object != nullptr`. +- If `m_type == value_t::array`, then `m_value.array != nullptr`. +- If `m_type == value_t::string`, then `m_value.string != nullptr`. +The invariants are checked by member function assert_invariant(). 
+ +@note ObjectType trick from https://stackoverflow.com/a/9860911 +@endinternal + +@since version 1.0.0 + +@nosubgrouping +*/ +NLOHMANN_BASIC_JSON_TPL_DECLARATION +class basic_json // NOLINT(cppcoreguidelines-special-member-functions,hicpp-special-member-functions) +{ + private: + template<detail::value_t> friend struct detail::external_constructor; + + template<typename> + friend class ::nlohmann::json_pointer; + // can be restored when json_pointer backwards compatibility is removed + // friend ::nlohmann::json_pointer<StringType>; + + template<typename BasicJsonType, typename InputType> + friend class ::nlohmann::detail::parser; + friend ::nlohmann::detail::serializer<basic_json>; + template<typename BasicJsonType> + friend class ::nlohmann::detail::iter_impl; + template<typename BasicJsonType, typename CharType> + friend class ::nlohmann::detail::binary_writer; + template<typename BasicJsonType, typename InputType, typename SAX> + friend class ::nlohmann::detail::binary_reader; + template<typename BasicJsonType> + friend class ::nlohmann::detail::json_sax_dom_parser; + template<typename BasicJsonType> + friend class ::nlohmann::detail::json_sax_dom_callback_parser; + friend class ::nlohmann::detail::exception; + + /// workaround type for MSVC + using basic_json_t = NLOHMANN_BASIC_JSON_TPL; + + JSON_PRIVATE_UNLESS_TESTED: + // convenience aliases for types residing in namespace detail; + using lexer = ::nlohmann::detail::lexer_base<basic_json>; + + template<typename InputAdapterType> + static ::nlohmann::detail::parser<basic_json, InputAdapterType> parser( + InputAdapterType adapter, + detail::parser_callback_t<basic_json>cb = nullptr, + const bool allow_exceptions = true, + const bool ignore_comments = false + ) + { + return ::nlohmann::detail::parser<basic_json, InputAdapterType>(std::move(adapter), + std::move(cb), allow_exceptions, ignore_comments); + } + + private: + using primitive_iterator_t = ::nlohmann::detail::primitive_iterator_t; + template<typename BasicJsonType> + using internal_iterator = ::nlohmann::detail::internal_iterator<BasicJsonType>; + template<typename BasicJsonType> + using iter_impl = ::nlohmann::detail::iter_impl<BasicJsonType>; + template<typename Iterator> + using iteration_proxy = ::nlohmann::detail::iteration_proxy<Iterator>; + template<typename Base> using json_reverse_iterator = ::nlohmann::detail::json_reverse_iterator<Base>; + + template<typename CharType> + using output_adapter_t = ::nlohmann::detail::output_adapter_t<CharType>; + + template<typename InputType> + using binary_reader = ::nlohmann::detail::binary_reader<basic_json, InputType>; + template<typename CharType> using binary_writer = ::nlohmann::detail::binary_writer<basic_json, CharType>; + + JSON_PRIVATE_UNLESS_TESTED: + using serializer = ::nlohmann::detail::serializer<basic_json>; + + public: + using value_t = detail::value_t; + /// JSON Pointer, see @ref nlohmann::json_pointer + using json_pointer = ::nlohmann::json_pointer<StringType>; + template<typename T, typename SFINAE> + using json_serializer = JSONSerializer<T, SFINAE>; + /// how to treat decoding errors + using error_handler_t = detail::error_handler_t; + /// how to treat CBOR tags + using cbor_tag_handler_t = detail::cbor_tag_handler_t; + /// helper type for initializer lists of basic_json values + using initializer_list_t = std::initializer_list<detail::json_ref<basic_json>>; + + using input_format_t = detail::input_format_t; + /// SAX interface type, see @ref nlohmann::json_sax + using json_sax_t = json_sax<basic_json>; + + 
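+    // Illustrative usage sketch (added note, not part of the original
+    // header): the aliases above are the types user code typically spells
+    // out explicitly, e.g.
+    //
+    //     nlohmann::json j = {{"list", {1, 2, 3}}};
+    //     auto first = j[nlohmann::json::json_pointer("/list/0")];  // 1
+    //     auto text  = j.dump(2, ' ', false,
+    //                         nlohmann::json::error_handler_t::replace);
+    //
+    // json_pointer addresses nested values; error_handler_t selects how
+    // dump() reacts to invalid UTF-8 in strings.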
//////////////// + // exceptions // + //////////////// + + /// @name exceptions + /// Classes to implement user-defined exceptions. + /// @{ + + using exception = detail::exception; + using parse_error = detail::parse_error; + using invalid_iterator = detail::invalid_iterator; + using type_error = detail::type_error; + using out_of_range = detail::out_of_range; + using other_error = detail::other_error; + + /// @} + + + ///////////////////// + // container types // + ///////////////////// + + /// @name container types + /// The canonic container types to use @ref basic_json like any other STL + /// container. + /// @{ + + /// the type of elements in a basic_json container + using value_type = basic_json; + + /// the type of an element reference + using reference = value_type&; + /// the type of an element const reference + using const_reference = const value_type&; + + /// a type to represent differences between iterators + using difference_type = std::ptrdiff_t; + /// a type to represent container sizes + using size_type = std::size_t; + + /// the allocator type + using allocator_type = AllocatorType<basic_json>; + + /// the type of an element pointer + using pointer = typename std::allocator_traits<allocator_type>::pointer; + /// the type of an element const pointer + using const_pointer = typename std::allocator_traits<allocator_type>::const_pointer; + + /// an iterator for a basic_json container + using iterator = iter_impl<basic_json>; + /// a const iterator for a basic_json container + using const_iterator = iter_impl<const basic_json>; + /// a reverse iterator for a basic_json container + using reverse_iterator = json_reverse_iterator<typename basic_json::iterator>; + /// a const reverse iterator for a basic_json container + using const_reverse_iterator = json_reverse_iterator<typename basic_json::const_iterator>; + + /// @} + + + /// @brief returns the allocator associated with the container + /// @sa https://json.nlohmann.me/api/basic_json/get_allocator/ + static allocator_type get_allocator() + { + return allocator_type(); + } + + /// @brief returns version information on the library + /// @sa https://json.nlohmann.me/api/basic_json/meta/ + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json meta() + { + basic_json result; + + result["copyright"] = "(C) 2013-2022 Niels Lohmann"; + result["name"] = "JSON for Modern C++"; + result["url"] = "https://github.com/nlohmann/json"; + result["version"]["string"] = + detail::concat(std::to_string(NLOHMANN_JSON_VERSION_MAJOR), '.', + std::to_string(NLOHMANN_JSON_VERSION_MINOR), '.', + std::to_string(NLOHMANN_JSON_VERSION_PATCH)); + result["version"]["major"] = NLOHMANN_JSON_VERSION_MAJOR; + result["version"]["minor"] = NLOHMANN_JSON_VERSION_MINOR; + result["version"]["patch"] = NLOHMANN_JSON_VERSION_PATCH; + +#ifdef _WIN32 + result["platform"] = "win32"; +#elif defined __linux__ + result["platform"] = "linux"; +#elif defined __APPLE__ + result["platform"] = "apple"; +#elif defined __unix__ + result["platform"] = "unix"; +#else + result["platform"] = "unknown"; +#endif + +#if defined(__ICC) || defined(__INTEL_COMPILER) + result["compiler"] = {{"family", "icc"}, {"version", __INTEL_COMPILER}}; +#elif defined(__clang__) + result["compiler"] = {{"family", "clang"}, {"version", __clang_version__}}; +#elif defined(__GNUC__) || defined(__GNUG__) + result["compiler"] = {{"family", "gcc"}, {"version", detail::concat( + std::to_string(__GNUC__), '.', + std::to_string(__GNUC_MINOR__), '.', + std::to_string(__GNUC_PATCHLEVEL__)) + } + }; +#elif 
defined(__HP_cc) || defined(__HP_aCC) + result["compiler"] = "hp" +#elif defined(__IBMCPP__) + result["compiler"] = {{"family", "ilecpp"}, {"version", __IBMCPP__}}; +#elif defined(_MSC_VER) + result["compiler"] = {{"family", "msvc"}, {"version", _MSC_VER}}; +#elif defined(__PGI) + result["compiler"] = {{"family", "pgcpp"}, {"version", __PGI}}; +#elif defined(__SUNPRO_CC) + result["compiler"] = {{"family", "sunpro"}, {"version", __SUNPRO_CC}}; +#else + result["compiler"] = {{"family", "unknown"}, {"version", "unknown"}}; +#endif + + +#if defined(_MSVC_LANG) + result["compiler"]["c++"] = std::to_string(_MSVC_LANG); +#elif defined(__cplusplus) + result["compiler"]["c++"] = std::to_string(__cplusplus); +#else + result["compiler"]["c++"] = "unknown"; +#endif + return result; + } + + + /////////////////////////// + // JSON value data types // + /////////////////////////// + + /// @name JSON value data types + /// The data types to store a JSON value. These types are derived from + /// the template arguments passed to class @ref basic_json. + /// @{ + + /// @brief default object key comparator type + /// The actual object key comparator type (@ref object_comparator_t) may be + /// different. + /// @sa https://json.nlohmann.me/api/basic_json/default_object_comparator_t/ +#if defined(JSON_HAS_CPP_14) + // use of transparent comparator avoids unnecessary repeated construction of temporaries + // in functions involving lookup by key with types other than object_t::key_type (aka. StringType) + using default_object_comparator_t = std::less<>; +#else + using default_object_comparator_t = std::less<StringType>; +#endif + + /// @brief a type for an object + /// @sa https://json.nlohmann.me/api/basic_json/object_t/ + using object_t = ObjectType<StringType, + basic_json, + default_object_comparator_t, + AllocatorType<std::pair<const StringType, + basic_json>>>; + + /// @brief a type for an array + /// @sa https://json.nlohmann.me/api/basic_json/array_t/ + using array_t = ArrayType<basic_json, AllocatorType<basic_json>>; + + /// @brief a type for a string + /// @sa https://json.nlohmann.me/api/basic_json/string_t/ + using string_t = StringType; + + /// @brief a type for a boolean + /// @sa https://json.nlohmann.me/api/basic_json/boolean_t/ + using boolean_t = BooleanType; + + /// @brief a type for a number (integer) + /// @sa https://json.nlohmann.me/api/basic_json/number_integer_t/ + using number_integer_t = NumberIntegerType; + + /// @brief a type for a number (unsigned) + /// @sa https://json.nlohmann.me/api/basic_json/number_unsigned_t/ + using number_unsigned_t = NumberUnsignedType; + + /// @brief a type for a number (floating-point) + /// @sa https://json.nlohmann.me/api/basic_json/number_float_t/ + using number_float_t = NumberFloatType; + + /// @brief a type for a packed binary type + /// @sa https://json.nlohmann.me/api/basic_json/binary_t/ + using binary_t = nlohmann::byte_container_with_subtype<BinaryType>; + + /// @brief object key comparator type + /// @sa https://json.nlohmann.me/api/basic_json/object_comparator_t/ + using object_comparator_t = detail::actual_object_comparator_t<basic_json>; + + /// @} + + private: + + /// helper for exception-safe object creation + template<typename T, typename... Args> + JSON_HEDLEY_RETURNS_NON_NULL + static T* create(Args&& ... 
args) + { + AllocatorType<T> alloc; + using AllocatorTraits = std::allocator_traits<AllocatorType<T>>; + + auto deleter = [&](T * obj) + { + AllocatorTraits::deallocate(alloc, obj, 1); + }; + std::unique_ptr<T, decltype(deleter)> obj(AllocatorTraits::allocate(alloc, 1), deleter); + AllocatorTraits::construct(alloc, obj.get(), std::forward<Args>(args)...); + JSON_ASSERT(obj != nullptr); + return obj.release(); + } + + //////////////////////// + // JSON value storage // + //////////////////////// + + JSON_PRIVATE_UNLESS_TESTED: + /*! + @brief a JSON value + + The actual storage for a JSON value of the @ref basic_json class. This + union combines the different storage types for the JSON value types + defined in @ref value_t. + + JSON type | value_t type | used type + --------- | --------------- | ------------------------ + object | object | pointer to @ref object_t + array | array | pointer to @ref array_t + string | string | pointer to @ref string_t + boolean | boolean | @ref boolean_t + number | number_integer | @ref number_integer_t + number | number_unsigned | @ref number_unsigned_t + number | number_float | @ref number_float_t + binary | binary | pointer to @ref binary_t + null | null | *no value is stored* + + @note Variable-length types (objects, arrays, and strings) are stored as + pointers. The size of the union should not exceed 64 bits if the default + value types are used. + + @since version 1.0.0 + */ + union json_value + { + /// object (stored with pointer to save storage) + object_t* object; + /// array (stored with pointer to save storage) + array_t* array; + /// string (stored with pointer to save storage) + string_t* string; + /// binary (stored with pointer to save storage) + binary_t* binary; + /// boolean + boolean_t boolean; + /// number (integer) + number_integer_t number_integer; + /// number (unsigned integer) + number_unsigned_t number_unsigned; + /// number (floating-point) + number_float_t number_float; + + /// default constructor (for null values) + json_value() = default; + /// constructor for booleans + json_value(boolean_t v) noexcept : boolean(v) {} + /// constructor for numbers (integer) + json_value(number_integer_t v) noexcept : number_integer(v) {} + /// constructor for numbers (unsigned) + json_value(number_unsigned_t v) noexcept : number_unsigned(v) {} + /// constructor for numbers (floating-point) + json_value(number_float_t v) noexcept : number_float(v) {} + /// constructor for empty values of a given type + json_value(value_t t) + { + switch (t) + { + case value_t::object: + { + object = create<object_t>(); + break; + } + + case value_t::array: + { + array = create<array_t>(); + break; + } + + case value_t::string: + { + string = create<string_t>(""); + break; + } + + case value_t::binary: + { + binary = create<binary_t>(); + break; + } + + case value_t::boolean: + { + boolean = static_cast<boolean_t>(false); + break; + } + + case value_t::number_integer: + { + number_integer = static_cast<number_integer_t>(0); + break; + } + + case value_t::number_unsigned: + { + number_unsigned = static_cast<number_unsigned_t>(0); + break; + } + + case value_t::number_float: + { + number_float = static_cast<number_float_t>(0.0); + break; + } + + case value_t::null: + { + object = nullptr; // silence warning, see #821 + break; + } + + case value_t::discarded: + default: + { + object = nullptr; // silence warning, see #821 + if (JSON_HEDLEY_UNLIKELY(t == value_t::null)) + { + JSON_THROW(other_error::create(500, "961c151d2e87f2686a955a9be24d316f1362bf21 3.10.5", 
nullptr)); // LCOV_EXCL_LINE + } + break; + } + } + } + + /// constructor for strings + json_value(const string_t& value) : string(create<string_t>(value)) {} + + /// constructor for rvalue strings + json_value(string_t&& value) : string(create<string_t>(std::move(value))) {} + + /// constructor for objects + json_value(const object_t& value) : object(create<object_t>(value)) {} + + /// constructor for rvalue objects + json_value(object_t&& value) : object(create<object_t>(std::move(value))) {} + + /// constructor for arrays + json_value(const array_t& value) : array(create<array_t>(value)) {} + + /// constructor for rvalue arrays + json_value(array_t&& value) : array(create<array_t>(std::move(value))) {} + + /// constructor for binary arrays + json_value(const typename binary_t::container_type& value) : binary(create<binary_t>(value)) {} + + /// constructor for rvalue binary arrays + json_value(typename binary_t::container_type&& value) : binary(create<binary_t>(std::move(value))) {} + + /// constructor for binary arrays (internal type) + json_value(const binary_t& value) : binary(create<binary_t>(value)) {} + + /// constructor for rvalue binary arrays (internal type) + json_value(binary_t&& value) : binary(create<binary_t>(std::move(value))) {} + + void destroy(value_t t) + { + if (t == value_t::array || t == value_t::object) + { + // flatten the current json_value to a heap-allocated stack + std::vector<basic_json> stack; + + // move the top-level items to stack + if (t == value_t::array) + { + stack.reserve(array->size()); + std::move(array->begin(), array->end(), std::back_inserter(stack)); + } + else + { + stack.reserve(object->size()); + for (auto&& it : *object) + { + stack.push_back(std::move(it.second)); + } + } + + while (!stack.empty()) + { + // move the last item to local variable to be processed + basic_json current_item(std::move(stack.back())); + stack.pop_back(); + + // if current_item is array/object, move + // its children to the stack to be processed later + if (current_item.is_array()) + { + std::move(current_item.m_value.array->begin(), current_item.m_value.array->end(), std::back_inserter(stack)); + + current_item.m_value.array->clear(); + } + else if (current_item.is_object()) + { + for (auto&& it : *current_item.m_value.object) + { + stack.push_back(std::move(it.second)); + } + + current_item.m_value.object->clear(); + } + + // it's now safe that current_item get destructed + // since it doesn't have any children + } + } + + switch (t) + { + case value_t::object: + { + AllocatorType<object_t> alloc; + std::allocator_traits<decltype(alloc)>::destroy(alloc, object); + std::allocator_traits<decltype(alloc)>::deallocate(alloc, object, 1); + break; + } + + case value_t::array: + { + AllocatorType<array_t> alloc; + std::allocator_traits<decltype(alloc)>::destroy(alloc, array); + std::allocator_traits<decltype(alloc)>::deallocate(alloc, array, 1); + break; + } + + case value_t::string: + { + AllocatorType<string_t> alloc; + std::allocator_traits<decltype(alloc)>::destroy(alloc, string); + std::allocator_traits<decltype(alloc)>::deallocate(alloc, string, 1); + break; + } + + case value_t::binary: + { + AllocatorType<binary_t> alloc; + std::allocator_traits<decltype(alloc)>::destroy(alloc, binary); + std::allocator_traits<decltype(alloc)>::deallocate(alloc, binary, 1); + break; + } + + case value_t::null: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::discarded: + default: + { + break; 
+ } + } + } + }; + + private: + /*! + @brief checks the class invariants + + This function asserts the class invariants. It needs to be called at the + end of every constructor to make sure that created objects respect the + invariant. Furthermore, it has to be called each time the type of a JSON + value is changed, because the invariant expresses a relationship between + @a m_type and @a m_value. + + Furthermore, the parent relation is checked for arrays and objects: If + @a check_parents true and the value is an array or object, then the + container's elements must have the current value as parent. + + @param[in] check_parents whether the parent relation should be checked. + The value is true by default and should only be set to false + during destruction of objects when the invariant does not + need to hold. + */ + void assert_invariant(bool check_parents = true) const noexcept + { + JSON_ASSERT(m_type != value_t::object || m_value.object != nullptr); + JSON_ASSERT(m_type != value_t::array || m_value.array != nullptr); + JSON_ASSERT(m_type != value_t::string || m_value.string != nullptr); + JSON_ASSERT(m_type != value_t::binary || m_value.binary != nullptr); + +#if JSON_DIAGNOSTICS + JSON_TRY + { + // cppcheck-suppress assertWithSideEffect + JSON_ASSERT(!check_parents || !is_structured() || std::all_of(begin(), end(), [this](const basic_json & j) + { + return j.m_parent == this; + })); + } + JSON_CATCH(...) {} // LCOV_EXCL_LINE +#endif + static_cast<void>(check_parents); + } + + void set_parents() + { +#if JSON_DIAGNOSTICS + switch (m_type) + { + case value_t::array: + { + for (auto& element : *m_value.array) + { + element.m_parent = this; + } + break; + } + + case value_t::object: + { + for (auto& element : *m_value.object) + { + element.second.m_parent = this; + } + break; + } + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + break; + } +#endif + } + + iterator set_parents(iterator it, typename iterator::difference_type count_set_parents) + { +#if JSON_DIAGNOSTICS + for (typename iterator::difference_type i = 0; i < count_set_parents; ++i) + { + (it + i)->m_parent = this; + } +#else + static_cast<void>(count_set_parents); +#endif + return it; + } + + reference set_parent(reference j, std::size_t old_capacity = static_cast<std::size_t>(-1)) + { +#if JSON_DIAGNOSTICS + if (old_capacity != static_cast<std::size_t>(-1)) + { + // see https://github.com/nlohmann/json/issues/2838 + JSON_ASSERT(type() == value_t::array); + if (JSON_HEDLEY_UNLIKELY(m_value.array->capacity() != old_capacity)) + { + // capacity has changed: update all parents + set_parents(); + return j; + } + } + + // ordered_json uses a vector internally, so pointers could have + // been invalidated; see https://github.com/nlohmann/json/issues/2962 +#ifdef JSON_HEDLEY_MSVC_VERSION +#pragma warning(push ) +#pragma warning(disable : 4127) // ignore warning to replace if with if constexpr +#endif + if (detail::is_ordered_map<object_t>::value) + { + set_parents(); + return j; + } +#ifdef JSON_HEDLEY_MSVC_VERSION +#pragma warning( pop ) +#endif + + j.m_parent = this; +#else + static_cast<void>(j); + static_cast<void>(old_capacity); +#endif + return j; + } + + public: + ////////////////////////// + // JSON parser callback // + ////////////////////////// + + /// @brief parser event types + /// @sa https://json.nlohmann.me/api/basic_json/parse_event_t/ + 
using parse_event_t = detail::parse_event_t; + + /// @brief per-element parser callback type + /// @sa https://json.nlohmann.me/api/basic_json/parser_callback_t/ + using parser_callback_t = detail::parser_callback_t<basic_json>; + + ////////////////// + // constructors // + ////////////////// + + /// @name constructors and destructors + /// Constructors of class @ref basic_json, copy/move constructor, copy + /// assignment, static functions creating objects, and the destructor. + /// @{ + + /// @brief create an empty value with a given type + /// @sa https://json.nlohmann.me/api/basic_json/basic_json/ + basic_json(const value_t v) + : m_type(v), m_value(v) + { + assert_invariant(); + } + + /// @brief create a null object + /// @sa https://json.nlohmann.me/api/basic_json/basic_json/ + basic_json(std::nullptr_t = nullptr) noexcept // NOLINT(bugprone-exception-escape) + : basic_json(value_t::null) + { + assert_invariant(); + } + + /// @brief create a JSON value from compatible types + /// @sa https://json.nlohmann.me/api/basic_json/basic_json/ + template < typename CompatibleType, + typename U = detail::uncvref_t<CompatibleType>, + detail::enable_if_t < + !detail::is_basic_json<U>::value && detail::is_compatible_type<basic_json_t, U>::value, int > = 0 > + basic_json(CompatibleType && val) noexcept(noexcept( // NOLINT(bugprone-forwarding-reference-overload,bugprone-exception-escape) + JSONSerializer<U>::to_json(std::declval<basic_json_t&>(), + std::forward<CompatibleType>(val)))) + { + JSONSerializer<U>::to_json(*this, std::forward<CompatibleType>(val)); + set_parents(); + assert_invariant(); + } + + /// @brief create a JSON value from an existing one + /// @sa https://json.nlohmann.me/api/basic_json/basic_json/ + template < typename BasicJsonType, + detail::enable_if_t < + detail::is_basic_json<BasicJsonType>::value&& !std::is_same<basic_json, BasicJsonType>::value, int > = 0 > + basic_json(const BasicJsonType& val) + { + using other_boolean_t = typename BasicJsonType::boolean_t; + using other_number_float_t = typename BasicJsonType::number_float_t; + using other_number_integer_t = typename BasicJsonType::number_integer_t; + using other_number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using other_string_t = typename BasicJsonType::string_t; + using other_object_t = typename BasicJsonType::object_t; + using other_array_t = typename BasicJsonType::array_t; + using other_binary_t = typename BasicJsonType::binary_t; + + switch (val.type()) + { + case value_t::boolean: + JSONSerializer<other_boolean_t>::to_json(*this, val.template get<other_boolean_t>()); + break; + case value_t::number_float: + JSONSerializer<other_number_float_t>::to_json(*this, val.template get<other_number_float_t>()); + break; + case value_t::number_integer: + JSONSerializer<other_number_integer_t>::to_json(*this, val.template get<other_number_integer_t>()); + break; + case value_t::number_unsigned: + JSONSerializer<other_number_unsigned_t>::to_json(*this, val.template get<other_number_unsigned_t>()); + break; + case value_t::string: + JSONSerializer<other_string_t>::to_json(*this, val.template get_ref<const other_string_t&>()); + break; + case value_t::object: + JSONSerializer<other_object_t>::to_json(*this, val.template get_ref<const other_object_t&>()); + break; + case value_t::array: + JSONSerializer<other_array_t>::to_json(*this, val.template get_ref<const other_array_t&>()); + break; + case value_t::binary: + JSONSerializer<other_binary_t>::to_json(*this, val.template get_ref<const other_binary_t&>()); + 
break; + case value_t::null: + *this = nullptr; + break; + case value_t::discarded: + m_type = value_t::discarded; + break; + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) LCOV_EXCL_LINE + } + JSON_ASSERT(m_type == val.type()); + set_parents(); + assert_invariant(); + } + + /// @brief create a container (array or object) from an initializer list + /// @sa https://json.nlohmann.me/api/basic_json/basic_json/ + basic_json(initializer_list_t init, + bool type_deduction = true, + value_t manual_type = value_t::array) + { + // check if each element is an array with two elements whose first + // element is a string + bool is_an_object = std::all_of(init.begin(), init.end(), + [](const detail::json_ref<basic_json>& element_ref) + { + return element_ref->is_array() && element_ref->size() == 2 && (*element_ref)[0].is_string(); + }); + + // adjust type if type deduction is not wanted + if (!type_deduction) + { + // if array is wanted, do not create an object though possible + if (manual_type == value_t::array) + { + is_an_object = false; + } + + // if object is wanted but impossible, throw an exception + if (JSON_HEDLEY_UNLIKELY(manual_type == value_t::object && !is_an_object)) + { + JSON_THROW(type_error::create(301, "cannot create object from initializer list", nullptr)); + } + } + + if (is_an_object) + { + // the initializer list is a list of pairs -> create object + m_type = value_t::object; + m_value = value_t::object; + + for (auto& element_ref : init) + { + auto element = element_ref.moved_or_copied(); + m_value.object->emplace( + std::move(*((*element.m_value.array)[0].m_value.string)), + std::move((*element.m_value.array)[1])); + } + } + else + { + // the initializer list describes an array -> create array + m_type = value_t::array; + m_value.array = create<array_t>(init.begin(), init.end()); + } + + set_parents(); + assert_invariant(); + } + + /// @brief explicitly create a binary array (without subtype) + /// @sa https://json.nlohmann.me/api/basic_json/binary/ + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json binary(const typename binary_t::container_type& init) + { + auto res = basic_json(); + res.m_type = value_t::binary; + res.m_value = init; + return res; + } + + /// @brief explicitly create a binary array (with subtype) + /// @sa https://json.nlohmann.me/api/basic_json/binary/ + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json binary(const typename binary_t::container_type& init, typename binary_t::subtype_type subtype) + { + auto res = basic_json(); + res.m_type = value_t::binary; + res.m_value = binary_t(init, subtype); + return res; + } + + /// @brief explicitly create a binary array + /// @sa https://json.nlohmann.me/api/basic_json/binary/ + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json binary(typename binary_t::container_type&& init) + { + auto res = basic_json(); + res.m_type = value_t::binary; + res.m_value = std::move(init); + return res; + } + + /// @brief explicitly create a binary array (with subtype) + /// @sa https://json.nlohmann.me/api/basic_json/binary/ + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json binary(typename binary_t::container_type&& init, typename binary_t::subtype_type subtype) + { + auto res = basic_json(); + res.m_type = value_t::binary; + res.m_value = binary_t(std::move(init), subtype); + return res; + } + + /// @brief explicitly create an array from an initializer list + /// @sa https://json.nlohmann.me/api/basic_json/array/ + JSON_HEDLEY_WARN_UNUSED_RESULT + static 
basic_json array(initializer_list_t init = {}) + { + return basic_json(init, false, value_t::array); + } + + /// @brief explicitly create an object from an initializer list + /// @sa https://json.nlohmann.me/api/basic_json/object/ + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json object(initializer_list_t init = {}) + { + return basic_json(init, false, value_t::object); + } + + /// @brief construct an array with count copies of given value + /// @sa https://json.nlohmann.me/api/basic_json/basic_json/ + basic_json(size_type cnt, const basic_json& val) + : m_type(value_t::array) + { + m_value.array = create<array_t>(cnt, val); + set_parents(); + assert_invariant(); + } + + /// @brief construct a JSON container given an iterator range + /// @sa https://json.nlohmann.me/api/basic_json/basic_json/ + template < class InputIT, typename std::enable_if < + std::is_same<InputIT, typename basic_json_t::iterator>::value || + std::is_same<InputIT, typename basic_json_t::const_iterator>::value, int >::type = 0 > + basic_json(InputIT first, InputIT last) + { + JSON_ASSERT(first.m_object != nullptr); + JSON_ASSERT(last.m_object != nullptr); + + // make sure iterator fits the current value + if (JSON_HEDLEY_UNLIKELY(first.m_object != last.m_object)) + { + JSON_THROW(invalid_iterator::create(201, "iterators are not compatible", nullptr)); + } + + // copy type from first iterator + m_type = first.m_object->m_type; + + // check if iterator range is complete for primitive values + switch (m_type) + { + case value_t::boolean: + case value_t::number_float: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::string: + { + if (JSON_HEDLEY_UNLIKELY(!first.m_it.primitive_iterator.is_begin() + || !last.m_it.primitive_iterator.is_end())) + { + JSON_THROW(invalid_iterator::create(204, "iterators out of range", first.m_object)); + } + break; + } + + case value_t::null: + case value_t::object: + case value_t::array: + case value_t::binary: + case value_t::discarded: + default: + break; + } + + switch (m_type) + { + case value_t::number_integer: + { + m_value.number_integer = first.m_object->m_value.number_integer; + break; + } + + case value_t::number_unsigned: + { + m_value.number_unsigned = first.m_object->m_value.number_unsigned; + break; + } + + case value_t::number_float: + { + m_value.number_float = first.m_object->m_value.number_float; + break; + } + + case value_t::boolean: + { + m_value.boolean = first.m_object->m_value.boolean; + break; + } + + case value_t::string: + { + m_value = *first.m_object->m_value.string; + break; + } + + case value_t::object: + { + m_value.object = create<object_t>(first.m_it.object_iterator, + last.m_it.object_iterator); + break; + } + + case value_t::array: + { + m_value.array = create<array_t>(first.m_it.array_iterator, + last.m_it.array_iterator); + break; + } + + case value_t::binary: + { + m_value = *first.m_object->m_value.binary; + break; + } + + case value_t::null: + case value_t::discarded: + default: + JSON_THROW(invalid_iterator::create(206, detail::concat("cannot construct with iterators from ", first.m_object->type_name()), first.m_object)); + } + + set_parents(); + assert_invariant(); + } + + + /////////////////////////////////////// + // other constructors and destructor // + /////////////////////////////////////// + + template<typename JsonRef, + detail::enable_if_t<detail::conjunction<detail::is_json_ref<JsonRef>, + std::is_same<typename JsonRef::value_type, basic_json>>::value, int> = 0 > + basic_json(const JsonRef& ref) : 
basic_json(ref.moved_or_copied()) {} + + /// @brief copy constructor + /// @sa https://json.nlohmann.me/api/basic_json/basic_json/ + basic_json(const basic_json& other) + : m_type(other.m_type) + { + // check of passed value is valid + other.assert_invariant(); + + switch (m_type) + { + case value_t::object: + { + m_value = *other.m_value.object; + break; + } + + case value_t::array: + { + m_value = *other.m_value.array; + break; + } + + case value_t::string: + { + m_value = *other.m_value.string; + break; + } + + case value_t::boolean: + { + m_value = other.m_value.boolean; + break; + } + + case value_t::number_integer: + { + m_value = other.m_value.number_integer; + break; + } + + case value_t::number_unsigned: + { + m_value = other.m_value.number_unsigned; + break; + } + + case value_t::number_float: + { + m_value = other.m_value.number_float; + break; + } + + case value_t::binary: + { + m_value = *other.m_value.binary; + break; + } + + case value_t::null: + case value_t::discarded: + default: + break; + } + + set_parents(); + assert_invariant(); + } + + /// @brief move constructor + /// @sa https://json.nlohmann.me/api/basic_json/basic_json/ + basic_json(basic_json&& other) noexcept + : m_type(std::move(other.m_type)), + m_value(std::move(other.m_value)) + { + // check that passed value is valid + other.assert_invariant(false); + + // invalidate payload + other.m_type = value_t::null; + other.m_value = {}; + + set_parents(); + assert_invariant(); + } + + /// @brief copy assignment + /// @sa https://json.nlohmann.me/api/basic_json/operator=/ + basic_json& operator=(basic_json other) noexcept ( + std::is_nothrow_move_constructible<value_t>::value&& + std::is_nothrow_move_assignable<value_t>::value&& + std::is_nothrow_move_constructible<json_value>::value&& + std::is_nothrow_move_assignable<json_value>::value + ) + { + // check that passed value is valid + other.assert_invariant(); + + using std::swap; + swap(m_type, other.m_type); + swap(m_value, other.m_value); + + set_parents(); + assert_invariant(); + return *this; + } + + /// @brief destructor + /// @sa https://json.nlohmann.me/api/basic_json/~basic_json/ + ~basic_json() noexcept + { + assert_invariant(false); + m_value.destroy(m_type); + } + + /// @} + + public: + /////////////////////// + // object inspection // + /////////////////////// + + /// @name object inspection + /// Functions to inspect the type of a JSON value. 
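+    // Illustrative note (added, not part of the original documentation): the
+    // move constructor above leaves the moved-from value as null, and
+    // operator= takes its parameter by value (copy-and-swap), so a single
+    // implementation covers both copy and move assignment.
+    //
+    //     nlohmann::json a = {1, 2, 3};
+    //     nlohmann::json b = std::move(a);
+    //     // b is [1,2,3]; a.is_null() is true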
+ /// @{ + + /// @brief serialization + /// @sa https://json.nlohmann.me/api/basic_json/dump/ + string_t dump(const int indent = -1, + const char indent_char = ' ', + const bool ensure_ascii = false, + const error_handler_t error_handler = error_handler_t::strict) const + { + string_t result; + serializer s(detail::output_adapter<char, string_t>(result), indent_char, error_handler); + + if (indent >= 0) + { + s.dump(*this, true, ensure_ascii, static_cast<unsigned int>(indent)); + } + else + { + s.dump(*this, false, ensure_ascii, 0); + } + + return result; + } + + /// @brief return the type of the JSON value (explicit) + /// @sa https://json.nlohmann.me/api/basic_json/type/ + constexpr value_t type() const noexcept + { + return m_type; + } + + /// @brief return whether type is primitive + /// @sa https://json.nlohmann.me/api/basic_json/is_primitive/ + constexpr bool is_primitive() const noexcept + { + return is_null() || is_string() || is_boolean() || is_number() || is_binary(); + } + + /// @brief return whether type is structured + /// @sa https://json.nlohmann.me/api/basic_json/is_structured/ + constexpr bool is_structured() const noexcept + { + return is_array() || is_object(); + } + + /// @brief return whether value is null + /// @sa https://json.nlohmann.me/api/basic_json/is_null/ + constexpr bool is_null() const noexcept + { + return m_type == value_t::null; + } + + /// @brief return whether value is a boolean + /// @sa https://json.nlohmann.me/api/basic_json/is_boolean/ + constexpr bool is_boolean() const noexcept + { + return m_type == value_t::boolean; + } + + /// @brief return whether value is a number + /// @sa https://json.nlohmann.me/api/basic_json/is_number/ + constexpr bool is_number() const noexcept + { + return is_number_integer() || is_number_float(); + } + + /// @brief return whether value is an integer number + /// @sa https://json.nlohmann.me/api/basic_json/is_number_integer/ + constexpr bool is_number_integer() const noexcept + { + return m_type == value_t::number_integer || m_type == value_t::number_unsigned; + } + + /// @brief return whether value is an unsigned integer number + /// @sa https://json.nlohmann.me/api/basic_json/is_number_unsigned/ + constexpr bool is_number_unsigned() const noexcept + { + return m_type == value_t::number_unsigned; + } + + /// @brief return whether value is a floating-point number + /// @sa https://json.nlohmann.me/api/basic_json/is_number_float/ + constexpr bool is_number_float() const noexcept + { + return m_type == value_t::number_float; + } + + /// @brief return whether value is an object + /// @sa https://json.nlohmann.me/api/basic_json/is_object/ + constexpr bool is_object() const noexcept + { + return m_type == value_t::object; + } + + /// @brief return whether value is an array + /// @sa https://json.nlohmann.me/api/basic_json/is_array/ + constexpr bool is_array() const noexcept + { + return m_type == value_t::array; + } + + /// @brief return whether value is a string + /// @sa https://json.nlohmann.me/api/basic_json/is_string/ + constexpr bool is_string() const noexcept + { + return m_type == value_t::string; + } + + /// @brief return whether value is a binary array + /// @sa https://json.nlohmann.me/api/basic_json/is_binary/ + constexpr bool is_binary() const noexcept + { + return m_type == value_t::binary; + } + + /// @brief return whether value is discarded + /// @sa https://json.nlohmann.me/api/basic_json/is_discarded/ + constexpr bool is_discarded() const noexcept + { + return m_type == value_t::discarded; + } + + /// 
@brief return the type of the JSON value (implicit) + /// @sa https://json.nlohmann.me/api/basic_json/operator_value_t/ + constexpr operator value_t() const noexcept + { + return m_type; + } + + /// @} + + private: + ////////////////// + // value access // + ////////////////// + + /// get a boolean (explicit) + boolean_t get_impl(boolean_t* /*unused*/) const + { + if (JSON_HEDLEY_LIKELY(is_boolean())) + { + return m_value.boolean; + } + + JSON_THROW(type_error::create(302, detail::concat("type must be boolean, but is ", type_name()), this)); + } + + /// get a pointer to the value (object) + object_t* get_impl_ptr(object_t* /*unused*/) noexcept + { + return is_object() ? m_value.object : nullptr; + } + + /// get a pointer to the value (object) + constexpr const object_t* get_impl_ptr(const object_t* /*unused*/) const noexcept + { + return is_object() ? m_value.object : nullptr; + } + + /// get a pointer to the value (array) + array_t* get_impl_ptr(array_t* /*unused*/) noexcept + { + return is_array() ? m_value.array : nullptr; + } + + /// get a pointer to the value (array) + constexpr const array_t* get_impl_ptr(const array_t* /*unused*/) const noexcept + { + return is_array() ? m_value.array : nullptr; + } + + /// get a pointer to the value (string) + string_t* get_impl_ptr(string_t* /*unused*/) noexcept + { + return is_string() ? m_value.string : nullptr; + } + + /// get a pointer to the value (string) + constexpr const string_t* get_impl_ptr(const string_t* /*unused*/) const noexcept + { + return is_string() ? m_value.string : nullptr; + } + + /// get a pointer to the value (boolean) + boolean_t* get_impl_ptr(boolean_t* /*unused*/) noexcept + { + return is_boolean() ? &m_value.boolean : nullptr; + } + + /// get a pointer to the value (boolean) + constexpr const boolean_t* get_impl_ptr(const boolean_t* /*unused*/) const noexcept + { + return is_boolean() ? &m_value.boolean : nullptr; + } + + /// get a pointer to the value (integer number) + number_integer_t* get_impl_ptr(number_integer_t* /*unused*/) noexcept + { + return is_number_integer() ? &m_value.number_integer : nullptr; + } + + /// get a pointer to the value (integer number) + constexpr const number_integer_t* get_impl_ptr(const number_integer_t* /*unused*/) const noexcept + { + return is_number_integer() ? &m_value.number_integer : nullptr; + } + + /// get a pointer to the value (unsigned number) + number_unsigned_t* get_impl_ptr(number_unsigned_t* /*unused*/) noexcept + { + return is_number_unsigned() ? &m_value.number_unsigned : nullptr; + } + + /// get a pointer to the value (unsigned number) + constexpr const number_unsigned_t* get_impl_ptr(const number_unsigned_t* /*unused*/) const noexcept + { + return is_number_unsigned() ? &m_value.number_unsigned : nullptr; + } + + /// get a pointer to the value (floating-point number) + number_float_t* get_impl_ptr(number_float_t* /*unused*/) noexcept + { + return is_number_float() ? &m_value.number_float : nullptr; + } + + /// get a pointer to the value (floating-point number) + constexpr const number_float_t* get_impl_ptr(const number_float_t* /*unused*/) const noexcept + { + return is_number_float() ? &m_value.number_float : nullptr; + } + + /// get a pointer to the value (binary) + binary_t* get_impl_ptr(binary_t* /*unused*/) noexcept + { + return is_binary() ? m_value.binary : nullptr; + } + + /// get a pointer to the value (binary) + constexpr const binary_t* get_impl_ptr(const binary_t* /*unused*/) const noexcept + { + return is_binary() ? m_value.binary : nullptr; + } + + /*! 
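+    An illustrative sketch of the pointer-style access implemented by the
+    get_impl_ptr overloads above (added note; the value j is a hypothetical
+    example):
+
+    @code {.cpp}
+    nlohmann::json j = 42;
+    if (auto* n = j.get_ptr<nlohmann::json::number_integer_t*>())
+    {
+        *n += 1; // direct, copy-free access; j is now 43
+    }
+    if (j.get_ptr<nlohmann::json::string_t*>() == nullptr)
+    {
+        // a type mismatch yields nullptr instead of throwing
+    }
+    @endcode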
+ @brief helper function to implement get_ref() + + This function helps to implement get_ref() without code duplication for + const and non-const overloads + + @tparam ThisType will be deduced as `basic_json` or `const basic_json` + + @throw type_error.303 if ReferenceType does not match underlying value + type of the current JSON + */ + template<typename ReferenceType, typename ThisType> + static ReferenceType get_ref_impl(ThisType& obj) + { + // delegate the call to get_ptr<>() + auto* ptr = obj.template get_ptr<typename std::add_pointer<ReferenceType>::type>(); + + if (JSON_HEDLEY_LIKELY(ptr != nullptr)) + { + return *ptr; + } + + JSON_THROW(type_error::create(303, detail::concat("incompatible ReferenceType for get_ref, actual type is ", obj.type_name()), &obj)); + } + + public: + /// @name value access + /// Direct access to the stored value of a JSON value. + /// @{ + + /// @brief get a pointer value (implicit) + /// @sa https://json.nlohmann.me/api/basic_json/get_ptr/ + template<typename PointerType, typename std::enable_if< + std::is_pointer<PointerType>::value, int>::type = 0> + auto get_ptr() noexcept -> decltype(std::declval<basic_json_t&>().get_impl_ptr(std::declval<PointerType>())) + { + // delegate the call to get_impl_ptr<>() + return get_impl_ptr(static_cast<PointerType>(nullptr)); + } + + /// @brief get a pointer value (implicit) + /// @sa https://json.nlohmann.me/api/basic_json/get_ptr/ + template < typename PointerType, typename std::enable_if < + std::is_pointer<PointerType>::value&& + std::is_const<typename std::remove_pointer<PointerType>::type>::value, int >::type = 0 > + constexpr auto get_ptr() const noexcept -> decltype(std::declval<const basic_json_t&>().get_impl_ptr(std::declval<PointerType>())) + { + // delegate the call to get_impl_ptr<>() const + return get_impl_ptr(static_cast<PointerType>(nullptr)); + } + + private: + /*! + @brief get a value (explicit) + + Explicit type conversion between the JSON value and a compatible value + which is [CopyConstructible](https://en.cppreference.com/w/cpp/named_req/CopyConstructible) + and [DefaultConstructible](https://en.cppreference.com/w/cpp/named_req/DefaultConstructible). + The value is converted by calling the @ref json_serializer<ValueType> + `from_json()` method. + + The function is equivalent to executing + @code {.cpp} + ValueType ret; + JSONSerializer<ValueType>::from_json(*this, ret); + return ret; + @endcode + + This overloads is chosen if: + - @a ValueType is not @ref basic_json, + - @ref json_serializer<ValueType> has a `from_json()` method of the form + `void from_json(const basic_json&, ValueType&)`, and + - @ref json_serializer<ValueType> does not have a `from_json()` method of + the form `ValueType from_json(const basic_json&)` + + @tparam ValueType the returned value type + + @return copy of the JSON value, converted to @a ValueType + + @throw what @ref json_serializer<ValueType> `from_json()` method throws + + @liveexample{The example below shows several conversions from JSON values + to other types. 
There are a few things to note: (1) Floating-point numbers can
+    be converted to integers\, (2) A JSON array can be converted to a standard
+    `std::vector<short>`\, (3) A JSON object can be converted to C++
+    associative containers such as `std::unordered_map<std::string\,
+    json>`.,get__ValueType_const}
+
+    @since version 2.1.0
+    */
+    template < typename ValueType,
+               detail::enable_if_t <
+                   detail::is_default_constructible<ValueType>::value&&
+                   detail::has_from_json<basic_json_t, ValueType>::value,
+                   int > = 0 >
+    ValueType get_impl(detail::priority_tag<0> /*unused*/) const noexcept(noexcept(
+                JSONSerializer<ValueType>::from_json(std::declval<const basic_json_t&>(), std::declval<ValueType&>())))
+    {
+        auto ret = ValueType();
+        JSONSerializer<ValueType>::from_json(*this, ret);
+        return ret;
+    }
+
+    /*!
+    @brief get a value (explicit); special case
+
+    Explicit type conversion between the JSON value and a compatible value
+    which is **not** [CopyConstructible](https://en.cppreference.com/w/cpp/named_req/CopyConstructible)
+    and **not** [DefaultConstructible](https://en.cppreference.com/w/cpp/named_req/DefaultConstructible).
+    The value is converted by calling the @ref json_serializer<ValueType>
+    `from_json()` method.
+
+    The function is equivalent to executing
+    @code {.cpp}
+    return JSONSerializer<ValueType>::from_json(*this);
+    @endcode
+
+    This overload is chosen if:
+    - @a ValueType is not @ref basic_json and
+    - @ref json_serializer<ValueType> has a `from_json()` method of the form
+      `ValueType from_json(const basic_json&)`
+
+    @note If @ref json_serializer<ValueType> has both overloads of
+    `from_json()`, this one is chosen.
+
+    @tparam ValueType the returned value type
+
+    @return copy of the JSON value, converted to @a ValueType
+
+    @throw what @ref json_serializer<ValueType> `from_json()` method throws
+
+    @since version 2.1.0
+    */
+    template < typename ValueType,
+               detail::enable_if_t <
+                   detail::has_non_default_from_json<basic_json_t, ValueType>::value,
+                   int > = 0 >
+    ValueType get_impl(detail::priority_tag<1> /*unused*/) const noexcept(noexcept(
+        JSONSerializer<ValueType>::from_json(std::declval<const basic_json_t&>())))
+    {
+        return JSONSerializer<ValueType>::from_json(*this);
+    }
+
+    /*!
+    @brief get special-case overload
+
+    This overload converts the current @ref basic_json into a different
+    @ref basic_json type
+
+    @tparam BasicJsonType == @ref basic_json
+
+    @return a copy of *this, converted into @a BasicJsonType
+
+    @complexity Depending on the implementation of the called `from_json()`
+                method.
+
+    @since version 3.2.0
+    */
+    template < typename BasicJsonType,
+               detail::enable_if_t <
+                   detail::is_basic_json<BasicJsonType>::value,
+                   int > = 0 >
+    BasicJsonType get_impl(detail::priority_tag<2> /*unused*/) const
+    {
+        return *this;
+    }
+
+    /*!
+    @brief get special-case overload
+
+    This overload avoids a lot of template boilerplate; it can be seen as the
+    identity method
+
+    @tparam BasicJsonType == @ref basic_json
+
+    @return a copy of *this
+
+    @complexity Constant.
+
+    @since version 2.1.0
+    */
+    template<typename BasicJsonType,
+             detail::enable_if_t<
+                 std::is_same<BasicJsonType, basic_json_t>::value,
+                 int> = 0>
+    basic_json get_impl(detail::priority_tag<3> /*unused*/) const
+    {
+        return *this;
+    }
+
+    /*!
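+    A compact, non-normative sketch of how the priority-tagged overloads above
+    surface through the public get<>() interface, assuming the default
+    `nlohmann::json` specialization (all names below are illustrative):
+
+    @code {.cpp}
+    #include <nlohmann/json.hpp>
+    #include <map>
+    #include <string>
+    #include <vector>
+
+    int main()
+    {
+        nlohmann::json j = {{"ids", {1, 2, 3}}, {"pi", 3.14}};
+
+        // from_json()-based conversions, dispatched via priority_tag
+        auto ids = j.at("ids").get<std::vector<int>>();
+        auto all = j.get<std::map<std::string, nlohmann::json>>();
+        int truncated = j.at("pi").get<int>();  // floating-point truncated to 3
+
+        return (ids.size() == 3 && all.size() == 2 && truncated == 3) ? 0 : 1;
+    }
+    @endcode
+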
+ @brief get a pointer value (explicit) + @copydoc get() + */ + template<typename PointerType, + detail::enable_if_t< + std::is_pointer<PointerType>::value, + int> = 0> + constexpr auto get_impl(detail::priority_tag<4> /*unused*/) const noexcept + -> decltype(std::declval<const basic_json_t&>().template get_ptr<PointerType>()) + { + // delegate the call to get_ptr + return get_ptr<PointerType>(); + } + + public: + /*! + @brief get a (pointer) value (explicit) + + Performs explicit type conversion between the JSON value and a compatible value if required. + + - If the requested type is a pointer to the internally stored JSON value that pointer is returned. + No copies are made. + + - If the requested type is the current @ref basic_json, or a different @ref basic_json convertible + from the current @ref basic_json. + + - Otherwise the value is converted by calling the @ref json_serializer<ValueType> `from_json()` + method. + + @tparam ValueTypeCV the provided value type + @tparam ValueType the returned value type + + @return copy of the JSON value, converted to @tparam ValueType if necessary + + @throw what @ref json_serializer<ValueType> `from_json()` method throws if conversion is required + + @since version 2.1.0 + */ + template < typename ValueTypeCV, typename ValueType = detail::uncvref_t<ValueTypeCV>> +#if defined(JSON_HAS_CPP_14) + constexpr +#endif + auto get() const noexcept( + noexcept(std::declval<const basic_json_t&>().template get_impl<ValueType>(detail::priority_tag<4> {}))) + -> decltype(std::declval<const basic_json_t&>().template get_impl<ValueType>(detail::priority_tag<4> {})) + { + // we cannot static_assert on ValueTypeCV being non-const, because + // there is support for get<const basic_json_t>(), which is why we + // still need the uncvref + static_assert(!std::is_reference<ValueTypeCV>::value, + "get() cannot be used with reference types, you might want to use get_ref()"); + return get_impl<ValueType>(detail::priority_tag<4> {}); + } + + /*! + @brief get a pointer value (explicit) + + Explicit pointer access to the internally stored JSON value. No copies are + made. + + @warning The pointer becomes invalid if the underlying JSON object + changes. + + @tparam PointerType pointer type; must be a pointer to @ref array_t, @ref + object_t, @ref string_t, @ref boolean_t, @ref number_integer_t, + @ref number_unsigned_t, or @ref number_float_t. + + @return pointer to the internally stored JSON value if the requested + pointer type @a PointerType fits to the JSON value; `nullptr` otherwise + + @complexity Constant. + + @liveexample{The example below shows how pointers to internal values of a + JSON value can be requested. 
Note that no type conversions are made and a + `nullptr` is returned if the value and the requested pointer type does not + match.,get__PointerType} + + @sa see @ref get_ptr() for explicit pointer-member access + + @since version 1.0.0 + */ + template<typename PointerType, typename std::enable_if< + std::is_pointer<PointerType>::value, int>::type = 0> + auto get() noexcept -> decltype(std::declval<basic_json_t&>().template get_ptr<PointerType>()) + { + // delegate the call to get_ptr + return get_ptr<PointerType>(); + } + + /// @brief get a value (explicit) + /// @sa https://json.nlohmann.me/api/basic_json/get_to/ + template < typename ValueType, + detail::enable_if_t < + !detail::is_basic_json<ValueType>::value&& + detail::has_from_json<basic_json_t, ValueType>::value, + int > = 0 > + ValueType & get_to(ValueType& v) const noexcept(noexcept( + JSONSerializer<ValueType>::from_json(std::declval<const basic_json_t&>(), v))) + { + JSONSerializer<ValueType>::from_json(*this, v); + return v; + } + + // specialization to allow calling get_to with a basic_json value + // see https://github.com/nlohmann/json/issues/2175 + template<typename ValueType, + detail::enable_if_t < + detail::is_basic_json<ValueType>::value, + int> = 0> + ValueType & get_to(ValueType& v) const + { + v = *this; + return v; + } + + template < + typename T, std::size_t N, + typename Array = T (&)[N], // NOLINT(cppcoreguidelines-avoid-c-arrays,hicpp-avoid-c-arrays,modernize-avoid-c-arrays) + detail::enable_if_t < + detail::has_from_json<basic_json_t, Array>::value, int > = 0 > + Array get_to(T (&v)[N]) const // NOLINT(cppcoreguidelines-avoid-c-arrays,hicpp-avoid-c-arrays,modernize-avoid-c-arrays) + noexcept(noexcept(JSONSerializer<Array>::from_json( + std::declval<const basic_json_t&>(), v))) + { + JSONSerializer<Array>::from_json(*this, v); + return v; + } + + /// @brief get a reference value (implicit) + /// @sa https://json.nlohmann.me/api/basic_json/get_ref/ + template<typename ReferenceType, typename std::enable_if< + std::is_reference<ReferenceType>::value, int>::type = 0> + ReferenceType get_ref() + { + // delegate call to get_ref_impl + return get_ref_impl<ReferenceType>(*this); + } + + /// @brief get a reference value (implicit) + /// @sa https://json.nlohmann.me/api/basic_json/get_ref/ + template < typename ReferenceType, typename std::enable_if < + std::is_reference<ReferenceType>::value&& + std::is_const<typename std::remove_reference<ReferenceType>::type>::value, int >::type = 0 > + ReferenceType get_ref() const + { + // delegate call to get_ref_impl + return get_ref_impl<ReferenceType>(*this); + } + + /*! + @brief get a value (implicit) + + Implicit type conversion between the JSON value and a compatible value. + The call is realized by calling @ref get() const. + + @tparam ValueType non-pointer type compatible to the JSON value, for + instance `int` for JSON integer numbers, `bool` for JSON booleans, or + `std::vector` types for JSON arrays. The character type of @ref string_t + as well as an initializer list of this type is excluded to avoid + ambiguities as these types implicitly convert to `std::string`. + + @return copy of the JSON value, converted to type @a ValueType + + @throw type_error.302 in case passed type @a ValueType is incompatible + to the JSON value type (e.g., the JSON value is of type boolean, but a + string is requested); see example below + + @complexity Linear in the size of the JSON value. + + @liveexample{The example below shows several conversions from JSON values + to other types. 
There a few things to note: (1) Floating-point numbers can + be converted to integers\, (2) A JSON array can be converted to a standard + `std::vector<short>`\, (3) A JSON object can be converted to C++ + associative containers such as `std::unordered_map<std::string\, + json>`.,operator__ValueType} + + @since version 1.0.0 + */ + template < typename ValueType, typename std::enable_if < + detail::conjunction < + detail::negation<std::is_pointer<ValueType>>, + detail::negation<std::is_same<ValueType, std::nullptr_t>>, + detail::negation<std::is_same<ValueType, detail::json_ref<basic_json>>>, + detail::negation<std::is_same<ValueType, typename string_t::value_type>>, + detail::negation<detail::is_basic_json<ValueType>>, + detail::negation<std::is_same<ValueType, std::initializer_list<typename string_t::value_type>>>, +#if defined(JSON_HAS_CPP_17) && (defined(__GNUC__) || (defined(_MSC_VER) && _MSC_VER >= 1910 && _MSC_VER <= 1914)) + detail::negation<std::is_same<ValueType, std::string_view>>, +#endif +#if defined(JSON_HAS_CPP_17) + detail::negation<std::is_same<ValueType, std::any>>, +#endif + detail::is_detected_lazy<detail::get_template_function, const basic_json_t&, ValueType> + >::value, int >::type = 0 > + JSON_EXPLICIT operator ValueType() const + { + // delegate the call to get<>() const + return get<ValueType>(); + } + + /// @brief get a binary value + /// @sa https://json.nlohmann.me/api/basic_json/get_binary/ + binary_t& get_binary() + { + if (!is_binary()) + { + JSON_THROW(type_error::create(302, detail::concat("type must be binary, but is ", type_name()), this)); + } + + return *get_ptr<binary_t*>(); + } + + /// @brief get a binary value + /// @sa https://json.nlohmann.me/api/basic_json/get_binary/ + const binary_t& get_binary() const + { + if (!is_binary()) + { + JSON_THROW(type_error::create(302, detail::concat("type must be binary, but is ", type_name()), this)); + } + + return *get_ptr<const binary_t*>(); + } + + /// @} + + + //////////////////// + // element access // + //////////////////// + + /// @name element access + /// Access to the JSON value. 
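+    /// A minimal usage sketch of the element-access interface, assuming the
+    /// default `nlohmann::json` specialization (keys and values below are
+    /// illustrative only):
+    /// @code {.cpp}
+    /// #include <nlohmann/json.hpp>
+    /// #include <iostream>
+    ///
+    /// int main()
+    /// {
+    ///     nlohmann::json j = {{"name", "demo"}, {"values", {1, 2, 3}}};
+    ///
+    ///     // checked access: throws out_of_range (403/401) on a miss
+    ///     std::cout << j.at("name") << '\n';
+    ///     std::cout << j.at("values").at(1) << '\n';
+    ///
+    ///     // unchecked access: operator[] inserts missing object keys
+    ///     j["extra"] = true;
+    ///
+    ///     // access with a fallback default for absent keys
+    ///     const int timeout = j.value("timeout", 30);
+    ///     std::cout << timeout << '\n';
+    /// }
+    /// @endcode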
+ /// @{ + + /// @brief access specified array element with bounds checking + /// @sa https://json.nlohmann.me/api/basic_json/at/ + reference at(size_type idx) + { + // at only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + JSON_TRY + { + return set_parent(m_value.array->at(idx)); + } + JSON_CATCH (std::out_of_range&) + { + // create better exception explanation + JSON_THROW(out_of_range::create(401, detail::concat("array index ", std::to_string(idx), " is out of range"), this)); + } + } + else + { + JSON_THROW(type_error::create(304, detail::concat("cannot use at() with ", type_name()), this)); + } + } + + /// @brief access specified array element with bounds checking + /// @sa https://json.nlohmann.me/api/basic_json/at/ + const_reference at(size_type idx) const + { + // at only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + JSON_TRY + { + return m_value.array->at(idx); + } + JSON_CATCH (std::out_of_range&) + { + // create better exception explanation + JSON_THROW(out_of_range::create(401, detail::concat("array index ", std::to_string(idx), " is out of range"), this)); + } + } + else + { + JSON_THROW(type_error::create(304, detail::concat("cannot use at() with ", type_name()), this)); + } + } + + /// @brief access specified object element with bounds checking + /// @sa https://json.nlohmann.me/api/basic_json/at/ + reference at(const typename object_t::key_type& key) + { + // at only works for objects + if (JSON_HEDLEY_UNLIKELY(!is_object())) + { + JSON_THROW(type_error::create(304, detail::concat("cannot use at() with ", type_name()), this)); + } + + auto it = m_value.object->find(key); + if (it == m_value.object->end()) + { + JSON_THROW(out_of_range::create(403, detail::concat("key '", key, "' not found"), this)); + } + return set_parent(it->second); + } + + /// @brief access specified object element with bounds checking + /// @sa https://json.nlohmann.me/api/basic_json/at/ + template<class KeyType, detail::enable_if_t< + detail::is_usable_as_key_type<basic_json_t, KeyType>::value, int> = 0> + reference at(KeyType && key) + { + // at only works for objects + if (JSON_HEDLEY_UNLIKELY(!is_object())) + { + JSON_THROW(type_error::create(304, detail::concat("cannot use at() with ", type_name()), this)); + } + + auto it = m_value.object->find(std::forward<KeyType>(key)); + if (it == m_value.object->end()) + { + JSON_THROW(out_of_range::create(403, detail::concat("key '", string_t(std::forward<KeyType>(key)), "' not found"), this)); + } + return set_parent(it->second); + } + + /// @brief access specified object element with bounds checking + /// @sa https://json.nlohmann.me/api/basic_json/at/ + const_reference at(const typename object_t::key_type& key) const + { + // at only works for objects + if (JSON_HEDLEY_UNLIKELY(!is_object())) + { + JSON_THROW(type_error::create(304, detail::concat("cannot use at() with ", type_name()), this)); + } + + auto it = m_value.object->find(key); + if (it == m_value.object->end()) + { + JSON_THROW(out_of_range::create(403, detail::concat("key '", key, "' not found"), this)); + } + return it->second; + } + + /// @brief access specified object element with bounds checking + /// @sa https://json.nlohmann.me/api/basic_json/at/ + template<class KeyType, detail::enable_if_t< + detail::is_usable_as_key_type<basic_json_t, KeyType>::value, int> = 0> + const_reference at(KeyType && key) const + { + // at only works for objects + if (JSON_HEDLEY_UNLIKELY(!is_object())) + { + JSON_THROW(type_error::create(304, detail::concat("cannot use at() with 
", type_name()), this)); + } + + auto it = m_value.object->find(std::forward<KeyType>(key)); + if (it == m_value.object->end()) + { + JSON_THROW(out_of_range::create(403, detail::concat("key '", string_t(std::forward<KeyType>(key)), "' not found"), this)); + } + return it->second; + } + + /// @brief access specified array element + /// @sa https://json.nlohmann.me/api/basic_json/operator%5B%5D/ + reference operator[](size_type idx) + { + // implicitly convert null value to an empty array + if (is_null()) + { + m_type = value_t::array; + m_value.array = create<array_t>(); + assert_invariant(); + } + + // operator[] only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + // fill up array with null values if given idx is outside range + if (idx >= m_value.array->size()) + { +#if JSON_DIAGNOSTICS + // remember array size & capacity before resizing + const auto old_size = m_value.array->size(); + const auto old_capacity = m_value.array->capacity(); +#endif + m_value.array->resize(idx + 1); + +#if JSON_DIAGNOSTICS + if (JSON_HEDLEY_UNLIKELY(m_value.array->capacity() != old_capacity)) + { + // capacity has changed: update all parents + set_parents(); + } + else + { + // set parent for values added above + set_parents(begin() + static_cast<typename iterator::difference_type>(old_size), static_cast<typename iterator::difference_type>(idx + 1 - old_size)); + } +#endif + assert_invariant(); + } + + return m_value.array->operator[](idx); + } + + JSON_THROW(type_error::create(305, detail::concat("cannot use operator[] with a numeric argument with ", type_name()), this)); + } + + /// @brief access specified array element + /// @sa https://json.nlohmann.me/api/basic_json/operator%5B%5D/ + const_reference operator[](size_type idx) const + { + // const operator[] only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + return m_value.array->operator[](idx); + } + + JSON_THROW(type_error::create(305, detail::concat("cannot use operator[] with a numeric argument with ", type_name()), this)); + } + + /// @brief access specified object element + /// @sa https://json.nlohmann.me/api/basic_json/operator%5B%5D/ + reference operator[](typename object_t::key_type key) + { + // implicitly convert null value to an empty object + if (is_null()) + { + m_type = value_t::object; + m_value.object = create<object_t>(); + assert_invariant(); + } + + // operator[] only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + auto result = m_value.object->emplace(std::move(key), nullptr); + return set_parent(result.first->second); + } + + JSON_THROW(type_error::create(305, detail::concat("cannot use operator[] with a string argument with ", type_name()), this)); + } + + /// @brief access specified object element + /// @sa https://json.nlohmann.me/api/basic_json/operator%5B%5D/ + const_reference operator[](const typename object_t::key_type& key) const + { + // const operator[] only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + auto it = m_value.object->find(key); + JSON_ASSERT(it != m_value.object->end()); + return it->second; + } + + JSON_THROW(type_error::create(305, detail::concat("cannot use operator[] with a string argument with ", type_name()), this)); + } + + // these two functions resolve a (const) char * ambiguity affecting Clang and MSVC + // (they seemingly cannot be constrained to resolve the ambiguity) + template<typename T> + reference operator[](T* key) + { + return operator[](typename object_t::key_type(key)); + } + + template<typename T> + const_reference operator[](T* 
key) const + { + return operator[](typename object_t::key_type(key)); + } + + /// @brief access specified object element + /// @sa https://json.nlohmann.me/api/basic_json/operator%5B%5D/ + template<class KeyType, detail::enable_if_t< + detail::is_usable_as_key_type<basic_json_t, KeyType>::value, int > = 0 > + reference operator[](KeyType && key) + { + // implicitly convert null value to an empty object + if (is_null()) + { + m_type = value_t::object; + m_value.object = create<object_t>(); + assert_invariant(); + } + + // operator[] only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + auto result = m_value.object->emplace(std::forward<KeyType>(key), nullptr); + return set_parent(result.first->second); + } + + JSON_THROW(type_error::create(305, detail::concat("cannot use operator[] with a string argument with ", type_name()), this)); + } + + /// @brief access specified object element + /// @sa https://json.nlohmann.me/api/basic_json/operator%5B%5D/ + template<class KeyType, detail::enable_if_t< + detail::is_usable_as_key_type<basic_json_t, KeyType>::value, int > = 0 > + const_reference operator[](KeyType && key) const + { + // const operator[] only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + auto it = m_value.object->find(std::forward<KeyType>(key)); + JSON_ASSERT(it != m_value.object->end()); + return it->second; + } + + JSON_THROW(type_error::create(305, detail::concat("cannot use operator[] with a string argument with ", type_name()), this)); + } + + /// @brief access specified object element with default value + /// @sa https://json.nlohmann.me/api/basic_json/value/ + // this is the value(const typename object_t::key_type&) overload + template < class KeyType, class ValueType, detail::enable_if_t < + std::is_same<KeyType, typename object_t::key_type>::value + && detail::is_getable<basic_json_t, ValueType>::value + && !std::is_same<value_t, ValueType>::value, int > = 0 > + typename std::decay<ValueType>::type value(const KeyType& key, ValueType && default_value) const + { + // value only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + // if key is found, return value and given default value otherwise + const auto it = find(key); + if (it != end()) + { + return it->template get<typename std::decay<ValueType>::type>(); + } + + return std::forward<ValueType>(default_value); + } + + JSON_THROW(type_error::create(306, detail::concat("cannot use value() with ", type_name()), this)); + } + + /// @brief access specified object element with default value + /// @sa https://json.nlohmann.me/api/basic_json/value/ + /// overload for a default value of type const char* + string_t value(const typename object_t::key_type& key, const char* default_value) const + { + return value(key, string_t(default_value)); + } + + // these two functions, in conjunction with value(const KeyType &, ValueType &&), + // resolve an ambiguity that would otherwise occur between the json_pointer and + // typename object_t::key_type & overloads + template < class ValueType, detail::enable_if_t < + detail::is_getable<basic_json_t, ValueType>::value + && !std::is_same<value_t, ValueType>::value, int > = 0 > + typename std::decay<ValueType>::type value(const char* key, ValueType && default_value) const + { + return value(typename object_t::key_type(key), std::forward<ValueType>(default_value)); + } + + string_t value(const char* key, const char* default_value) const + { + return value(typename object_t::key_type(key), string_t(default_value)); + } + + /// @brief access specified 
object element with default value + /// @sa https://json.nlohmann.me/api/basic_json/value/ + /// using std::is_convertible in a std::enable_if will fail when using explicit conversions + template < class KeyType, class ValueType, detail::enable_if_t < + detail::is_getable<basic_json_t, ValueType>::value + && !std::is_same<value_t, ValueType>::value + && detail::is_usable_as_key_type<basic_json_t, KeyType>::value, int > = 0 > + typename std::decay<ValueType>::type value(KeyType && key, ValueType && default_value) const + { + // value only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + // if key is found, return value and given default value otherwise + const auto it = find(std::forward<KeyType>(key)); + if (it != end()) + { + return it->template get<typename std::decay<ValueType>::type>(); + } + + return std::forward<ValueType>(default_value); + } + + JSON_THROW(type_error::create(306, detail::concat("cannot use value() with ", type_name()), this)); + } + + /// @brief access specified object element with default value + /// @sa https://json.nlohmann.me/api/basic_json/value/ + /// overload for a default value of type const char* + template < class KeyType, detail::enable_if_t < + !detail::is_json_pointer<KeyType>::value, int > = 0 > + string_t value(KeyType && key, const char* default_value) const + { + return value(std::forward<KeyType>(key), string_t(default_value)); + } + + /// @brief access specified object element via JSON Pointer with default value + /// @sa https://json.nlohmann.me/api/basic_json/value/ + template < class ValueType, detail::enable_if_t < + detail::is_getable<basic_json_t, ValueType>::value, int> = 0 > + ValueType value(const json_pointer& ptr, const ValueType& default_value) const + { + // value only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + // if pointer resolves a value, return it or use default value + JSON_TRY + { + return ptr.get_checked(this).template get<ValueType>(); + } + JSON_INTERNAL_CATCH (out_of_range&) + { + return default_value; + } + } + + JSON_THROW(type_error::create(306, detail::concat("cannot use value() with ", type_name()), this)); + } + + template < class ValueType, class BasicJsonType, detail::enable_if_t < + detail::is_getable<basic_json_t, ValueType>::value, int> = 0 > + JSON_HEDLEY_DEPRECATED_FOR(3.11.0, basic_json::json_pointer or nlohmann::json_pointer<basic_json::string_t>) // NOLINT(readability/alt_tokens) + ValueType value(const ::nlohmann::json_pointer<BasicJsonType>& ptr, const ValueType& default_value) const + { + return value(ptr.convert(), default_value); + } + + /// @brief access specified object element via JSON Pointer with default value + /// @sa https://json.nlohmann.me/api/basic_json/value/ + /// overload for a default value of type const char* + JSON_HEDLEY_NON_NULL(3) + string_t value(const json_pointer& ptr, const char* default_value) const + { + return value(ptr, string_t(default_value)); + } + + template<typename BasicJsonType> + JSON_HEDLEY_DEPRECATED_FOR(3.11.0, basic_json::json_pointer or nlohmann::json_pointer<basic_json::string_t>) // NOLINT(readability/alt_tokens) + JSON_HEDLEY_NON_NULL(3) + string_t value(const typename ::nlohmann::json_pointer<BasicJsonType>& ptr, const char* default_value) const + { + return value(ptr.convert(), default_value); + } + + /// @brief access the first element + /// @sa https://json.nlohmann.me/api/basic_json/front/ + reference front() + { + return *begin(); + } + + /// @brief access the first element + /// @sa 
https://json.nlohmann.me/api/basic_json/front/ + const_reference front() const + { + return *cbegin(); + } + + /// @brief access the last element + /// @sa https://json.nlohmann.me/api/basic_json/back/ + reference back() + { + auto tmp = end(); + --tmp; + return *tmp; + } + + /// @brief access the last element + /// @sa https://json.nlohmann.me/api/basic_json/back/ + const_reference back() const + { + auto tmp = cend(); + --tmp; + return *tmp; + } + + /// @brief remove element given an iterator + /// @sa https://json.nlohmann.me/api/basic_json/erase/ + template < class IteratorType, detail::enable_if_t < + std::is_same<IteratorType, typename basic_json_t::iterator>::value || + std::is_same<IteratorType, typename basic_json_t::const_iterator>::value, int > = 0 > + IteratorType erase(IteratorType pos) + { + // make sure iterator fits the current value + if (JSON_HEDLEY_UNLIKELY(this != pos.m_object)) + { + JSON_THROW(invalid_iterator::create(202, "iterator does not fit current value", this)); + } + + IteratorType result = end(); + + switch (m_type) + { + case value_t::boolean: + case value_t::number_float: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::string: + case value_t::binary: + { + if (JSON_HEDLEY_UNLIKELY(!pos.m_it.primitive_iterator.is_begin())) + { + JSON_THROW(invalid_iterator::create(205, "iterator out of range", this)); + } + + if (is_string()) + { + AllocatorType<string_t> alloc; + std::allocator_traits<decltype(alloc)>::destroy(alloc, m_value.string); + std::allocator_traits<decltype(alloc)>::deallocate(alloc, m_value.string, 1); + m_value.string = nullptr; + } + else if (is_binary()) + { + AllocatorType<binary_t> alloc; + std::allocator_traits<decltype(alloc)>::destroy(alloc, m_value.binary); + std::allocator_traits<decltype(alloc)>::deallocate(alloc, m_value.binary, 1); + m_value.binary = nullptr; + } + + m_type = value_t::null; + assert_invariant(); + break; + } + + case value_t::object: + { + result.m_it.object_iterator = m_value.object->erase(pos.m_it.object_iterator); + break; + } + + case value_t::array: + { + result.m_it.array_iterator = m_value.array->erase(pos.m_it.array_iterator); + break; + } + + case value_t::null: + case value_t::discarded: + default: + JSON_THROW(type_error::create(307, detail::concat("cannot use erase() with ", type_name()), this)); + } + + return result; + } + + /// @brief remove elements given an iterator range + /// @sa https://json.nlohmann.me/api/basic_json/erase/ + template < class IteratorType, detail::enable_if_t < + std::is_same<IteratorType, typename basic_json_t::iterator>::value || + std::is_same<IteratorType, typename basic_json_t::const_iterator>::value, int > = 0 > + IteratorType erase(IteratorType first, IteratorType last) + { + // make sure iterator fits the current value + if (JSON_HEDLEY_UNLIKELY(this != first.m_object || this != last.m_object)) + { + JSON_THROW(invalid_iterator::create(203, "iterators do not fit current value", this)); + } + + IteratorType result = end(); + + switch (m_type) + { + case value_t::boolean: + case value_t::number_float: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::string: + case value_t::binary: + { + if (JSON_HEDLEY_LIKELY(!first.m_it.primitive_iterator.is_begin() + || !last.m_it.primitive_iterator.is_end())) + { + JSON_THROW(invalid_iterator::create(204, "iterators out of range", this)); + } + + if (is_string()) + { + AllocatorType<string_t> alloc; + std::allocator_traits<decltype(alloc)>::destroy(alloc, m_value.string); + 
std::allocator_traits<decltype(alloc)>::deallocate(alloc, m_value.string, 1); + m_value.string = nullptr; + } + else if (is_binary()) + { + AllocatorType<binary_t> alloc; + std::allocator_traits<decltype(alloc)>::destroy(alloc, m_value.binary); + std::allocator_traits<decltype(alloc)>::deallocate(alloc, m_value.binary, 1); + m_value.binary = nullptr; + } + + m_type = value_t::null; + assert_invariant(); + break; + } + + case value_t::object: + { + result.m_it.object_iterator = m_value.object->erase(first.m_it.object_iterator, + last.m_it.object_iterator); + break; + } + + case value_t::array: + { + result.m_it.array_iterator = m_value.array->erase(first.m_it.array_iterator, + last.m_it.array_iterator); + break; + } + + case value_t::null: + case value_t::discarded: + default: + JSON_THROW(type_error::create(307, detail::concat("cannot use erase() with ", type_name()), this)); + } + + return result; + } + + private: + template < typename KeyType, detail::enable_if_t < + detail::has_erase_with_key_type<basic_json_t, KeyType>::value, int > = 0 > + size_type erase_internal(KeyType && key) + { + // this erase only works for objects + if (JSON_HEDLEY_UNLIKELY(!is_object())) + { + JSON_THROW(type_error::create(307, detail::concat("cannot use erase() with ", type_name()), this)); + } + + return m_value.object->erase(std::forward<KeyType>(key)); + } + + template < typename KeyType, detail::enable_if_t < + !detail::has_erase_with_key_type<basic_json_t, KeyType>::value, int > = 0 > + size_type erase_internal(KeyType && key) + { + // this erase only works for objects + if (JSON_HEDLEY_UNLIKELY(!is_object())) + { + JSON_THROW(type_error::create(307, detail::concat("cannot use erase() with ", type_name()), this)); + } + + const auto it = m_value.object->find(std::forward<KeyType>(key)); + if (it != m_value.object->end()) + { + m_value.object->erase(it); + return 1; + } + return 0; + } + + public: + + /// @brief remove element from a JSON object given a key + /// @sa https://json.nlohmann.me/api/basic_json/erase/ + size_type erase(const typename object_t::key_type& key) + { + // the indirection via erase_internal() is added to avoid making this + // function a template and thus de-rank it during overload resolution + return erase_internal(key); + } + + /// @brief remove element from a JSON object given a key + /// @sa https://json.nlohmann.me/api/basic_json/erase/ + template<class KeyType, detail::enable_if_t< + detail::is_usable_as_key_type<basic_json_t, KeyType>::value, int> = 0> + size_type erase(KeyType && key) + { + return erase_internal(std::forward<KeyType>(key)); + } + + /// @brief remove element from a JSON array given an index + /// @sa https://json.nlohmann.me/api/basic_json/erase/ + void erase(const size_type idx) + { + // this erase only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + if (JSON_HEDLEY_UNLIKELY(idx >= size())) + { + JSON_THROW(out_of_range::create(401, detail::concat("array index ", std::to_string(idx), " is out of range"), this)); + } + + m_value.array->erase(m_value.array->begin() + static_cast<difference_type>(idx)); + } + else + { + JSON_THROW(type_error::create(307, detail::concat("cannot use erase() with ", type_name()), this)); + } + } + + /// @} + + + //////////// + // lookup // + //////////// + + /// @name lookup + /// @{ + + /// @brief find an element in a JSON object + /// @sa https://json.nlohmann.me/api/basic_json/find/ + iterator find(const typename object_t::key_type& key) + { + auto result = end(); + + if (is_object()) + { + 
result.m_it.object_iterator = m_value.object->find(key); + } + + return result; + } + + /// @brief find an element in a JSON object + /// @sa https://json.nlohmann.me/api/basic_json/find/ + const_iterator find(const typename object_t::key_type& key) const + { + auto result = cend(); + + if (is_object()) + { + result.m_it.object_iterator = m_value.object->find(key); + } + + return result; + } + + /// @brief find an element in a JSON object + /// @sa https://json.nlohmann.me/api/basic_json/find/ + template<class KeyType, detail::enable_if_t< + detail::is_usable_as_key_type<basic_json_t, KeyType>::value, int> = 0> + iterator find(KeyType && key) + { + auto result = end(); + + if (is_object()) + { + result.m_it.object_iterator = m_value.object->find(std::forward<KeyType>(key)); + } + + return result; + } + + /// @brief find an element in a JSON object + /// @sa https://json.nlohmann.me/api/basic_json/find/ + template<class KeyType, detail::enable_if_t< + detail::is_usable_as_key_type<basic_json_t, KeyType>::value, int> = 0> + const_iterator find(KeyType && key) const + { + auto result = cend(); + + if (is_object()) + { + result.m_it.object_iterator = m_value.object->find(std::forward<KeyType>(key)); + } + + return result; + } + + /// @brief returns the number of occurrences of a key in a JSON object + /// @sa https://json.nlohmann.me/api/basic_json/count/ + size_type count(const typename object_t::key_type& key) const + { + // return 0 for all nonobject types + return is_object() ? m_value.object->count(key) : 0; + } + + /// @brief returns the number of occurrences of a key in a JSON object + /// @sa https://json.nlohmann.me/api/basic_json/count/ + template<class KeyType, detail::enable_if_t< + detail::is_usable_as_key_type<basic_json_t, KeyType>::value, int> = 0> + size_type count(KeyType && key) const + { + // return 0 for all nonobject types + return is_object() ? 
m_value.object->count(std::forward<KeyType>(key)) : 0; + } + + /// @brief check the existence of an element in a JSON object + /// @sa https://json.nlohmann.me/api/basic_json/contains/ + bool contains(const typename object_t::key_type& key) const + { + return is_object() && m_value.object->find(key) != m_value.object->end(); + } + + /// @brief check the existence of an element in a JSON object + /// @sa https://json.nlohmann.me/api/basic_json/contains/ + template<class KeyType, detail::enable_if_t< + detail::is_usable_as_key_type<basic_json_t, KeyType>::value, int> = 0> + bool contains(KeyType && key) const + { + return is_object() && m_value.object->find(std::forward<KeyType>(key)) != m_value.object->end(); + } + + /// @brief check the existence of an element in a JSON object given a JSON pointer + /// @sa https://json.nlohmann.me/api/basic_json/contains/ + bool contains(const json_pointer& ptr) const + { + return ptr.contains(this); + } + + template<typename BasicJsonType> + JSON_HEDLEY_DEPRECATED_FOR(3.11.0, basic_json::json_pointer or nlohmann::json_pointer<basic_json::string_t>) // NOLINT(readability/alt_tokens) + bool contains(const typename ::nlohmann::json_pointer<BasicJsonType> ptr) const + { + return ptr.contains(this); + } + + /// @} + + + /////////////// + // iterators // + /////////////// + + /// @name iterators + /// @{ + + /// @brief returns an iterator to the first element + /// @sa https://json.nlohmann.me/api/basic_json/begin/ + iterator begin() noexcept + { + iterator result(this); + result.set_begin(); + return result; + } + + /// @brief returns an iterator to the first element + /// @sa https://json.nlohmann.me/api/basic_json/begin/ + const_iterator begin() const noexcept + { + return cbegin(); + } + + /// @brief returns a const iterator to the first element + /// @sa https://json.nlohmann.me/api/basic_json/cbegin/ + const_iterator cbegin() const noexcept + { + const_iterator result(this); + result.set_begin(); + return result; + } + + /// @brief returns an iterator to one past the last element + /// @sa https://json.nlohmann.me/api/basic_json/end/ + iterator end() noexcept + { + iterator result(this); + result.set_end(); + return result; + } + + /// @brief returns an iterator to one past the last element + /// @sa https://json.nlohmann.me/api/basic_json/end/ + const_iterator end() const noexcept + { + return cend(); + } + + /// @brief returns an iterator to one past the last element + /// @sa https://json.nlohmann.me/api/basic_json/cend/ + const_iterator cend() const noexcept + { + const_iterator result(this); + result.set_end(); + return result; + } + + /// @brief returns an iterator to the reverse-beginning + /// @sa https://json.nlohmann.me/api/basic_json/rbegin/ + reverse_iterator rbegin() noexcept + { + return reverse_iterator(end()); + } + + /// @brief returns an iterator to the reverse-beginning + /// @sa https://json.nlohmann.me/api/basic_json/rbegin/ + const_reverse_iterator rbegin() const noexcept + { + return crbegin(); + } + + /// @brief returns an iterator to the reverse-end + /// @sa https://json.nlohmann.me/api/basic_json/rend/ + reverse_iterator rend() noexcept + { + return reverse_iterator(begin()); + } + + /// @brief returns an iterator to the reverse-end + /// @sa https://json.nlohmann.me/api/basic_json/rend/ + const_reverse_iterator rend() const noexcept + { + return crend(); + } + + /// @brief returns a const reverse iterator to the last element + /// @sa https://json.nlohmann.me/api/basic_json/crbegin/ + const_reverse_iterator crbegin() const 
noexcept + { + return const_reverse_iterator(cend()); + } + + /// @brief returns a const reverse iterator to one before the first + /// @sa https://json.nlohmann.me/api/basic_json/crend/ + const_reverse_iterator crend() const noexcept + { + return const_reverse_iterator(cbegin()); + } + + public: + /// @brief wrapper to access iterator member functions in range-based for + /// @sa https://json.nlohmann.me/api/basic_json/items/ + /// @deprecated This function is deprecated since 3.1.0 and will be removed in + /// version 4.0.0 of the library. Please use @ref items() instead; + /// that is, replace `json::iterator_wrapper(j)` with `j.items()`. + JSON_HEDLEY_DEPRECATED_FOR(3.1.0, items()) + static iteration_proxy<iterator> iterator_wrapper(reference ref) noexcept + { + return ref.items(); + } + + /// @brief wrapper to access iterator member functions in range-based for + /// @sa https://json.nlohmann.me/api/basic_json/items/ + /// @deprecated This function is deprecated since 3.1.0 and will be removed in + /// version 4.0.0 of the library. Please use @ref items() instead; + /// that is, replace `json::iterator_wrapper(j)` with `j.items()`. + JSON_HEDLEY_DEPRECATED_FOR(3.1.0, items()) + static iteration_proxy<const_iterator> iterator_wrapper(const_reference ref) noexcept + { + return ref.items(); + } + + /// @brief helper to access iterator member functions in range-based for + /// @sa https://json.nlohmann.me/api/basic_json/items/ + iteration_proxy<iterator> items() noexcept + { + return iteration_proxy<iterator>(*this); + } + + /// @brief helper to access iterator member functions in range-based for + /// @sa https://json.nlohmann.me/api/basic_json/items/ + iteration_proxy<const_iterator> items() const noexcept + { + return iteration_proxy<const_iterator>(*this); + } + + /// @} + + + ////////////// + // capacity // + ////////////// + + /// @name capacity + /// @{ + + /// @brief checks whether the container is empty. 
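+    /// An illustrative sketch of the type-dependent semantics (assuming the
+    /// default `nlohmann::json` specialization): `null` counts as empty,
+    /// arrays and objects delegate to the underlying container, and every
+    /// other value type reports a size of one.
+    /// @code {.cpp}
+    /// #include <nlohmann/json.hpp>
+    ///
+    /// int main()
+    /// {
+    ///     nlohmann::json j_null;                           // null
+    ///     nlohmann::json j_arr = nlohmann::json::array();  // []
+    ///     nlohmann::json j_num = 42;                       // number
+    ///
+    ///     const bool ok = j_null.empty() && j_arr.empty() && !j_num.empty()
+    ///                     && j_num.size() == 1;
+    ///     return ok ? 0 : 1;
+    /// }
+    /// @endcode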
+ /// @sa https://json.nlohmann.me/api/basic_json/empty/ + bool empty() const noexcept + { + switch (m_type) + { + case value_t::null: + { + // null values are empty + return true; + } + + case value_t::array: + { + // delegate call to array_t::empty() + return m_value.array->empty(); + } + + case value_t::object: + { + // delegate call to object_t::empty() + return m_value.object->empty(); + } + + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + // all other types are nonempty + return false; + } + } + } + + /// @brief returns the number of elements + /// @sa https://json.nlohmann.me/api/basic_json/size/ + size_type size() const noexcept + { + switch (m_type) + { + case value_t::null: + { + // null values are empty + return 0; + } + + case value_t::array: + { + // delegate call to array_t::size() + return m_value.array->size(); + } + + case value_t::object: + { + // delegate call to object_t::size() + return m_value.object->size(); + } + + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + // all other types have size 1 + return 1; + } + } + } + + /// @brief returns the maximum possible number of elements + /// @sa https://json.nlohmann.me/api/basic_json/max_size/ + size_type max_size() const noexcept + { + switch (m_type) + { + case value_t::array: + { + // delegate call to array_t::max_size() + return m_value.array->max_size(); + } + + case value_t::object: + { + // delegate call to object_t::max_size() + return m_value.object->max_size(); + } + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + // all other types have max_size() == size() + return size(); + } + } + } + + /// @} + + + /////////////// + // modifiers // + /////////////// + + /// @name modifiers + /// @{ + + /// @brief clears the contents + /// @sa https://json.nlohmann.me/api/basic_json/clear/ + void clear() noexcept + { + switch (m_type) + { + case value_t::number_integer: + { + m_value.number_integer = 0; + break; + } + + case value_t::number_unsigned: + { + m_value.number_unsigned = 0; + break; + } + + case value_t::number_float: + { + m_value.number_float = 0.0; + break; + } + + case value_t::boolean: + { + m_value.boolean = false; + break; + } + + case value_t::string: + { + m_value.string->clear(); + break; + } + + case value_t::binary: + { + m_value.binary->clear(); + break; + } + + case value_t::array: + { + m_value.array->clear(); + break; + } + + case value_t::object: + { + m_value.object->clear(); + break; + } + + case value_t::null: + case value_t::discarded: + default: + break; + } + } + + /// @brief add an object to an array + /// @sa https://json.nlohmann.me/api/basic_json/push_back/ + void push_back(basic_json&& val) + { + // push_back only works for null objects or arrays + if (JSON_HEDLEY_UNLIKELY(!(is_null() || is_array()))) + { + JSON_THROW(type_error::create(308, detail::concat("cannot use push_back() with ", type_name()), this)); + } + + // transform null object into an array + if (is_null()) + { + m_type = value_t::array; + m_value = value_t::array; + assert_invariant(); + } + + // add element to array 
(move semantics) + const auto old_capacity = m_value.array->capacity(); + m_value.array->push_back(std::move(val)); + set_parent(m_value.array->back(), old_capacity); + // if val is moved from, basic_json move constructor marks it null, so we do not call the destructor + } + + /// @brief add an object to an array + /// @sa https://json.nlohmann.me/api/basic_json/operator+=/ + reference operator+=(basic_json&& val) + { + push_back(std::move(val)); + return *this; + } + + /// @brief add an object to an array + /// @sa https://json.nlohmann.me/api/basic_json/push_back/ + void push_back(const basic_json& val) + { + // push_back only works for null objects or arrays + if (JSON_HEDLEY_UNLIKELY(!(is_null() || is_array()))) + { + JSON_THROW(type_error::create(308, detail::concat("cannot use push_back() with ", type_name()), this)); + } + + // transform null object into an array + if (is_null()) + { + m_type = value_t::array; + m_value = value_t::array; + assert_invariant(); + } + + // add element to array + const auto old_capacity = m_value.array->capacity(); + m_value.array->push_back(val); + set_parent(m_value.array->back(), old_capacity); + } + + /// @brief add an object to an array + /// @sa https://json.nlohmann.me/api/basic_json/operator+=/ + reference operator+=(const basic_json& val) + { + push_back(val); + return *this; + } + + /// @brief add an object to an object + /// @sa https://json.nlohmann.me/api/basic_json/push_back/ + void push_back(const typename object_t::value_type& val) + { + // push_back only works for null objects or objects + if (JSON_HEDLEY_UNLIKELY(!(is_null() || is_object()))) + { + JSON_THROW(type_error::create(308, detail::concat("cannot use push_back() with ", type_name()), this)); + } + + // transform null object into an object + if (is_null()) + { + m_type = value_t::object; + m_value = value_t::object; + assert_invariant(); + } + + // add element to object + auto res = m_value.object->insert(val); + set_parent(res.first->second); + } + + /// @brief add an object to an object + /// @sa https://json.nlohmann.me/api/basic_json/operator+=/ + reference operator+=(const typename object_t::value_type& val) + { + push_back(val); + return *this; + } + + /// @brief add an object to an object + /// @sa https://json.nlohmann.me/api/basic_json/push_back/ + void push_back(initializer_list_t init) + { + if (is_object() && init.size() == 2 && (*init.begin())->is_string()) + { + basic_json&& key = init.begin()->moved_or_copied(); + push_back(typename object_t::value_type( + std::move(key.get_ref<string_t&>()), (init.begin() + 1)->moved_or_copied())); + } + else + { + push_back(basic_json(init)); + } + } + + /// @brief add an object to an object + /// @sa https://json.nlohmann.me/api/basic_json/operator+=/ + reference operator+=(initializer_list_t init) + { + push_back(init); + return *this; + } + + /// @brief add an object to an array + /// @sa https://json.nlohmann.me/api/basic_json/emplace_back/ + template<class... Args> + reference emplace_back(Args&& ... 
args)
+    {
+        // emplace_back only works for null objects or arrays
+        if (JSON_HEDLEY_UNLIKELY(!(is_null() || is_array())))
+        {
+            JSON_THROW(type_error::create(311, detail::concat("cannot use emplace_back() with ", type_name()), this));
+        }
+
+        // transform null object into an array
+        if (is_null())
+        {
+            m_type = value_t::array;
+            m_value = value_t::array;
+            assert_invariant();
+        }
+
+        // add element to array (perfect forwarding)
+        const auto old_capacity = m_value.array->capacity();
+        m_value.array->emplace_back(std::forward<Args>(args)...);
+        return set_parent(m_value.array->back(), old_capacity);
+    }
+
+    /// @brief add an object to an object if key does not exist
+    /// @sa https://json.nlohmann.me/api/basic_json/emplace/
+    template<class... Args>
+    std::pair<iterator, bool> emplace(Args&& ... args)
+    {
+        // emplace only works for null objects or objects
+        if (JSON_HEDLEY_UNLIKELY(!(is_null() || is_object())))
+        {
+            JSON_THROW(type_error::create(311, detail::concat("cannot use emplace() with ", type_name()), this));
+        }
+
+        // transform null object into an object
+        if (is_null())
+        {
+            m_type = value_t::object;
+            m_value = value_t::object;
+            assert_invariant();
+        }
+
+        // add element to object (perfect forwarding)
+        auto res = m_value.object->emplace(std::forward<Args>(args)...);
+        set_parent(res.first->second);
+
+        // create result iterator and set iterator to the result of emplace
+        auto it = begin();
+        it.m_it.object_iterator = res.first;
+
+        // return pair of iterator and boolean
+        return {it, res.second};
+    }
+
+    /// Helper for insertion of an iterator
+    /// @note: This uses std::distance to support GCC 4.8,
+    ///        see https://github.com/nlohmann/json/pull/1257
+    template<typename... Args>
+    iterator insert_iterator(const_iterator pos, Args&& ... args)
+    {
+        iterator result(this);
+        JSON_ASSERT(m_value.array != nullptr);
+
+        auto insert_pos = std::distance(m_value.array->begin(), pos.m_it.array_iterator);
+        m_value.array->insert(pos.m_it.array_iterator, std::forward<Args>(args)...);
+        result.m_it.array_iterator = m_value.array->begin() + insert_pos;
+
+        // This could have been written as:
+        // result.m_it.array_iterator = m_value.array->insert(pos.m_it.array_iterator, cnt, val);
+        // but the return value of insert is missing in GCC 4.8, so it is written this way instead.
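+        //
+        // Illustrative call pattern for the public insert() overloads that
+        // funnel through this helper (a sketch, assuming the default
+        // nlohmann::json specialization):
+        //
+        //     nlohmann::json a = {1, 4};
+        //     auto it = a.insert(a.begin() + 1, 2);  // a == [1, 2, 4]
+        //     a.insert(it + 1, 2, 3);                // a == [1, 2, 3, 3, 4]
+        //
+        // The returned iterator refers to the first inserted element; the
+        // offset captured via std::distance() above is what allows rebuilding
+        // that iterator after the underlying vector may have reallocated.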
+ + set_parents(); + return result; + } + + /// @brief inserts element into array + /// @sa https://json.nlohmann.me/api/basic_json/insert/ + iterator insert(const_iterator pos, const basic_json& val) + { + // insert only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + // check if iterator pos fits to this JSON value + if (JSON_HEDLEY_UNLIKELY(pos.m_object != this)) + { + JSON_THROW(invalid_iterator::create(202, "iterator does not fit current value", this)); + } + + // insert to array and return iterator + return insert_iterator(pos, val); + } + + JSON_THROW(type_error::create(309, detail::concat("cannot use insert() with ", type_name()), this)); + } + + /// @brief inserts element into array + /// @sa https://json.nlohmann.me/api/basic_json/insert/ + iterator insert(const_iterator pos, basic_json&& val) + { + return insert(pos, val); + } + + /// @brief inserts copies of element into array + /// @sa https://json.nlohmann.me/api/basic_json/insert/ + iterator insert(const_iterator pos, size_type cnt, const basic_json& val) + { + // insert only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + // check if iterator pos fits to this JSON value + if (JSON_HEDLEY_UNLIKELY(pos.m_object != this)) + { + JSON_THROW(invalid_iterator::create(202, "iterator does not fit current value", this)); + } + + // insert to array and return iterator + return insert_iterator(pos, cnt, val); + } + + JSON_THROW(type_error::create(309, detail::concat("cannot use insert() with ", type_name()), this)); + } + + /// @brief inserts range of elements into array + /// @sa https://json.nlohmann.me/api/basic_json/insert/ + iterator insert(const_iterator pos, const_iterator first, const_iterator last) + { + // insert only works for arrays + if (JSON_HEDLEY_UNLIKELY(!is_array())) + { + JSON_THROW(type_error::create(309, detail::concat("cannot use insert() with ", type_name()), this)); + } + + // check if iterator pos fits to this JSON value + if (JSON_HEDLEY_UNLIKELY(pos.m_object != this)) + { + JSON_THROW(invalid_iterator::create(202, "iterator does not fit current value", this)); + } + + // check if range iterators belong to the same JSON object + if (JSON_HEDLEY_UNLIKELY(first.m_object != last.m_object)) + { + JSON_THROW(invalid_iterator::create(210, "iterators do not fit", this)); + } + + if (JSON_HEDLEY_UNLIKELY(first.m_object == this)) + { + JSON_THROW(invalid_iterator::create(211, "passed iterators may not belong to container", this)); + } + + // insert to array and return iterator + return insert_iterator(pos, first.m_it.array_iterator, last.m_it.array_iterator); + } + + /// @brief inserts elements from initializer list into array + /// @sa https://json.nlohmann.me/api/basic_json/insert/ + iterator insert(const_iterator pos, initializer_list_t ilist) + { + // insert only works for arrays + if (JSON_HEDLEY_UNLIKELY(!is_array())) + { + JSON_THROW(type_error::create(309, detail::concat("cannot use insert() with ", type_name()), this)); + } + + // check if iterator pos fits to this JSON value + if (JSON_HEDLEY_UNLIKELY(pos.m_object != this)) + { + JSON_THROW(invalid_iterator::create(202, "iterator does not fit current value", this)); + } + + // insert to array and return iterator + return insert_iterator(pos, ilist.begin(), ilist.end()); + } + + /// @brief inserts range of elements into object + /// @sa https://json.nlohmann.me/api/basic_json/insert/ + void insert(const_iterator first, const_iterator last) + { + // insert only works for objects + if (JSON_HEDLEY_UNLIKELY(!is_object())) + { + 
JSON_THROW(type_error::create(309, detail::concat("cannot use insert() with ", type_name()), this)); + } + + // check if range iterators belong to the same JSON object + if (JSON_HEDLEY_UNLIKELY(first.m_object != last.m_object)) + { + JSON_THROW(invalid_iterator::create(210, "iterators do not fit", this)); + } + + // passed iterators must belong to objects + if (JSON_HEDLEY_UNLIKELY(!first.m_object->is_object())) + { + JSON_THROW(invalid_iterator::create(202, "iterators first and last must point to objects", this)); + } + + m_value.object->insert(first.m_it.object_iterator, last.m_it.object_iterator); + } + + /// @brief updates a JSON object from another object, overwriting existing keys + /// @sa https://json.nlohmann.me/api/basic_json/update/ + void update(const_reference j, bool merge_objects = false) + { + update(j.begin(), j.end(), merge_objects); + } + + /// @brief updates a JSON object from another object, overwriting existing keys + /// @sa https://json.nlohmann.me/api/basic_json/update/ + void update(const_iterator first, const_iterator last, bool merge_objects = false) + { + // implicitly convert null value to an empty object + if (is_null()) + { + m_type = value_t::object; + m_value.object = create<object_t>(); + assert_invariant(); + } + + if (JSON_HEDLEY_UNLIKELY(!is_object())) + { + JSON_THROW(type_error::create(312, detail::concat("cannot use update() with ", type_name()), this)); + } + + // check if range iterators belong to the same JSON object + if (JSON_HEDLEY_UNLIKELY(first.m_object != last.m_object)) + { + JSON_THROW(invalid_iterator::create(210, "iterators do not fit", this)); + } + + // passed iterators must belong to objects + if (JSON_HEDLEY_UNLIKELY(!first.m_object->is_object())) + { + JSON_THROW(type_error::create(312, detail::concat("cannot use update() with ", first.m_object->type_name()), first.m_object)); + } + + for (auto it = first; it != last; ++it) + { + if (merge_objects && it.value().is_object()) + { + auto it2 = m_value.object->find(it.key()); + if (it2 != m_value.object->end()) + { + it2->second.update(it.value(), true); + continue; + } + } + m_value.object->operator[](it.key()) = it.value(); +#if JSON_DIAGNOSTICS + m_value.object->operator[](it.key()).m_parent = this; +#endif + } + } + + /// @brief exchanges the values + /// @sa https://json.nlohmann.me/api/basic_json/swap/ + void swap(reference other) noexcept ( + std::is_nothrow_move_constructible<value_t>::value&& + std::is_nothrow_move_assignable<value_t>::value&& + std::is_nothrow_move_constructible<json_value>::value&& + std::is_nothrow_move_assignable<json_value>::value + ) + { + std::swap(m_type, other.m_type); + std::swap(m_value, other.m_value); + + set_parents(); + other.set_parents(); + assert_invariant(); + } + + /// @brief exchanges the values + /// @sa https://json.nlohmann.me/api/basic_json/swap/ + friend void swap(reference left, reference right) noexcept ( + std::is_nothrow_move_constructible<value_t>::value&& + std::is_nothrow_move_assignable<value_t>::value&& + std::is_nothrow_move_constructible<json_value>::value&& + std::is_nothrow_move_assignable<json_value>::value + ) + { + left.swap(right); + } + + /// @brief exchanges the values + /// @sa https://json.nlohmann.me/api/basic_json/swap/ + void swap(array_t& other) // NOLINT(bugprone-exception-escape) + { + // swap only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + std::swap(*(m_value.array), other); + } + else + { + JSON_THROW(type_error::create(310, detail::concat("cannot use swap() with ", type_name()), this)); 
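+            // A sketch of the intended use of the container-specific swap()
+            // overloads (assuming the default nlohmann::json specialization):
+            //
+            //     nlohmann::json j = {1, 2, 3};
+            //     nlohmann::json::array_t a = {4, 5};
+            //     j.swap(a);                // j == [4, 5], a == [1, 2, 3]
+            //
+            // Calling this overload on a non-array value ends up here and
+            // throws type_error.310; the object_t, string_t, and binary_t
+            // overloads below behave analogously.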
+ } + } + + /// @brief exchanges the values + /// @sa https://json.nlohmann.me/api/basic_json/swap/ + void swap(object_t& other) // NOLINT(bugprone-exception-escape) + { + // swap only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + std::swap(*(m_value.object), other); + } + else + { + JSON_THROW(type_error::create(310, detail::concat("cannot use swap() with ", type_name()), this)); + } + } + + /// @brief exchanges the values + /// @sa https://json.nlohmann.me/api/basic_json/swap/ + void swap(string_t& other) // NOLINT(bugprone-exception-escape) + { + // swap only works for strings + if (JSON_HEDLEY_LIKELY(is_string())) + { + std::swap(*(m_value.string), other); + } + else + { + JSON_THROW(type_error::create(310, detail::concat("cannot use swap() with ", type_name()), this)); + } + } + + /// @brief exchanges the values + /// @sa https://json.nlohmann.me/api/basic_json/swap/ + void swap(binary_t& other) // NOLINT(bugprone-exception-escape) + { + // swap only works for strings + if (JSON_HEDLEY_LIKELY(is_binary())) + { + std::swap(*(m_value.binary), other); + } + else + { + JSON_THROW(type_error::create(310, detail::concat("cannot use swap() with ", type_name()), this)); + } + } + + /// @brief exchanges the values + /// @sa https://json.nlohmann.me/api/basic_json/swap/ + void swap(typename binary_t::container_type& other) // NOLINT(bugprone-exception-escape) + { + // swap only works for strings + if (JSON_HEDLEY_LIKELY(is_binary())) + { + std::swap(*(m_value.binary), other); + } + else + { + JSON_THROW(type_error::create(310, detail::concat("cannot use swap() with ", type_name()), this)); + } + } + + /// @} + + ////////////////////////////////////////// + // lexicographical comparison operators // + ////////////////////////////////////////// + + /// @name lexicographical comparison operators + /// @{ + + // note parentheses around operands are necessary; see + // https://github.com/nlohmann/json/issues/1530 +#define JSON_IMPLEMENT_OPERATOR(op, null_result, unordered_result, default_result) \ + const auto lhs_type = lhs.type(); \ + const auto rhs_type = rhs.type(); \ + \ + if (lhs_type == rhs_type) /* NOLINT(readability/braces) */ \ + { \ + switch (lhs_type) \ + { \ + case value_t::array: \ + return (*lhs.m_value.array) op (*rhs.m_value.array); \ + \ + case value_t::object: \ + return (*lhs.m_value.object) op (*rhs.m_value.object); \ + \ + case value_t::null: \ + return (null_result); \ + \ + case value_t::string: \ + return (*lhs.m_value.string) op (*rhs.m_value.string); \ + \ + case value_t::boolean: \ + return (lhs.m_value.boolean) op (rhs.m_value.boolean); \ + \ + case value_t::number_integer: \ + return (lhs.m_value.number_integer) op (rhs.m_value.number_integer); \ + \ + case value_t::number_unsigned: \ + return (lhs.m_value.number_unsigned) op (rhs.m_value.number_unsigned); \ + \ + case value_t::number_float: \ + return (lhs.m_value.number_float) op (rhs.m_value.number_float); \ + \ + case value_t::binary: \ + return (*lhs.m_value.binary) op (*rhs.m_value.binary); \ + \ + case value_t::discarded: \ + default: \ + return (unordered_result); \ + } \ + } \ + else if (lhs_type == value_t::number_integer && rhs_type == value_t::number_float) \ + { \ + return static_cast<number_float_t>(lhs.m_value.number_integer) op rhs.m_value.number_float; \ + } \ + else if (lhs_type == value_t::number_float && rhs_type == value_t::number_integer) \ + { \ + return lhs.m_value.number_float op static_cast<number_float_t>(rhs.m_value.number_integer); \ + } \ + else if (lhs_type == 
value_t::number_unsigned && rhs_type == value_t::number_float) \ + { \ + return static_cast<number_float_t>(lhs.m_value.number_unsigned) op rhs.m_value.number_float; \ + } \ + else if (lhs_type == value_t::number_float && rhs_type == value_t::number_unsigned) \ + { \ + return lhs.m_value.number_float op static_cast<number_float_t>(rhs.m_value.number_unsigned); \ + } \ + else if (lhs_type == value_t::number_unsigned && rhs_type == value_t::number_integer) \ + { \ + return static_cast<number_integer_t>(lhs.m_value.number_unsigned) op rhs.m_value.number_integer; \ + } \ + else if (lhs_type == value_t::number_integer && rhs_type == value_t::number_unsigned) \ + { \ + return lhs.m_value.number_integer op static_cast<number_integer_t>(rhs.m_value.number_unsigned); \ + } \ + else if(compares_unordered(lhs, rhs))\ + {\ + return (unordered_result);\ + }\ + \ + return (default_result); + + JSON_PRIVATE_UNLESS_TESTED: + // returns true if: + // - any operand is NaN and the other operand is of number type + // - any operand is discarded + // in legacy mode, discarded values are considered ordered if + // an operation is computed as an odd number of inverses of others + static bool compares_unordered(const_reference lhs, const_reference rhs, bool inverse = false) noexcept + { + if ((lhs.is_number_float() && std::isnan(lhs.m_value.number_float) && rhs.is_number()) + || (rhs.is_number_float() && std::isnan(rhs.m_value.number_float) && lhs.is_number())) + { + return true; + } +#if JSON_USE_LEGACY_DISCARDED_VALUE_COMPARISON + return (lhs.is_discarded() || rhs.is_discarded()) && !inverse; +#else + static_cast<void>(inverse); + return lhs.is_discarded() || rhs.is_discarded(); +#endif + } + + private: + bool compares_unordered(const_reference rhs, bool inverse = false) const noexcept + { + return compares_unordered(*this, rhs, inverse); + } + + public: +#if JSON_HAS_THREE_WAY_COMPARISON + /// @brief comparison: equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_eq/ + bool operator==(const_reference rhs) const noexcept + { +#ifdef __GNUC__ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wfloat-equal" +#endif + const_reference lhs = *this; + JSON_IMPLEMENT_OPERATOR( ==, true, false, false) +#ifdef __GNUC__ +#pragma GCC diagnostic pop +#endif + } + + /// @brief comparison: equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_eq/ + template<typename ScalarType> + requires std::is_scalar_v<ScalarType> + bool operator==(ScalarType rhs) const noexcept + { + return *this == basic_json(rhs); + } + + /// @brief comparison: not equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_ne/ + bool operator!=(const_reference rhs) const noexcept + { + if (compares_unordered(rhs, true)) + { + return false; + } + return !operator==(rhs); + } + + /// @brief comparison: 3-way + /// @sa https://json.nlohmann.me/api/basic_json/operator_spaceship/ + std::partial_ordering operator<=>(const_reference rhs) const noexcept // *NOPAD* + { + const_reference lhs = *this; + // default_result is used if we cannot compare values. In that case, + // we compare types. 
+ JSON_IMPLEMENT_OPERATOR(<=>, // *NOPAD* + std::partial_ordering::equivalent, + std::partial_ordering::unordered, + lhs_type <=> rhs_type) // *NOPAD* + } + + /// @brief comparison: 3-way + /// @sa https://json.nlohmann.me/api/basic_json/operator_spaceship/ + template<typename ScalarType> + requires std::is_scalar_v<ScalarType> + std::partial_ordering operator<=>(ScalarType rhs) const noexcept // *NOPAD* + { + return *this <=> basic_json(rhs); // *NOPAD* + } + +#if JSON_USE_LEGACY_DISCARDED_VALUE_COMPARISON + // all operators that are computed as an odd number of inverses of others + // need to be overloaded to emulate the legacy comparison behavior + + /// @brief comparison: less than or equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_le/ + JSON_HEDLEY_DEPRECATED_FOR(3.11.0, undef JSON_USE_LEGACY_DISCARDED_VALUE_COMPARISON) + bool operator<=(const_reference rhs) const noexcept + { + if (compares_unordered(rhs, true)) + { + return false; + } + return !(rhs < *this); + } + + /// @brief comparison: less than or equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_le/ + template<typename ScalarType> + requires std::is_scalar_v<ScalarType> + bool operator<=(ScalarType rhs) const noexcept + { + return *this <= basic_json(rhs); + } + + /// @brief comparison: greater than or equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_ge/ + JSON_HEDLEY_DEPRECATED_FOR(3.11.0, undef JSON_USE_LEGACY_DISCARDED_VALUE_COMPARISON) + bool operator>=(const_reference rhs) const noexcept + { + if (compares_unordered(rhs, true)) + { + return false; + } + return !(*this < rhs); + } + + /// @brief comparison: greater than or equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_ge/ + template<typename ScalarType> + requires std::is_scalar_v<ScalarType> + bool operator>=(ScalarType rhs) const noexcept + { + return *this >= basic_json(rhs); + } +#endif +#else + /// @brief comparison: equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_eq/ + friend bool operator==(const_reference lhs, const_reference rhs) noexcept + { +#ifdef __GNUC__ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wfloat-equal" +#endif + JSON_IMPLEMENT_OPERATOR( ==, true, false, false) +#ifdef __GNUC__ +#pragma GCC diagnostic pop +#endif + } + + /// @brief comparison: equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_eq/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator==(const_reference lhs, ScalarType rhs) noexcept + { + return lhs == basic_json(rhs); + } + + /// @brief comparison: equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_eq/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator==(ScalarType lhs, const_reference rhs) noexcept + { + return basic_json(lhs) == rhs; + } + + /// @brief comparison: not equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_ne/ + friend bool operator!=(const_reference lhs, const_reference rhs) noexcept + { + if (compares_unordered(lhs, rhs, true)) + { + return false; + } + return !(lhs == rhs); + } + + /// @brief comparison: not equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_ne/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator!=(const_reference lhs, ScalarType rhs) noexcept + { + return lhs != basic_json(rhs); + } + + /// @brief 
comparison: not equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_ne/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator!=(ScalarType lhs, const_reference rhs) noexcept + { + return basic_json(lhs) != rhs; + } + + /// @brief comparison: less than + /// @sa https://json.nlohmann.me/api/basic_json/operator_lt/ + friend bool operator<(const_reference lhs, const_reference rhs) noexcept + { + // default_result is used if we cannot compare values. In that case, + // we compare types. Note we have to call the operator explicitly, + // because MSVC has problems otherwise. + JSON_IMPLEMENT_OPERATOR( <, false, false, operator<(lhs_type, rhs_type)) + } + + /// @brief comparison: less than + /// @sa https://json.nlohmann.me/api/basic_json/operator_lt/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator<(const_reference lhs, ScalarType rhs) noexcept + { + return lhs < basic_json(rhs); + } + + /// @brief comparison: less than + /// @sa https://json.nlohmann.me/api/basic_json/operator_lt/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator<(ScalarType lhs, const_reference rhs) noexcept + { + return basic_json(lhs) < rhs; + } + + /// @brief comparison: less than or equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_le/ + friend bool operator<=(const_reference lhs, const_reference rhs) noexcept + { + if (compares_unordered(lhs, rhs, true)) + { + return false; + } + return !(rhs < lhs); + } + + /// @brief comparison: less than or equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_le/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator<=(const_reference lhs, ScalarType rhs) noexcept + { + return lhs <= basic_json(rhs); + } + + /// @brief comparison: less than or equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_le/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator<=(ScalarType lhs, const_reference rhs) noexcept + { + return basic_json(lhs) <= rhs; + } + + /// @brief comparison: greater than + /// @sa https://json.nlohmann.me/api/basic_json/operator_gt/ + friend bool operator>(const_reference lhs, const_reference rhs) noexcept + { + // double inverse + if (compares_unordered(lhs, rhs)) + { + return false; + } + return !(lhs <= rhs); + } + + /// @brief comparison: greater than + /// @sa https://json.nlohmann.me/api/basic_json/operator_gt/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator>(const_reference lhs, ScalarType rhs) noexcept + { + return lhs > basic_json(rhs); + } + + /// @brief comparison: greater than + /// @sa https://json.nlohmann.me/api/basic_json/operator_gt/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator>(ScalarType lhs, const_reference rhs) noexcept + { + return basic_json(lhs) > rhs; + } + + /// @brief comparison: greater than or equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_ge/ + friend bool operator>=(const_reference lhs, const_reference rhs) noexcept + { + if (compares_unordered(lhs, rhs, true)) + { + return false; + } + return !(lhs < rhs); + } + 
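+    // [editor's note] A hedged usage sketch, not part of the upstream header:
+    // how the comparison operators implemented above behave. Values of the
+    // same type are compared through their underlying containers; signed,
+    // unsigned and floating-point numbers are compared by value after
+    // conversion; NaN operands and discarded values make the comparison
+    // unordered (see compares_unordered() above).
+    //
+    //   #include <cmath>                  // for std::nan
+    //   nlohmann::json a = 1;             // number_integer
+    //   nlohmann::json b = 1.0;           // number_float
+    //   nlohmann::json c = {1, 2, 3};     // array
+    //   bool e1 = (a == b);               // true: numbers compared by value
+    //   bool e2 = (a < c);                // different types: ordered by type,
+    //                                     // not by value
+    //   nlohmann::json n = std::nan("");
+    //   bool e3 = (n == n);               // false: NaN never compares equal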
+ /// @brief comparison: greater than or equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_ge/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator>=(const_reference lhs, ScalarType rhs) noexcept + { + return lhs >= basic_json(rhs); + } + + /// @brief comparison: greater than or equal + /// @sa https://json.nlohmann.me/api/basic_json/operator_ge/ + template<typename ScalarType, typename std::enable_if< + std::is_scalar<ScalarType>::value, int>::type = 0> + friend bool operator>=(ScalarType lhs, const_reference rhs) noexcept + { + return basic_json(lhs) >= rhs; + } +#endif + +#undef JSON_IMPLEMENT_OPERATOR + + /// @} + + /////////////////// + // serialization // + /////////////////// + + /// @name serialization + /// @{ +#ifndef JSON_NO_IO + /// @brief serialize to stream + /// @sa https://json.nlohmann.me/api/basic_json/operator_ltlt/ + friend std::ostream& operator<<(std::ostream& o, const basic_json& j) + { + // read width member and use it as indentation parameter if nonzero + const bool pretty_print = o.width() > 0; + const auto indentation = pretty_print ? o.width() : 0; + + // reset width to 0 for subsequent calls to this stream + o.width(0); + + // do the actual serialization + serializer s(detail::output_adapter<char>(o), o.fill()); + s.dump(j, pretty_print, false, static_cast<unsigned int>(indentation)); + return o; + } + + /// @brief serialize to stream + /// @sa https://json.nlohmann.me/api/basic_json/operator_ltlt/ + /// @deprecated This function is deprecated since 3.0.0 and will be removed in + /// version 4.0.0 of the library. Please use + /// operator<<(std::ostream&, const basic_json&) instead; that is, + /// replace calls like `j >> o;` with `o << j;`. 
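+    // [editor's note] Illustration only, not upstream code: the stream output
+    // operator defined above uses the stream's width as the indentation level
+    // and its fill character as the indentation character, so std::setw (and
+    // optionally std::setfill) control pretty-printing.
+    //
+    //   #include <iomanip>
+    //   #include <iostream>
+    //   nlohmann::json j = {{"pi", 3.141}, {"happy", true}};
+    //   std::cout << j << '\n';                  // compact, single line
+    //   std::cout << std::setw(4) << j << '\n';  // pretty-printed, 4-space indent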
+ JSON_HEDLEY_DEPRECATED_FOR(3.0.0, operator<<(std::ostream&, const basic_json&)) + friend std::ostream& operator>>(const basic_json& j, std::ostream& o) + { + return o << j; + } +#endif // JSON_NO_IO + /// @} + + + ///////////////////// + // deserialization // + ///////////////////// + + /// @name deserialization + /// @{ + + /// @brief deserialize from a compatible input + /// @sa https://json.nlohmann.me/api/basic_json/parse/ + template<typename InputType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json parse(InputType&& i, + const parser_callback_t cb = nullptr, + const bool allow_exceptions = true, + const bool ignore_comments = false) + { + basic_json result; + parser(detail::input_adapter(std::forward<InputType>(i)), cb, allow_exceptions, ignore_comments).parse(true, result); + return result; + } + + /// @brief deserialize from a pair of character iterators + /// @sa https://json.nlohmann.me/api/basic_json/parse/ + template<typename IteratorType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json parse(IteratorType first, + IteratorType last, + const parser_callback_t cb = nullptr, + const bool allow_exceptions = true, + const bool ignore_comments = false) + { + basic_json result; + parser(detail::input_adapter(std::move(first), std::move(last)), cb, allow_exceptions, ignore_comments).parse(true, result); + return result; + } + + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, parse(ptr, ptr + len)) + static basic_json parse(detail::span_input_adapter&& i, + const parser_callback_t cb = nullptr, + const bool allow_exceptions = true, + const bool ignore_comments = false) + { + basic_json result; + parser(i.get(), cb, allow_exceptions, ignore_comments).parse(true, result); + return result; + } + + /// @brief check if the input is valid JSON + /// @sa https://json.nlohmann.me/api/basic_json/accept/ + template<typename InputType> + static bool accept(InputType&& i, + const bool ignore_comments = false) + { + return parser(detail::input_adapter(std::forward<InputType>(i)), nullptr, false, ignore_comments).accept(true); + } + + /// @brief check if the input is valid JSON + /// @sa https://json.nlohmann.me/api/basic_json/accept/ + template<typename IteratorType> + static bool accept(IteratorType first, IteratorType last, + const bool ignore_comments = false) + { + return parser(detail::input_adapter(std::move(first), std::move(last)), nullptr, false, ignore_comments).accept(true); + } + + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, accept(ptr, ptr + len)) + static bool accept(detail::span_input_adapter&& i, + const bool ignore_comments = false) + { + return parser(i.get(), nullptr, false, ignore_comments).accept(true); + } + + /// @brief generate SAX events + /// @sa https://json.nlohmann.me/api/basic_json/sax_parse/ + template <typename InputType, typename SAX> + JSON_HEDLEY_NON_NULL(2) + static bool sax_parse(InputType&& i, SAX* sax, + input_format_t format = input_format_t::json, + const bool strict = true, + const bool ignore_comments = false) + { + auto ia = detail::input_adapter(std::forward<InputType>(i)); + return format == input_format_t::json + ? 
parser(std::move(ia), nullptr, true, ignore_comments).sax_parse(sax, strict) + : detail::binary_reader<basic_json, decltype(ia), SAX>(std::move(ia), format).sax_parse(format, sax, strict); + } + + /// @brief generate SAX events + /// @sa https://json.nlohmann.me/api/basic_json/sax_parse/ + template<class IteratorType, class SAX> + JSON_HEDLEY_NON_NULL(3) + static bool sax_parse(IteratorType first, IteratorType last, SAX* sax, + input_format_t format = input_format_t::json, + const bool strict = true, + const bool ignore_comments = false) + { + auto ia = detail::input_adapter(std::move(first), std::move(last)); + return format == input_format_t::json + ? parser(std::move(ia), nullptr, true, ignore_comments).sax_parse(sax, strict) + : detail::binary_reader<basic_json, decltype(ia), SAX>(std::move(ia), format).sax_parse(format, sax, strict); + } + + /// @brief generate SAX events + /// @sa https://json.nlohmann.me/api/basic_json/sax_parse/ + /// @deprecated This function is deprecated since 3.8.0 and will be removed in + /// version 4.0.0 of the library. Please use + /// sax_parse(ptr, ptr + len) instead. + template <typename SAX> + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, sax_parse(ptr, ptr + len, ...)) + JSON_HEDLEY_NON_NULL(2) + static bool sax_parse(detail::span_input_adapter&& i, SAX* sax, + input_format_t format = input_format_t::json, + const bool strict = true, + const bool ignore_comments = false) + { + auto ia = i.get(); + return format == input_format_t::json + // NOLINTNEXTLINE(hicpp-move-const-arg,performance-move-const-arg) + ? parser(std::move(ia), nullptr, true, ignore_comments).sax_parse(sax, strict) + // NOLINTNEXTLINE(hicpp-move-const-arg,performance-move-const-arg) + : detail::binary_reader<basic_json, decltype(ia), SAX>(std::move(ia), format).sax_parse(format, sax, strict); + } +#ifndef JSON_NO_IO + /// @brief deserialize from stream + /// @sa https://json.nlohmann.me/api/basic_json/operator_gtgt/ + /// @deprecated This stream operator is deprecated since 3.0.0 and will be removed in + /// version 4.0.0 of the library. Please use + /// operator>>(std::istream&, basic_json&) instead; that is, + /// replace calls like `j << i;` with `i >> j;`. 
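+    // [editor's note] Illustration only, not upstream code: typical use of the
+    // parse() and accept() overloads declared above.
+    //
+    //   auto j1 = nlohmann::json::parse(R"({"name": "value"})");
+    //   bool ok = nlohmann::json::accept("[1, 2, 3]");   // true, never throws on bad input
+    //   // with allow_exceptions = false, parse errors yield a discarded value
+    //   auto j2 = nlohmann::json::parse("not json", nullptr, false);
+    //   bool bad = j2.is_discarded();                    // true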
+ JSON_HEDLEY_DEPRECATED_FOR(3.0.0, operator>>(std::istream&, basic_json&)) + friend std::istream& operator<<(basic_json& j, std::istream& i) + { + return operator>>(i, j); + } + + /// @brief deserialize from stream + /// @sa https://json.nlohmann.me/api/basic_json/operator_gtgt/ + friend std::istream& operator>>(std::istream& i, basic_json& j) + { + parser(detail::input_adapter(i)).parse(false, j); + return i; + } +#endif // JSON_NO_IO + /// @} + + /////////////////////////// + // convenience functions // + /////////////////////////// + + /// @brief return the type as string + /// @sa https://json.nlohmann.me/api/basic_json/type_name/ + JSON_HEDLEY_RETURNS_NON_NULL + const char* type_name() const noexcept + { + switch (m_type) + { + case value_t::null: + return "null"; + case value_t::object: + return "object"; + case value_t::array: + return "array"; + case value_t::string: + return "string"; + case value_t::boolean: + return "boolean"; + case value_t::binary: + return "binary"; + case value_t::discarded: + return "discarded"; + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + default: + return "number"; + } + } + + + JSON_PRIVATE_UNLESS_TESTED: + ////////////////////// + // member variables // + ////////////////////// + + /// the type of the current element + value_t m_type = value_t::null; + + /// the value of the current element + json_value m_value = {}; + +#if JSON_DIAGNOSTICS + /// a pointer to a parent value (for debugging purposes) + basic_json* m_parent = nullptr; +#endif + + ////////////////////////////////////////// + // binary serialization/deserialization // + ////////////////////////////////////////// + + /// @name binary serialization/deserialization support + /// @{ + + public: + /// @brief create a CBOR serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_cbor/ + static std::vector<std::uint8_t> to_cbor(const basic_json& j) + { + std::vector<std::uint8_t> result; + to_cbor(j, result); + return result; + } + + /// @brief create a CBOR serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_cbor/ + static void to_cbor(const basic_json& j, detail::output_adapter<std::uint8_t> o) + { + binary_writer<std::uint8_t>(o).write_cbor(j); + } + + /// @brief create a CBOR serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_cbor/ + static void to_cbor(const basic_json& j, detail::output_adapter<char> o) + { + binary_writer<char>(o).write_cbor(j); + } + + /// @brief create a MessagePack serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_msgpack/ + static std::vector<std::uint8_t> to_msgpack(const basic_json& j) + { + std::vector<std::uint8_t> result; + to_msgpack(j, result); + return result; + } + + /// @brief create a MessagePack serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_msgpack/ + static void to_msgpack(const basic_json& j, detail::output_adapter<std::uint8_t> o) + { + binary_writer<std::uint8_t>(o).write_msgpack(j); + } + + /// @brief create a MessagePack serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_msgpack/ + static void to_msgpack(const basic_json& j, detail::output_adapter<char> o) + { + binary_writer<char>(o).write_msgpack(j); + } + + /// @brief create a UBJSON serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_ubjson/ + static std::vector<std::uint8_t> 
to_ubjson(const basic_json& j, + const bool use_size = false, + const bool use_type = false) + { + std::vector<std::uint8_t> result; + to_ubjson(j, result, use_size, use_type); + return result; + } + + /// @brief create a UBJSON serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_ubjson/ + static void to_ubjson(const basic_json& j, detail::output_adapter<std::uint8_t> o, + const bool use_size = false, const bool use_type = false) + { + binary_writer<std::uint8_t>(o).write_ubjson(j, use_size, use_type); + } + + /// @brief create a UBJSON serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_ubjson/ + static void to_ubjson(const basic_json& j, detail::output_adapter<char> o, + const bool use_size = false, const bool use_type = false) + { + binary_writer<char>(o).write_ubjson(j, use_size, use_type); + } + + /// @brief create a BJData serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_bjdata/ + static std::vector<std::uint8_t> to_bjdata(const basic_json& j, + const bool use_size = false, + const bool use_type = false) + { + std::vector<std::uint8_t> result; + to_bjdata(j, result, use_size, use_type); + return result; + } + + /// @brief create a BJData serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_bjdata/ + static void to_bjdata(const basic_json& j, detail::output_adapter<std::uint8_t> o, + const bool use_size = false, const bool use_type = false) + { + binary_writer<std::uint8_t>(o).write_ubjson(j, use_size, use_type, true, true); + } + + /// @brief create a BJData serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_bjdata/ + static void to_bjdata(const basic_json& j, detail::output_adapter<char> o, + const bool use_size = false, const bool use_type = false) + { + binary_writer<char>(o).write_ubjson(j, use_size, use_type, true, true); + } + + /// @brief create a BSON serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_bson/ + static std::vector<std::uint8_t> to_bson(const basic_json& j) + { + std::vector<std::uint8_t> result; + to_bson(j, result); + return result; + } + + /// @brief create a BSON serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_bson/ + static void to_bson(const basic_json& j, detail::output_adapter<std::uint8_t> o) + { + binary_writer<std::uint8_t>(o).write_bson(j); + } + + /// @brief create a BSON serialization of a given JSON value + /// @sa https://json.nlohmann.me/api/basic_json/to_bson/ + static void to_bson(const basic_json& j, detail::output_adapter<char> o) + { + binary_writer<char>(o).write_bson(j); + } + + /// @brief create a JSON value from an input in CBOR format + /// @sa https://json.nlohmann.me/api/basic_json/from_cbor/ + template<typename InputType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_cbor(InputType&& i, + const bool strict = true, + const bool allow_exceptions = true, + const cbor_tag_handler_t tag_handler = cbor_tag_handler_t::error) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::forward<InputType>(i)); + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::cbor).sax_parse(input_format_t::cbor, &sdp, strict, tag_handler); + return res ? 
result : basic_json(value_t::discarded); + } + + /// @brief create a JSON value from an input in CBOR format + /// @sa https://json.nlohmann.me/api/basic_json/from_cbor/ + template<typename IteratorType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_cbor(IteratorType first, IteratorType last, + const bool strict = true, + const bool allow_exceptions = true, + const cbor_tag_handler_t tag_handler = cbor_tag_handler_t::error) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::move(first), std::move(last)); + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::cbor).sax_parse(input_format_t::cbor, &sdp, strict, tag_handler); + return res ? result : basic_json(value_t::discarded); + } + + template<typename T> + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_cbor(ptr, ptr + len)) + static basic_json from_cbor(const T* ptr, std::size_t len, + const bool strict = true, + const bool allow_exceptions = true, + const cbor_tag_handler_t tag_handler = cbor_tag_handler_t::error) + { + return from_cbor(ptr, ptr + len, strict, allow_exceptions, tag_handler); + } + + + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_cbor(ptr, ptr + len)) + static basic_json from_cbor(detail::span_input_adapter&& i, + const bool strict = true, + const bool allow_exceptions = true, + const cbor_tag_handler_t tag_handler = cbor_tag_handler_t::error) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = i.get(); + // NOLINTNEXTLINE(hicpp-move-const-arg,performance-move-const-arg) + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::cbor).sax_parse(input_format_t::cbor, &sdp, strict, tag_handler); + return res ? result : basic_json(value_t::discarded); + } + + /// @brief create a JSON value from an input in MessagePack format + /// @sa https://json.nlohmann.me/api/basic_json/from_msgpack/ + template<typename InputType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_msgpack(InputType&& i, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::forward<InputType>(i)); + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::msgpack).sax_parse(input_format_t::msgpack, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + + /// @brief create a JSON value from an input in MessagePack format + /// @sa https://json.nlohmann.me/api/basic_json/from_msgpack/ + template<typename IteratorType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_msgpack(IteratorType first, IteratorType last, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::move(first), std::move(last)); + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::msgpack).sax_parse(input_format_t::msgpack, &sdp, strict); + return res ? 
result : basic_json(value_t::discarded); + } + + template<typename T> + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_msgpack(ptr, ptr + len)) + static basic_json from_msgpack(const T* ptr, std::size_t len, + const bool strict = true, + const bool allow_exceptions = true) + { + return from_msgpack(ptr, ptr + len, strict, allow_exceptions); + } + + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_msgpack(ptr, ptr + len)) + static basic_json from_msgpack(detail::span_input_adapter&& i, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = i.get(); + // NOLINTNEXTLINE(hicpp-move-const-arg,performance-move-const-arg) + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::msgpack).sax_parse(input_format_t::msgpack, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + + /// @brief create a JSON value from an input in UBJSON format + /// @sa https://json.nlohmann.me/api/basic_json/from_ubjson/ + template<typename InputType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_ubjson(InputType&& i, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::forward<InputType>(i)); + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::ubjson).sax_parse(input_format_t::ubjson, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + + /// @brief create a JSON value from an input in UBJSON format + /// @sa https://json.nlohmann.me/api/basic_json/from_ubjson/ + template<typename IteratorType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_ubjson(IteratorType first, IteratorType last, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::move(first), std::move(last)); + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::ubjson).sax_parse(input_format_t::ubjson, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + + template<typename T> + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_ubjson(ptr, ptr + len)) + static basic_json from_ubjson(const T* ptr, std::size_t len, + const bool strict = true, + const bool allow_exceptions = true) + { + return from_ubjson(ptr, ptr + len, strict, allow_exceptions); + } + + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_ubjson(ptr, ptr + len)) + static basic_json from_ubjson(detail::span_input_adapter&& i, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = i.get(); + // NOLINTNEXTLINE(hicpp-move-const-arg,performance-move-const-arg) + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::ubjson).sax_parse(input_format_t::ubjson, &sdp, strict); + return res ? 
result : basic_json(value_t::discarded); + } + + + /// @brief create a JSON value from an input in BJData format + /// @sa https://json.nlohmann.me/api/basic_json/from_bjdata/ + template<typename InputType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_bjdata(InputType&& i, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::forward<InputType>(i)); + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::bjdata).sax_parse(input_format_t::bjdata, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + + /// @brief create a JSON value from an input in BJData format + /// @sa https://json.nlohmann.me/api/basic_json/from_bjdata/ + template<typename IteratorType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_bjdata(IteratorType first, IteratorType last, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::move(first), std::move(last)); + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::bjdata).sax_parse(input_format_t::bjdata, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + + /// @brief create a JSON value from an input in BSON format + /// @sa https://json.nlohmann.me/api/basic_json/from_bson/ + template<typename InputType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_bson(InputType&& i, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::forward<InputType>(i)); + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::bson).sax_parse(input_format_t::bson, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + + /// @brief create a JSON value from an input in BSON format + /// @sa https://json.nlohmann.me/api/basic_json/from_bson/ + template<typename IteratorType> + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_bson(IteratorType first, IteratorType last, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::move(first), std::move(last)); + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::bson).sax_parse(input_format_t::bson, &sdp, strict); + return res ? 
result : basic_json(value_t::discarded); + } + + template<typename T> + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_bson(ptr, ptr + len)) + static basic_json from_bson(const T* ptr, std::size_t len, + const bool strict = true, + const bool allow_exceptions = true) + { + return from_bson(ptr, ptr + len, strict, allow_exceptions); + } + + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_bson(ptr, ptr + len)) + static basic_json from_bson(detail::span_input_adapter&& i, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser<basic_json> sdp(result, allow_exceptions); + auto ia = i.get(); + // NOLINTNEXTLINE(hicpp-move-const-arg,performance-move-const-arg) + const bool res = binary_reader<decltype(ia)>(std::move(ia), input_format_t::bson).sax_parse(input_format_t::bson, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + /// @} + + ////////////////////////// + // JSON Pointer support // + ////////////////////////// + + /// @name JSON Pointer functions + /// @{ + + /// @brief access specified element via JSON Pointer + /// @sa https://json.nlohmann.me/api/basic_json/operator%5B%5D/ + reference operator[](const json_pointer& ptr) + { + return ptr.get_unchecked(this); + } + + template<typename BasicJsonType, detail::enable_if_t<detail::is_basic_json<BasicJsonType>::value, int> = 0> + JSON_HEDLEY_DEPRECATED_FOR(3.11.0, basic_json::json_pointer or nlohmann::json_pointer<basic_json::string_t>) // NOLINT(readability/alt_tokens) + reference operator[](const ::nlohmann::json_pointer<BasicJsonType>& ptr) + { + return ptr.get_unchecked(this); + } + + /// @brief access specified element via JSON Pointer + /// @sa https://json.nlohmann.me/api/basic_json/operator%5B%5D/ + const_reference operator[](const json_pointer& ptr) const + { + return ptr.get_unchecked(this); + } + + template<typename BasicJsonType, detail::enable_if_t<detail::is_basic_json<BasicJsonType>::value, int> = 0> + JSON_HEDLEY_DEPRECATED_FOR(3.11.0, basic_json::json_pointer or nlohmann::json_pointer<basic_json::string_t>) // NOLINT(readability/alt_tokens) + const_reference operator[](const ::nlohmann::json_pointer<BasicJsonType>& ptr) const + { + return ptr.get_unchecked(this); + } + + /// @brief access specified element via JSON Pointer + /// @sa https://json.nlohmann.me/api/basic_json/at/ + reference at(const json_pointer& ptr) + { + return ptr.get_checked(this); + } + + template<typename BasicJsonType> + JSON_HEDLEY_DEPRECATED_FOR(3.11.0, basic_json::json_pointer or nlohmann::json_pointer<basic_json::string_t>) // NOLINT(readability/alt_tokens) + reference at(const ::nlohmann::json_pointer<BasicJsonType>& ptr) + { + return ptr.get_checked(this); + } + + /// @brief access specified element via JSON Pointer + /// @sa https://json.nlohmann.me/api/basic_json/at/ + const_reference at(const json_pointer& ptr) const + { + return ptr.get_checked(this); + } + + template<typename BasicJsonType> + JSON_HEDLEY_DEPRECATED_FOR(3.11.0, basic_json::json_pointer or nlohmann::json_pointer<basic_json::string_t>) // NOLINT(readability/alt_tokens) + const_reference at(const ::nlohmann::json_pointer<BasicJsonType>& ptr) const + { + return ptr.get_checked(this); + } + + /// @brief return flattened JSON value + /// @sa https://json.nlohmann.me/api/basic_json/flatten/ + basic_json flatten() const + { + basic_json result(value_t::object); + json_pointer::flatten("", *this, result); + return result; + } + + /// @brief 
unflatten a previously flattened JSON value + /// @sa https://json.nlohmann.me/api/basic_json/unflatten/ + basic_json unflatten() const + { + return json_pointer::unflatten(*this); + } + + /// @} + + ////////////////////////// + // JSON Patch functions // + ////////////////////////// + + /// @name JSON Patch functions + /// @{ + + /// @brief applies a JSON patch + /// @sa https://json.nlohmann.me/api/basic_json/patch/ + basic_json patch(const basic_json& json_patch) const + { + // make a working copy to apply the patch to + basic_json result = *this; + + // the valid JSON Patch operations + enum class patch_operations {add, remove, replace, move, copy, test, invalid}; + + const auto get_op = [](const std::string & op) + { + if (op == "add") + { + return patch_operations::add; + } + if (op == "remove") + { + return patch_operations::remove; + } + if (op == "replace") + { + return patch_operations::replace; + } + if (op == "move") + { + return patch_operations::move; + } + if (op == "copy") + { + return patch_operations::copy; + } + if (op == "test") + { + return patch_operations::test; + } + + return patch_operations::invalid; + }; + + // wrapper for "add" operation; add value at ptr + const auto operation_add = [&result](json_pointer & ptr, basic_json val) + { + // adding to the root of the target document means replacing it + if (ptr.empty()) + { + result = val; + return; + } + + // make sure the top element of the pointer exists + json_pointer top_pointer = ptr.top(); + if (top_pointer != ptr) + { + result.at(top_pointer); + } + + // get reference to parent of JSON pointer ptr + const auto last_path = ptr.back(); + ptr.pop_back(); + basic_json& parent = result[ptr]; + + switch (parent.m_type) + { + case value_t::null: + case value_t::object: + { + // use operator[] to add value + parent[last_path] = val; + break; + } + + case value_t::array: + { + if (last_path == "-") + { + // special case: append to back + parent.push_back(val); + } + else + { + const auto idx = json_pointer::template array_index<basic_json_t>(last_path); + if (JSON_HEDLEY_UNLIKELY(idx > parent.size())) + { + // avoid undefined behavior + JSON_THROW(out_of_range::create(401, detail::concat("array index ", std::to_string(idx), " is out of range"), &parent)); + } + + // default case: insert add offset + parent.insert(parent.begin() + static_cast<difference_type>(idx), val); + } + break; + } + + // if there exists a parent it cannot be primitive + case value_t::string: // LCOV_EXCL_LINE + case value_t::boolean: // LCOV_EXCL_LINE + case value_t::number_integer: // LCOV_EXCL_LINE + case value_t::number_unsigned: // LCOV_EXCL_LINE + case value_t::number_float: // LCOV_EXCL_LINE + case value_t::binary: // LCOV_EXCL_LINE + case value_t::discarded: // LCOV_EXCL_LINE + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert) LCOV_EXCL_LINE + } + }; + + // wrapper for "remove" operation; remove value at ptr + const auto operation_remove = [this, &result](json_pointer & ptr) + { + // get reference to parent of JSON pointer ptr + const auto last_path = ptr.back(); + ptr.pop_back(); + basic_json& parent = result.at(ptr); + + // remove child + if (parent.is_object()) + { + // perform range check + auto it = parent.find(last_path); + if (JSON_HEDLEY_LIKELY(it != parent.end())) + { + parent.erase(it); + } + else + { + JSON_THROW(out_of_range::create(403, detail::concat("key '", last_path, "' not found"), this)); + } + } + else if (parent.is_array()) + { + // note erase performs 
range check + parent.erase(json_pointer::template array_index<basic_json_t>(last_path)); + } + }; + + // type check: top level value must be an array + if (JSON_HEDLEY_UNLIKELY(!json_patch.is_array())) + { + JSON_THROW(parse_error::create(104, 0, "JSON patch must be an array of objects", &json_patch)); + } + + // iterate and apply the operations + for (const auto& val : json_patch) + { + // wrapper to get a value for an operation + const auto get_value = [&val](const std::string & op, + const std::string & member, + bool string_type) -> basic_json & + { + // find value + auto it = val.m_value.object->find(member); + + // context-sensitive error message + const auto error_msg = (op == "op") ? "operation" : detail::concat("operation '", op, '\''); + + // check if desired value is present + if (JSON_HEDLEY_UNLIKELY(it == val.m_value.object->end())) + { + // NOLINTNEXTLINE(performance-inefficient-string-concatenation) + JSON_THROW(parse_error::create(105, 0, detail::concat(error_msg, " must have member '", member, "'"), &val)); + } + + // check if result is of type string + if (JSON_HEDLEY_UNLIKELY(string_type && !it->second.is_string())) + { + // NOLINTNEXTLINE(performance-inefficient-string-concatenation) + JSON_THROW(parse_error::create(105, 0, detail::concat(error_msg, " must have string member '", member, "'"), &val)); + } + + // no error: return value + return it->second; + }; + + // type check: every element of the array must be an object + if (JSON_HEDLEY_UNLIKELY(!val.is_object())) + { + JSON_THROW(parse_error::create(104, 0, "JSON patch must be an array of objects", &val)); + } + + // collect mandatory members + const auto op = get_value("op", "op", true).template get<std::string>(); + const auto path = get_value(op, "path", true).template get<std::string>(); + json_pointer ptr(path); + + switch (get_op(op)) + { + case patch_operations::add: + { + operation_add(ptr, get_value("add", "value", false)); + break; + } + + case patch_operations::remove: + { + operation_remove(ptr); + break; + } + + case patch_operations::replace: + { + // the "path" location must exist - use at() + result.at(ptr) = get_value("replace", "value", false); + break; + } + + case patch_operations::move: + { + const auto from_path = get_value("move", "from", true).template get<std::string>(); + json_pointer from_ptr(from_path); + + // the "from" location must exist - use at() + basic_json v = result.at(from_ptr); + + // The move operation is functionally identical to a + // "remove" operation on the "from" location, followed + // immediately by an "add" operation at the target + // location with the value that was just removed. + operation_remove(from_ptr); + operation_add(ptr, v); + break; + } + + case patch_operations::copy: + { + const auto from_path = get_value("copy", "from", true).template get<std::string>(); + const json_pointer from_ptr(from_path); + + // the "from" location must exist - use at() + basic_json v = result.at(from_ptr); + + // The copy is functionally identical to an "add" + // operation at the target location using the value + // specified in the "from" member. 
+ operation_add(ptr, v); + break; + } + + case patch_operations::test: + { + bool success = false; + JSON_TRY + { + // check if "value" matches the one at "path" + // the "path" location must exist - use at() + success = (result.at(ptr) == get_value("test", "value", false)); + } + JSON_INTERNAL_CATCH (out_of_range&) + { + // ignore out of range errors: success remains false + } + + // throw an exception if test fails + if (JSON_HEDLEY_UNLIKELY(!success)) + { + JSON_THROW(other_error::create(501, detail::concat("unsuccessful: ", val.dump()), &val)); + } + + break; + } + + case patch_operations::invalid: + default: + { + // op must be "add", "remove", "replace", "move", "copy", or + // "test" + JSON_THROW(parse_error::create(105, 0, detail::concat("operation value '", op, "' is invalid"), &val)); + } + } + } + + return result; + } + + /// @brief creates a diff as a JSON patch + /// @sa https://json.nlohmann.me/api/basic_json/diff/ + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json diff(const basic_json& source, const basic_json& target, + const std::string& path = "") + { + // the patch + basic_json result(value_t::array); + + // if the values are the same, return empty patch + if (source == target) + { + return result; + } + + if (source.type() != target.type()) + { + // different types: replace value + result.push_back( + { + {"op", "replace"}, {"path", path}, {"value", target} + }); + return result; + } + + switch (source.type()) + { + case value_t::array: + { + // first pass: traverse common elements + std::size_t i = 0; + while (i < source.size() && i < target.size()) + { + // recursive call to compare array values at index i + auto temp_diff = diff(source[i], target[i], detail::concat(path, '/', std::to_string(i))); + result.insert(result.end(), temp_diff.begin(), temp_diff.end()); + ++i; + } + + // We now reached the end of at least one array + // in a second pass, traverse the remaining elements + + // remove my remaining elements + const auto end_index = static_cast<difference_type>(result.size()); + while (i < source.size()) + { + // add operations in reverse order to avoid invalid + // indices + result.insert(result.begin() + end_index, object( + { + {"op", "remove"}, + {"path", detail::concat(path, '/', std::to_string(i))} + })); + ++i; + } + + // add other remaining elements + while (i < target.size()) + { + result.push_back( + { + {"op", "add"}, + {"path", detail::concat(path, "/-")}, + {"value", target[i]} + }); + ++i; + } + + break; + } + + case value_t::object: + { + // first pass: traverse this object's elements + for (auto it = source.cbegin(); it != source.cend(); ++it) + { + // escape the key name to be used in a JSON patch + const auto path_key = detail::concat(path, '/', detail::escape(it.key())); + + if (target.find(it.key()) != target.end()) + { + // recursive call to compare object values at key it + auto temp_diff = diff(it.value(), target[it.key()], path_key); + result.insert(result.end(), temp_diff.begin(), temp_diff.end()); + } + else + { + // found a key that is not in o -> remove it + result.push_back(object( + { + {"op", "remove"}, {"path", path_key} + })); + } + } + + // second pass: traverse other object's elements + for (auto it = target.cbegin(); it != target.cend(); ++it) + { + if (source.find(it.key()) == source.end()) + { + // found a key that is not in this -> add it + const auto path_key = detail::concat(path, '/', detail::escape(it.key())); + result.push_back( + { + {"op", "add"}, {"path", path_key}, + {"value", it.value()} + }); + } + } + + 
break; + } + + case value_t::null: + case value_t::string: + case value_t::boolean: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::number_float: + case value_t::binary: + case value_t::discarded: + default: + { + // both primitive type: replace value + result.push_back( + { + {"op", "replace"}, {"path", path}, {"value", target} + }); + break; + } + } + + return result; + } + + /// @} + + //////////////////////////////// + // JSON Merge Patch functions // + //////////////////////////////// + + /// @name JSON Merge Patch functions + /// @{ + + /// @brief applies a JSON Merge Patch + /// @sa https://json.nlohmann.me/api/basic_json/merge_patch/ + void merge_patch(const basic_json& apply_patch) + { + if (apply_patch.is_object()) + { + if (!is_object()) + { + *this = object(); + } + for (auto it = apply_patch.begin(); it != apply_patch.end(); ++it) + { + if (it.value().is_null()) + { + erase(it.key()); + } + else + { + operator[](it.key()).merge_patch(it.value()); + } + } + } + else + { + *this = apply_patch; + } + } + + /// @} +}; + +/// @brief user-defined to_string function for JSON values +/// @sa https://json.nlohmann.me/api/basic_json/to_string/ +NLOHMANN_BASIC_JSON_TPL_DECLARATION +std::string to_string(const NLOHMANN_BASIC_JSON_TPL& j) +{ + return j.dump(); +} + +} // namespace nlohmann + +/////////////////////// +// nonmember support // +/////////////////////// + +namespace std // NOLINT(cert-dcl58-cpp) +{ + +/// @brief hash value for JSON objects +/// @sa https://json.nlohmann.me/api/basic_json/std_hash/ +NLOHMANN_BASIC_JSON_TPL_DECLARATION +struct hash<nlohmann::NLOHMANN_BASIC_JSON_TPL> +{ + std::size_t operator()(const nlohmann::NLOHMANN_BASIC_JSON_TPL& j) const + { + return nlohmann::detail::hash(j); + } +}; + +// specialization for std::less<value_t> +template<> +struct less< ::nlohmann::detail::value_t> // do not remove the space after '<', see https://github.com/nlohmann/json/pull/679 +{ + /*! + @brief compare two value_t enum values + @since version 3.0.0 + */ + bool operator()(::nlohmann::detail::value_t lhs, + ::nlohmann::detail::value_t rhs) const noexcept + { +#if JSON_HAS_THREE_WAY_COMPARISON + return std::is_lt(lhs <=> rhs); // *NOPAD* +#else + return ::nlohmann::detail::operator<(lhs, rhs); +#endif + } +}; + +// C++20 prohibit function specialization in the std namespace. 
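+// [editor's note] A hedged usage sketch, not upstream code: JSON Pointer access
+// and the RFC 6902 / RFC 7386 patch functions defined in basic_json above.
+// diff() produces a JSON Patch that patch() re-applies; merge_patch() applies
+// a JSON Merge Patch in place.
+//
+//   nlohmann::json src = {{"a", 1}, {"b", {{"c", 2}}}};
+//   nlohmann::json dst = {{"a", 1}, {"b", {{"c", 3}}}};
+//   int v = src.at(nlohmann::json::json_pointer("/b/c"));  // 2
+//   auto p = nlohmann::json::diff(src, dst);  // [{"op":"replace","path":"/b/c","value":3}]
+//   bool same = (src.patch(p) == dst);        // true: the patch round-trips
+//   src.merge_patch({{"b", {{"c", 3}}}});     // src now equals dst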
+#ifndef JSON_HAS_CPP_20 + +/// @brief exchanges the values of two JSON objects +/// @sa https://json.nlohmann.me/api/basic_json/std_swap/ +NLOHMANN_BASIC_JSON_TPL_DECLARATION +inline void swap(nlohmann::NLOHMANN_BASIC_JSON_TPL& j1, nlohmann::NLOHMANN_BASIC_JSON_TPL& j2) noexcept( // NOLINT(readability-inconsistent-declaration-parameter-name) + is_nothrow_move_constructible<nlohmann::NLOHMANN_BASIC_JSON_TPL>::value&& // NOLINT(misc-redundant-expression) + is_nothrow_move_assignable<nlohmann::NLOHMANN_BASIC_JSON_TPL>::value) +{ + j1.swap(j2); +} + +#endif + +} // namespace std + +/// @brief user-defined string literal for JSON values +/// @sa https://json.nlohmann.me/api/basic_json/operator_literal_json/ +JSON_HEDLEY_NON_NULL(1) +inline nlohmann::json operator "" _json(const char* s, std::size_t n) +{ + return nlohmann::json::parse(s, s + n); +} + +/// @brief user-defined string literal for JSON pointer +/// @sa https://json.nlohmann.me/api/basic_json/operator_literal_json_pointer/ +JSON_HEDLEY_NON_NULL(1) +inline nlohmann::json::json_pointer operator "" _json_pointer(const char* s, std::size_t n) +{ + return nlohmann::json::json_pointer(std::string(s, n)); +} + +// #include <nlohmann/detail/macro_unscope.hpp> + + +// restore clang diagnostic settings +#if defined(__clang__) + #pragma clang diagnostic pop +#endif + +// clean up +#undef JSON_ASSERT +#undef JSON_INTERNAL_CATCH +#undef JSON_THROW +#undef JSON_PRIVATE_UNLESS_TESTED +#undef NLOHMANN_BASIC_JSON_TPL_DECLARATION +#undef NLOHMANN_BASIC_JSON_TPL +#undef JSON_EXPLICIT +#undef NLOHMANN_CAN_CALL_STD_FUNC_IMPL +#undef JSON_INLINE_VARIABLE +#undef JSON_NO_UNIQUE_ADDRESS + +#ifndef JSON_TEST_KEEP_MACROS + #undef JSON_CATCH + #undef JSON_TRY + #undef JSON_HAS_CPP_11 + #undef JSON_HAS_CPP_14 + #undef JSON_HAS_CPP_17 + #undef JSON_HAS_CPP_20 + #undef JSON_HAS_FILESYSTEM + #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM + #undef JSON_HAS_THREE_WAY_COMPARISON + #undef JSON_HAS_RANGES + #undef JSON_USE_LEGACY_DISCARDED_VALUE_COMPARISON +#endif + +// #include <nlohmann/thirdparty/hedley/hedley_undef.hpp> + + +#undef JSON_HEDLEY_ALWAYS_INLINE +#undef JSON_HEDLEY_ARM_VERSION +#undef JSON_HEDLEY_ARM_VERSION_CHECK +#undef JSON_HEDLEY_ARRAY_PARAM +#undef JSON_HEDLEY_ASSUME +#undef JSON_HEDLEY_BEGIN_C_DECLS +#undef JSON_HEDLEY_CLANG_HAS_ATTRIBUTE +#undef JSON_HEDLEY_CLANG_HAS_BUILTIN +#undef JSON_HEDLEY_CLANG_HAS_CPP_ATTRIBUTE +#undef JSON_HEDLEY_CLANG_HAS_DECLSPEC_DECLSPEC_ATTRIBUTE +#undef JSON_HEDLEY_CLANG_HAS_EXTENSION +#undef JSON_HEDLEY_CLANG_HAS_FEATURE +#undef JSON_HEDLEY_CLANG_HAS_WARNING +#undef JSON_HEDLEY_COMPCERT_VERSION +#undef JSON_HEDLEY_COMPCERT_VERSION_CHECK +#undef JSON_HEDLEY_CONCAT +#undef JSON_HEDLEY_CONCAT3 +#undef JSON_HEDLEY_CONCAT3_EX +#undef JSON_HEDLEY_CONCAT_EX +#undef JSON_HEDLEY_CONST +#undef JSON_HEDLEY_CONSTEXPR +#undef JSON_HEDLEY_CONST_CAST +#undef JSON_HEDLEY_CPP_CAST +#undef JSON_HEDLEY_CRAY_VERSION +#undef JSON_HEDLEY_CRAY_VERSION_CHECK +#undef JSON_HEDLEY_C_DECL +#undef JSON_HEDLEY_DEPRECATED +#undef JSON_HEDLEY_DEPRECATED_FOR +#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL +#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_ +#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED +#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES +#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS +#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION +#undef JSON_HEDLEY_DIAGNOSTIC_POP +#undef JSON_HEDLEY_DIAGNOSTIC_PUSH +#undef JSON_HEDLEY_DMC_VERSION +#undef JSON_HEDLEY_DMC_VERSION_CHECK +#undef JSON_HEDLEY_EMPTY_BASES 
+#undef JSON_HEDLEY_EMSCRIPTEN_VERSION +#undef JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK +#undef JSON_HEDLEY_END_C_DECLS +#undef JSON_HEDLEY_FLAGS +#undef JSON_HEDLEY_FLAGS_CAST +#undef JSON_HEDLEY_GCC_HAS_ATTRIBUTE +#undef JSON_HEDLEY_GCC_HAS_BUILTIN +#undef JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE +#undef JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE +#undef JSON_HEDLEY_GCC_HAS_EXTENSION +#undef JSON_HEDLEY_GCC_HAS_FEATURE +#undef JSON_HEDLEY_GCC_HAS_WARNING +#undef JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK +#undef JSON_HEDLEY_GCC_VERSION +#undef JSON_HEDLEY_GCC_VERSION_CHECK +#undef JSON_HEDLEY_GNUC_HAS_ATTRIBUTE +#undef JSON_HEDLEY_GNUC_HAS_BUILTIN +#undef JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE +#undef JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE +#undef JSON_HEDLEY_GNUC_HAS_EXTENSION +#undef JSON_HEDLEY_GNUC_HAS_FEATURE +#undef JSON_HEDLEY_GNUC_HAS_WARNING +#undef JSON_HEDLEY_GNUC_VERSION +#undef JSON_HEDLEY_GNUC_VERSION_CHECK +#undef JSON_HEDLEY_HAS_ATTRIBUTE +#undef JSON_HEDLEY_HAS_BUILTIN +#undef JSON_HEDLEY_HAS_CPP_ATTRIBUTE +#undef JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS +#undef JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE +#undef JSON_HEDLEY_HAS_EXTENSION +#undef JSON_HEDLEY_HAS_FEATURE +#undef JSON_HEDLEY_HAS_WARNING +#undef JSON_HEDLEY_IAR_VERSION +#undef JSON_HEDLEY_IAR_VERSION_CHECK +#undef JSON_HEDLEY_IBM_VERSION +#undef JSON_HEDLEY_IBM_VERSION_CHECK +#undef JSON_HEDLEY_IMPORT +#undef JSON_HEDLEY_INLINE +#undef JSON_HEDLEY_INTEL_CL_VERSION +#undef JSON_HEDLEY_INTEL_CL_VERSION_CHECK +#undef JSON_HEDLEY_INTEL_VERSION +#undef JSON_HEDLEY_INTEL_VERSION_CHECK +#undef JSON_HEDLEY_IS_CONSTANT +#undef JSON_HEDLEY_IS_CONSTEXPR_ +#undef JSON_HEDLEY_LIKELY +#undef JSON_HEDLEY_MALLOC +#undef JSON_HEDLEY_MCST_LCC_VERSION +#undef JSON_HEDLEY_MCST_LCC_VERSION_CHECK +#undef JSON_HEDLEY_MESSAGE +#undef JSON_HEDLEY_MSVC_VERSION +#undef JSON_HEDLEY_MSVC_VERSION_CHECK +#undef JSON_HEDLEY_NEVER_INLINE +#undef JSON_HEDLEY_NON_NULL +#undef JSON_HEDLEY_NO_ESCAPE +#undef JSON_HEDLEY_NO_RETURN +#undef JSON_HEDLEY_NO_THROW +#undef JSON_HEDLEY_NULL +#undef JSON_HEDLEY_PELLES_VERSION +#undef JSON_HEDLEY_PELLES_VERSION_CHECK +#undef JSON_HEDLEY_PGI_VERSION +#undef JSON_HEDLEY_PGI_VERSION_CHECK +#undef JSON_HEDLEY_PREDICT +#undef JSON_HEDLEY_PRINTF_FORMAT +#undef JSON_HEDLEY_PRIVATE +#undef JSON_HEDLEY_PUBLIC +#undef JSON_HEDLEY_PURE +#undef JSON_HEDLEY_REINTERPRET_CAST +#undef JSON_HEDLEY_REQUIRE +#undef JSON_HEDLEY_REQUIRE_CONSTEXPR +#undef JSON_HEDLEY_REQUIRE_MSG +#undef JSON_HEDLEY_RESTRICT +#undef JSON_HEDLEY_RETURNS_NON_NULL +#undef JSON_HEDLEY_SENTINEL +#undef JSON_HEDLEY_STATIC_ASSERT +#undef JSON_HEDLEY_STATIC_CAST +#undef JSON_HEDLEY_STRINGIFY +#undef JSON_HEDLEY_STRINGIFY_EX +#undef JSON_HEDLEY_SUNPRO_VERSION +#undef JSON_HEDLEY_SUNPRO_VERSION_CHECK +#undef JSON_HEDLEY_TINYC_VERSION +#undef JSON_HEDLEY_TINYC_VERSION_CHECK +#undef JSON_HEDLEY_TI_ARMCL_VERSION +#undef JSON_HEDLEY_TI_ARMCL_VERSION_CHECK +#undef JSON_HEDLEY_TI_CL2000_VERSION +#undef JSON_HEDLEY_TI_CL2000_VERSION_CHECK +#undef JSON_HEDLEY_TI_CL430_VERSION +#undef JSON_HEDLEY_TI_CL430_VERSION_CHECK +#undef JSON_HEDLEY_TI_CL6X_VERSION +#undef JSON_HEDLEY_TI_CL6X_VERSION_CHECK +#undef JSON_HEDLEY_TI_CL7X_VERSION +#undef JSON_HEDLEY_TI_CL7X_VERSION_CHECK +#undef JSON_HEDLEY_TI_CLPRU_VERSION +#undef JSON_HEDLEY_TI_CLPRU_VERSION_CHECK +#undef JSON_HEDLEY_TI_VERSION +#undef JSON_HEDLEY_TI_VERSION_CHECK +#undef JSON_HEDLEY_UNAVAILABLE +#undef JSON_HEDLEY_UNLIKELY +#undef JSON_HEDLEY_UNPREDICTABLE +#undef JSON_HEDLEY_UNREACHABLE +#undef JSON_HEDLEY_UNREACHABLE_RETURN +#undef 
JSON_HEDLEY_VERSION +#undef JSON_HEDLEY_VERSION_DECODE_MAJOR +#undef JSON_HEDLEY_VERSION_DECODE_MINOR +#undef JSON_HEDLEY_VERSION_DECODE_REVISION +#undef JSON_HEDLEY_VERSION_ENCODE +#undef JSON_HEDLEY_WARNING +#undef JSON_HEDLEY_WARN_UNUSED_RESULT +#undef JSON_HEDLEY_WARN_UNUSED_RESULT_MSG +#undef JSON_HEDLEY_FALL_THROUGH + + + +#endif // INCLUDE_NLOHMANN_JSON_HPP_ diff --git a/gateway-main/librairies/cppGate/main.cpp b/gateway-main/librairies/cppGate/main.cpp new file mode 100644 index 0000000000000000000000000000000000000000..73d8b18d762f7f34eca3c3a966525fe29e6c728a --- /dev/null +++ b/gateway-main/librairies/cppGate/main.cpp @@ -0,0 +1,38 @@ +// // +// // Created by tlabrosse on july 2022 +// // licence : GNU lgpl +// // you can contact me at : theo.labt@gmail.com +// // + +// #include <iostream> +// #include "SenderStub.h" + +// using namespace std; + +// exemple d'utilisation en tant que sender + +// int main() { + +// ExecFile* execFile = new ExecFile("/home/tlabrosse/PycharmProjects/pythonGate/", "testReceiver.py", "python3 "); +// OutputFile* outputFile = new OutputFile("/home/tlabrosse/Bureau/gateway/c++/cppGate/build/", "outputs.json"); +// SenderStub* sndStub = new SenderStub(execFile, outputFile); + +// Dictionary* dico1 = new Dictionary("dico1"); +// dico1->addParameter("hey", "toi"); +// dico1->addParameter("para2", "value2"); + +// Dictionary* dico2 = new Dictionary("dico2"); +// dico2->addParameter("para3", "value3"); +// dico2->addArgument(dico1); + +// Dictionary* dico3 = new Dictionary("dico3"); +// dico3->addParameter("heyo", "salut"); + +// sndStub->addDictionary(dico2); +// sndStub->addDictionary(dico3); + +// sndStub->run("/home/tlabrosse/Bureau/gateway/c++/gateway/build/", "gate.o"); +// outputFile->displayContent(); + +// return 0; +// } diff --git a/gateway-main/librairies/pythonGate/__init__.py b/gateway-main/librairies/pythonGate/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f8e9a988a1218003d852a679f40aa4c3ffdb1156 --- /dev/null +++ b/gateway-main/librairies/pythonGate/__init__.py @@ -0,0 +1,10 @@ +# +# Created by tlabrosse on july 2022. +# licence : GNU lgpl +# you can contact me at : theo.labt@gmail.com +# + +from .argument import Dictionary, Parameter +from .file import File, ExecFile, OutputFile +from .stub import SenderStub, ReceiverStub + diff --git a/gateway-main/librairies/pythonGate/argument.py b/gateway-main/librairies/pythonGate/argument.py new file mode 100644 index 0000000000000000000000000000000000000000..670bfa095f5b568cfd43fb167aeb98c19dfcef53 --- /dev/null +++ b/gateway-main/librairies/pythonGate/argument.py @@ -0,0 +1,183 @@ +# +# Created by tlabrosse on july 2022. 
+# licence : GNU lgpl +# you can contact me at : theo.labt@gmail.com +# + +import json +from abc import abstractmethod +from .serializable import Serializable + + +class Argument(Serializable): + def __init__(self, name): + self.name = name + self.value = None + + @abstractmethod + def serialize(self) -> dict: + pass + + def display(self): + pass + + +class Parameter(Argument): + def __init__(self, name, value): + super().__init__(name) + self.value = value + + def display(self) -> str: + json_line = json.dumps(self.serialize(), indent=2) + print(json_line) + return json_line + + def getArgument(self, name: str) -> "Argument | None": + if self.name == name: + return self + return None + + def getValueAsInt(self): + return int(self.value) + + def getValueAsFloat(self): + return float(self.value) + + def getValueAsList(self): + return list(json.loads(self.value)) + + def serialize(self) -> dict: + return { + "Parameter": { + "name": self.name, + "value": self.value + } + } + + +class Dictionary(Argument): + def __init__(self, name): + super().__init__(name) + self.value = [] + + def addParameter(self, name: str, value): + """ + This function will add a parameter to the dictionary. + + Parameters + ---------- + name : str + The name of the parameter + + value + The value of the parameter + """ + self.addArgument(Parameter(name, str(value))) + + def addArgument(self, argument: Argument) -> None: + """ + This function will add an argument to the Dictionary. + + Parameters + ---------- + argument : Argument + The argument to add + + Raises + ------ + TypeError + If the given parameter is not of Argument type + """ + if isinstance(argument, Argument): + self.value.append(argument) + else: + raise TypeError("parameter needs to inherits from Argument") + + def getArgument(self, name: str) -> "Argument | None": + """This function will return an argument if it exists with the given name. + If no argument exists with the given name, it will return None. + + Parameters + ---------- + name : str + The name of the argument + + Returns + ------- + Argument, None + The argument with the right name, or nothing + """ + if self.name == name: + return self + + for argument in self.value: + arg = argument.getArgument(name) + if arg is not None: + return arg + + return None + + def getParameter(self, name: str) -> "Parameter | None": + """ + This function will return a parameter if it exists with the given name. + If no parameter exists with the given name, it will return None. + + Parameters + ---------- + name : str + The name of the parameter + + Returns + ------- + Parameter, None + The parameter with the right name, or nothing + """ + for argument in self.value: + arg = argument.getArgument(name) + if arg is not None: + if type(arg) is Parameter: + return arg + + return None + + def display(self) -> str: + """ + This function will print and return a string representation of the dictionary. 
+ + Returns + ------- + str + The string representation of the dictionary + """ + json_line = json.dumps(self.serialize(), indent=2) + print(json_line) + return json_line + + def serialize(self) -> dict: + dico = { + "Dictionary": { + "name": self.name + } + } + value = [] + + for val in self.value: + value.append(val.serialize()) + dico["Dictionary"]["value"] = value + + return dico + + def deserialize(self, dico: dict) -> None: + self.name = dico["name"] + + value_json = dico["value"] + + for val in value_json: + if "Dictionary" in val: + dictionary = Dictionary(val["Dictionary"]["name"]) + dictionary.deserialize(val["Dictionary"]) + + self.value.append(dictionary) + elif "Parameter" in val: + parameter = Parameter(val["Parameter"]["name"], val["Parameter"]["value"]) + self.value.append(parameter) diff --git a/gateway-main/librairies/pythonGate/file.py b/gateway-main/librairies/pythonGate/file.py new file mode 100644 index 0000000000000000000000000000000000000000..1b4b03b8575c007470674bd69ef28182b10e4846 --- /dev/null +++ b/gateway-main/librairies/pythonGate/file.py @@ -0,0 +1,120 @@ +# +# Created by tlabrosse on july 2022. +# licence : GNU lgpl +# you can contact me at : theo.labt@gmail.com +# + +from .argument import * + + +class File(Serializable): + def __init__(self, path: str, name: str): + self.name = name + self.path = path + self.actif = True + + def serialize(self) -> dict: + if self.actif: + return { + "File": { + "name": self.name, + "path": self.path + } + } + return {} + + def display(self) -> str: + if self.actif: + json_line = json.dumps(self.serialize(), indent=2) + print(json_line) + return json_line + return "" + + +class ExecFile(File): + def __init__(self, path: str, name: str, cmd: str, cmd_linux: str = ""): + super().__init__(path, name) + self.cmd = cmd + self.cmd_linux = cmd_linux + + def serialize(self) -> dict: + if self.actif: + dico = super().serialize() + dico["ExecFile"] = dico.pop("File") + dico["ExecFile"]["cmd"] = self.cmd + dico["ExecFile"]["cmdAlt"] = self.cmd_linux + + return dico + return {} + + def display(self) -> str: + if self.actif: + json_line = json.dumps(self.serialize(), indent=2) + print(json_line) + return json_line + return "" + + +class OutputFile(File): + def __init__(self, path: str, name: str): + super().__init__(path, name) + + def displayContent(self): + if self.actif: + json_line = json.dumps(self.read(), indent=2) + print(json_line) + return json_line + return "" + + def readAsDictionary(self) -> Dictionary: + if self.actif: + dictionary_dict = Dictionary("outputFile") + + file = self.read() + output_dico = Dictionary("Outputs") + for output in file["Outputs"]: + dico = Dictionary(output["Dictionary"]["name"]) + dico.deserialize(output["Dictionary"]) + output_dico.addArgument(dico) + + dictionary_dict.addArgument(output_dico) + + return dictionary_dict + return Dictionary("") + + def read(self) -> dict: + if self.actif: + file = open(self.path + self.name, 'r') + lines = file.readlines() + file.close() + + json_file = "\n".join(lines) + return json.loads(json_file) + return {} + + def writeOutput(self, dictionary: Dictionary): + if self.actif: + file = self.read() + outputs = file["Outputs"] + outputs.append(dictionary.serialize()) + file["Outputs"] = outputs + + file_json = json.dumps(file, indent=2) + file = open(self.path + self.name, 'w') + file.write(file_json) + file.close() + + def serialize(self) -> dict: + if self.actif: + dico = super().serialize() + dico["OutputFile"] = dico.pop("File") + + return dico + return {} + + 
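+    # A minimal, commented-out usage sketch (illustration only, not part of the
+    # library). It assumes the target file already exists and contains
+    # {"Outputs": []}, because read() parses the whole file and writeOutput()
+    # appends to its "Outputs" list; the path below is a placeholder.
+    #
+    #   out = OutputFile("/tmp/", "outputs.json")
+    #   result = Dictionary("results")
+    #   result.addParameter("score", 0.93)    # stored as the string "0.93"
+    #   out.writeOutput(result)               # re-reads the file, appends, rewrites it
+    #   score = out.readAsDictionary().getParameter("score").getValueAsFloat()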
def display(self) -> str: + if self.actif: + json_line = json.dumps(self.serialize(), indent=2) + print(json_line) + return json_line + return "" diff --git a/gateway-main/librairies/pythonGate/serializable.py b/gateway-main/librairies/pythonGate/serializable.py new file mode 100644 index 0000000000000000000000000000000000000000..368541c2f902d7a7412281bccacdc57f80f7ff9e --- /dev/null +++ b/gateway-main/librairies/pythonGate/serializable.py @@ -0,0 +1,13 @@ +# +# Created by tlabrosse on july 2022. +# licence : GNU lgpl +# you can contact me at : theo.labt@gmail.com +# + +from abc import ABC, abstractmethod + + +class Serializable(ABC): + @abstractmethod + def serialize(self) -> dict: + return {} diff --git a/gateway-main/librairies/pythonGate/stub.py b/gateway-main/librairies/pythonGate/stub.py new file mode 100644 index 0000000000000000000000000000000000000000..6c48a38d4559229640dee2d027d0bb7deb7b99d5 --- /dev/null +++ b/gateway-main/librairies/pythonGate/stub.py @@ -0,0 +1,140 @@ +# +# Created by tlabrosse on july 2022. +# licence : GNU lgpl +# you can contact me at : theo.labt@gmail.com +# + +import subprocess +from typing import List +import sys + +from .file import * + + +class Stub: + def __init__(self, outputFile: "OutputFile | None"): + if outputFile is None: + outputFile = OutputFile("", "") + + self.actif = True + self.outputFile: "OutputFile" = outputFile + self.dictionaries: List[Dictionary] = [] + + def findArgumentWithName(self, name: str): + if self.actif: + dico = None + for dictionary in self.dictionaries: + dico = dictionary.getArgument(name) + if dico is not None: + break + + return dico + return None + + def getArgument(self, name: str): + if self.actif: + return self.findArgumentWithName(name) + return None + + def findDictionaryWithName(self, name: str): + if self.actif: + for dico in self.dictionaries: + if dico.name == name: + return dico + + return None + + def displayDictionaries(self) -> str: + if self.actif: + dictionaries = [] + dico = {} + + for dictionary in self.dictionaries: + dictionaries.append(dictionary.serialize()) + + dico["Dictionaries"] = dictionaries + json_line = json.dumps(dico, indent=2, sort_keys=True) + + print(json_line) + return json_line + return "" + + def displayOutputFile(self) -> str: + if self.actif: + json_line = json.dumps(self.outputFile.serialize(), indent=2, sort_keys=True) + + print(json_line) + return json_line + return "" + + def displayAll(self) -> str: + if self.actif: + return self.displayOutputFile() + self.displayDictionaries() + return "" + + +class SenderStub(Stub): + def __init__(self, execFile=None, outputFile=None): + super().__init__(outputFile) + self.execFile: ExecFile = execFile + + def run(self, gatePath: str, gateName: str): + print(" =============== Running gateway =============== ") + print(gatePath + gateName + " " + self.getSentLine()) + subprocess.run(gatePath + gateName + " " + self.getSentLine(), shell=True) + print(" =============== Gateway ending ================ ") + + def getSentLine(self): + return self.serialize().replace('"', '\\"') + + def serialize(self) -> str: + dico = { + "ExecFile": self.execFile.serialize()["ExecFile"], + "OutputFile": self.outputFile.serialize()["OutputFile"] + } + + dictionaries = [] + + for dictionary in self.dictionaries: + dictionaries.append(dictionary.serialize()) + + dico["Dictionaries"] = dictionaries + + return json.dumps(dico) + + def displayExecFile(self) -> str: + json_line = json.dumps(self.execFile.serialize(), indent=2, sort_keys=True) + + print(json_line) + 
return json_line + + def displayAll(self) -> str: + return self.displayExecFile() + super().displayAll() + + +class ReceiverStub(Stub): + def __init__(self): + super().__init__(None) + self.actif = True + + self.deserialize(self.readArguments()) + + def readArguments(self) -> str: + if len(sys.argv) > 1: + return str(sys.argv[1]) + self.actif = False + self.outputFile.actif = False + + def deserialize(self, json_line: str) -> None: + if self.actif: + data_line = json.loads(json_line) + + self.outputFile = OutputFile(data_line["OutputFile"]["path"], data_line["OutputFile"]["name"]) + + dictionaries_json = data_line["Dictionaries"] + + for dictionary_json in dictionaries_json: + dictionary = Dictionary(dictionary_json["Dictionary"]["name"]) + dictionary.deserialize(dictionary_json["Dictionary"]) + + self.dictionaries.append(dictionary) diff --git a/gateway-main/librairies/rgate/rgate.R b/gateway-main/librairies/rgate/rgate.R new file mode 100644 index 0000000000000000000000000000000000000000..8167a231415a03156b7092dccca739894ff1e0c0 --- /dev/null +++ b/gateway-main/librairies/rgate/rgate.R @@ -0,0 +1,13 @@ +# +# Created by tlabrosse on july 2022. +# licence : GNU lgpl +# you can contact me at : theo.labt@gmail.com +# + +# Those are packages you may want to install in order to use the librairie +# # install.packages("R6") +# # install.packages("jsonlite") +# library(R6) +# library(jsonlite) + +source("lib/rgate/Stub.R") diff --git a/gateway-main/librairies/rgate/rgate/Argument.R b/gateway-main/librairies/rgate/rgate/Argument.R new file mode 100644 index 0000000000000000000000000000000000000000..afb8643b8b398d2e529c172c02bcaa5ef6a504e4 --- /dev/null +++ b/gateway-main/librairies/rgate/rgate/Argument.R @@ -0,0 +1,154 @@ +# +# Created by tlabrosse on july 2022. 
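+# A short, commented-out sketch (illustration only) of how the classes below are
+# meant to be combined; it mirrors the commented C++ example in cppGate/main.cpp
+# and assumes jsonlite is loaded (as in Stub.R). Parameter values are stored as
+# single strings, hence the getValueAs*() accessors.
+#
+#   dico <- Dictionary$new("settings")
+#   dico$addParameter("iterations", "10")
+#   dico$addParameter("rate", "0.5")
+#   inner <- Dictionary$new("paths")
+#   inner$addParameter("workdir", "/tmp")            # placeholder value
+#   dico$addArgument(inner)
+#   dico$getArgument("rate")$getValueAsNumeric()     # 0.5
+#   toJSON(dico$serialize())                         # nested {"Dictionary":{...}} JSON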
+# licence : GNU lgpl +# you can contact me at : theo.labt@gmail.com +# + +library(R6) + +# class Argument avec R6 +Argument <- R6Class("Argument", + list( # attributs et méthods public + name = "", + value = NULL, + + initialize = function(name) { + stopifnot(is.character(name), length(name) == 1) + + self$name <- name + } + ) +) + +# class Parameter avec R6 +Parameter <- R6Class("Parameter", inherit = Argument, +public = list( + initialize = function(name, value) { + stopifnot(is.character(value), length(value) == 1) + + self$value <- value + super$initialize(name = name) + }, + + display = function() { + json_line = toJSON(self$serialize()) + print(json_line) + return(json_line) + }, + + getArgument = function(name) { + if(self$name == name) + return(self) + return(NULL) + }, + + getValueAsNumeric = function() { + return(as.numeric(self$value)) + }, + + getValueAsList = function() { + return(as.list(fromJSON(self$value))) + }, + + serialize = function() { + return(list( + "Parameter" = list( + "name" = self$name, + "value" = self$value + ) + )) + } +)) + + + +# class Dictionary avec R6 +Dictionary <- R6Class("Dictionary", inherit = Argument, +public = list( + value = list(), + initialize = function(name) { + self$value = list() + + super$initialize(name) + }, + + display = function() { + json_line = toJSON(self$serialize()) + print(json_line) + return(json_line) + }, + + addArgument = function(argument) { + self$value = append(self$value, argument) + invisible(self) + }, + + addParameter = function(name, value) { + self$addArgument(Parameter$new(name, value)) + }, + + getArgument = function(name) { + if(self$name == name) + return(self) + + for(argument in self$value) { + arg = argument$getArgument(name) + if(!is.null(arg)) + return(arg) + } + + return(NULL) + }, + + getParameter = function(name) { + for(argument in self$value) { + arg = argument$getArgument(name) + if(!is.null(arg)) + if(class(arg) == "Parameter") + return(arg) + } + }, + + serialize = function() { + dico <- list( + "Dictionary" = list( + "name" = self$name + ) + ) + + value = list() + for(val in self$value) { + value = append(value, val$serialize()) + } + dico$Dictionary$value = value + + return(dico) + }, + + deserialize = function(dico) { + value_json = dico + + # Dictionary + for(i in seq_along(value_json$Dictionary$name)) { + + if(!is.null(value_json$Dictionary$name[[i]]) && !is.null(value_json$Dictionary$value[[i]])) { + dictionary = Dictionary$new(value_json$Dictionary$name[[i]]) + dictionary = dictionary$deserialize(value_json$Dictionary$value[[i]]) + + self$value = append(self$value, dictionary) + } + } + + # Paramater + for(i in seq_along(value_json$Parameter$name)) { + + if(!is.null(value_json$Parameter$name[[i]]) && !is.null(value_json$Parameter$value[[i]])) { + parameter = Parameter$new(value_json$Parameter$name[[i]], value_json$Parameter$value[[i]]) + + self$value = append(self$value, parameter) + } + } + + invisible(self) + } +)) diff --git a/gateway-main/librairies/rgate/rgate/File.R b/gateway-main/librairies/rgate/rgate/File.R new file mode 100644 index 0000000000000000000000000000000000000000..965e61837a30fca625835ee43bc8a792f28f2750 --- /dev/null +++ b/gateway-main/librairies/rgate/rgate/File.R @@ -0,0 +1,166 @@ +# +# Created by tlabrosse on july 2022. 
+# licence : GNU lgpl +# you can contact me at : theo.labt@gmail.com +# + +library(R6) + +File <- R6Class("File", +public=list( + name = "", + path = "", + actif = TRUE, + + initialize = function(path, name) { + stopifnot(is.character(name), length(name) == 1) + stopifnot(is.character(path), length(path) == 1) + + self$actif=TRUE + + self$name = name + self$path = path + }, + + display = function() { + if(self$actif){ + json_line = toJSON(self$serialize()) + print(json_line) + return(json_line) + } + return("") + }, + + serialize = function() { + if(self$actif){ + return(list( + "File" = list( + "name" = self$name, + "path" = self$path + ) + )) + } + return(list()) + } +)) + + + +ExecFile <- R6Class("ExecFile", inherit = File, +public = list( + cmd = "", + cmdAlt = "", + initialize = function(path, name, cmd, cmdAlt = "") { + stopifnot(is.character(cmd), length(cmd) == 1) + stopifnot(is.character(cmdAlt), length(cmdAlt) == 1) + + self$cmd = cmd + self$cmdAlt = cmdAlt + super$initialize(path, name) + }, + + display = function() { + if(self$actif){ + json_line = toJSON(self$serialize()) + print(json_line) + return(json_line) + } + return("") + }, + + serialize = function() { + if(self$actif){ + dico = super$serialize() + + dico$ExecFile = dico$File + dico$File = NULL + + dico$ExecFile$cmd = self$cmd + dico$ExecFile$cmdAlt = self$cmdAlt + + return(dico) + } + return(list()) + } +)) + + + +OutputFile <- R6Class("OutputFile", inherit = File, +public = list( + initialize = function(path, name) { + super$initialize(path, name) + }, + + display = function() { + if(self$actif){ + json_line = toJSON(self$serialize()) + print(json_line) + return(json_line) + } + return("") + }, + + serialize = function() { + if(self$actif){ + dico = super$serialize() + + dico$OutputFile = dico$File + dico$File = NULL + + return(dico) + } + return(list()) + }, + + read = function() { + if(self$actif){ + json_line = paste0(readLines(paste0(self$path, self$name))) + line = fromJSON(json_line) + + return(line) + } + return("") + }, + + readAsDictionary = function() { + if(self$actif){ + dictionary_dict = Dictionary$new("outputFile") + + file = self$read() + output_dico = Dictionary$new("Outputs") + for(i in seq_along(file$Outputs)) { + dico = Dictionary$new(file$Outputs$Dictionary$name[[i]]) + dico$deserialize(file$Outputs$Dictionary$value[[i]]) + + output_dico$addArgument(dico) + } + dictionary_dict$addArgument(output_dico) + + return(dictionary_dict) + } + return(Dictionary$new("")) + }, + + displayContent = function() { + if(self$actif){ + json_line = prettify(toJSON(self$read())) + print(json_line) + return(json_line) + } + return("") + }, + + writeOutput = function(dictionary) { + if(self$actif){ + file = toJSON(self$read()) + + to_add = paste0('"Outputs":[', improveSerialize(toJSON(dictionary$serialize()))) + file = paste0(append(strsplit(file, '"Outputs":\\[')[[1]], to_add, after=1)) + + write(prettify(file), paste0(self$path, self$name)) + } + invisible(self) + } + +)) diff --git a/gateway-main/librairies/rgate/rgate/Stub.R b/gateway-main/librairies/rgate/rgate/Stub.R new file mode 100644 index 0000000000000000000000000000000000000000..982cd37bd51dd080c54ca1bc7ba11e581a37e19a --- /dev/null +++ b/gateway-main/librairies/rgate/rgate/Stub.R @@ -0,0 +1,222 @@ +# +# Created by tlabrosse on july 2022. 
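+# A commented-out usage sketch (illustration only), mirroring the C++ example in
+# cppGate/main.cpp; every path and file name below is a placeholder. On the sending
+# side the whole argument tree is serialised to one JSON string (run() escapes the
+# quotes before calling system()); improveSerialize()/improveDeSerialize() below
+# appear to work around jsonlite wrapping scalar values in one-element arrays.
+#
+#   exec <- ExecFile$new("/path/to/scripts/", "receiver.R", "Rscript ")
+#   outs <- OutputFile$new("/path/to/outputs/", "outputs.json")
+#   snd  <- SenderStub$new(execFile = exec, outputFile = outs)
+#   dico <- Dictionary$new("settings")
+#   dico$addParameter("rate", "0.5")
+#   snd$addDictionary(dico)
+#   snd$run("/path/to/gateway/", "gate.o")
+#
+# and, inside the script launched through the gateway:
+#
+#   rcv <- ReceiverStub$new()                    # parses commandArgs(TRUE)
+#   rcv$getArgument("rate")$getValueAsNumeric()  # 0.5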
+# licence : GNU lgpl +# you can contact me at : theo.labt@gmail.com +# + +library(R6) +library(jsonlite) +library(stringr) + +source("lib/rgate/File.R") +source("lib/rgate/Argument.R") + +improveSerialize = function(serializedLine) { + serializedLine = gsub(':\\["', ':"', serializedLine) + serializedLine = gsub('"]', '"', serializedLine) + serializedLine = gsub('"]]', '"]', serializedLine) + serializedLine = gsub('"]}', '"}', serializedLine) + serializedLine = gsub( '"],','",', serializedLine) + return(serializedLine) + } +improveDeSerialize = function(serializedLine) { + serializedLine = gsub( ':"', ':\\["', serializedLine) + serializedLine = gsub( ': "', ':\\["', serializedLine) + serializedLine = gsub( '"]', '"]]', serializedLine) + serializedLine = gsub( '"}', '"]}', serializedLine) + serializedLine = gsub( '",', '"],', serializedLine) + return(serializedLine) + } + +Stub <- R6Class("Stub", +public = list( + outputFile = NA, + dictionaries = list(), + actif = TRUE, + + initialize = function(outputFile = NA) { + if(is.na(outputFile)) + outputFile = OutputFile$new("","") + + self$actif=TRUE + + self$outputFile = outputFile + }, + + addDictionary = function(dictionary) { + self$dictionaries = append(self$dictionaries, dictionary) + }, + + displayDictionaries = function() { + if(self$actif) { + dictionaries = list() + dico = list() + + for(dictionary in self$dictionaries) + dictionaries = append(dictionaries, dictionary$serialize()) + + dico$Dictionaries = dictionaries + json_line = toJSON(dico) + + print(json_line) + + invisible(json_line) + } + invisible("") + }, + + displayOutputFile = function() { + if(self$actif) { + json_line = toJSON(self$outputFile$serialize()) + + print(json_line) + + invisible(json_line) + } + invisible("") + }, + + displayAll = function() { + if(self$actif) { + invisible(paste0(self$displayOutputFile(), self$displayDictionaries())) + } + invisible("") + }, + + getArgument = function(name) { + if(self$actif) { + return(self$findArgumentWithName(name)) + } + return(NULL) + }, + + findArgumentWithName = function(name) { + if(self$actif) { + dico = NULL + for(dictionary in self$dictionaries) { + dico = dictionary$getArgument(name) + if(!is.null(dico)) + break + } + + return(dico) + } + return(NULL) + }, + + findDictionaryWithName = function(name) { + if(self$actif) {- + for(dico in self$dictionaries){ + if(dico$name == name) + return(dico) + } + } + return(NULL) + } +)) + + +SenderStub <- R6Class("SenderStub", inherit = Stub, +public = list( + execFile = NA, + initialize = function(execFile=NA, outputFile=NA) { + super$initialize(outputFile = outputFile) + self$execFile = execFile + }, + + run = function(gatePath, gateName) { + + print(" =============== Running gateway =============== ") + system(paste0(gatePath , gateName , " " , str_replace_all(self$serialize(), '"', '\\\\"'))) + print(" =============== Gateway ending ================ ") + + invisible(self) + }, + + displayExecFile = function() { + + json_line = toJSON(self$execFile$serialize()) + + print(json_line) + return(json_line) + + return("") + }, + + displayAll = function() { + + invisible(paste0(self$displayExecFile(), super$displayAll())) + + invisible("") + }, + + + serialize = function() { + + dico = list( + "ExecFile" = self$execFile$serialize()$ExecFile, + "OutputFile" = self$outputFile$serialize()$OutputFile + ) + + dictionaries = list() + + for(dictionary in self$dictionaries) { + dictionaries = append(dictionaries, dictionary$serialize()) + } + + dico$Dictionaries = dictionaries + + 
return(improveSerialize(toJSON(dico))) + + return("") + } +)) + + +ReceiverStub <-R6Class("ReceiverStub", inherit = Stub, +public = list( + + initialize = function() { + super$initialize(NA) + + self$deserialize(self$readArguments()) + }, + + readArguments = function() { + if(length(commandArgs(TRUE)) > 0) + return(paste(commandArgs(TRUE),collapse = ' ')) + + self$actif = FALSE + self$outputFile$actif = FALSE + + for(dico in self$dictionaries) { + dico$actif = FALSE + } + + return(-1) + }, + + deserialize = function(json_line) { + if(self$actif) { + if(json_line != -1) { + + json_line = improveDeSerialize(json_line) + data_line = fromJSON(json_line) + + self$outputFile = OutputFile$new(data_line$OutputFile$path[[1]], data_line$OutputFile$name[[1]]) + + dictionaries_json = data_line$Dictionaries + + for (i in seq_along(dictionaries_json)) { + dictionary = Dictionary$new(dictionaries_json[[i]]$name[[1]]) + dictionary$deserialize(dictionaries_json[[i]]$value[[1]]) + + self$dictionaries = append(self$dictionaries, dictionary) + } + } + } + + invisible(self) + } + +)) diff --git a/irrigation-R-codes/.RData b/irrigation-R-codes/.RData new file mode 100644 index 0000000000000000000000000000000000000000..9f730a25ac7ac881563ce332581f36154f7bf5b7 Binary files /dev/null and b/irrigation-R-codes/.RData differ diff --git a/irrigation-R-codes/.Rhistory b/irrigation-R-codes/.Rhistory new file mode 100644 index 0000000000000000000000000000000000000000..27a2af7d9bf446c0d9e69dc780c4ad0b1ea8aa3e --- /dev/null +++ b/irrigation-R-codes/.Rhistory @@ -0,0 +1,512 @@ +obj <- NULL; obj2 <- NULL; obj3 <- NULL +while (length(na.omit(obj)) == 0 | length(na.omit(obj2)) == 0 | length(na.omit(obj3)) == 0) { +obj <- as.numeric(read.table(file, nrow = 1, skip = k, colClasses = "character"))[1] +obj2 <- as.numeric(read.table(file, nrow = 1, skip = k + 1, colClasses = "character"))[1] +obj3 <- as.numeric(read.table(file, nrow = 1, skip = k + 2, colClasses = "character"))[1] +k <- k + 1 +} +write.table (Names,paste(inputdir,newreachfile,sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=F) +library(lubridate) +library(foreign) +res <- apply(as.matrix(vect_luid), 2, function(X){cultures[match(X, numJ2000_cultures)]}) +match(X, numJ2000_cultures) +library(xts) +library(maptools) +library(maptools) +View(rgeosStatus) +force(writeSpatialShape) +force(writePolyShape) +View(as.im.SpatialGridDataFrame) +force(writePointsShape) +force(writeLinesShape) +force(writeAsciiGrid) +force(trackAzimuth) +force(unionSpatialPolygons) +force(thinnedSpatialPoly) +force(symbolsInPolys) +force(SpatialPolygons2PolySet) +View(as.owin.SpatialGridDataFrame) +View(as.linnet.SpatialLines) +force(SpatialLinesMidPoints) +force(sp2Mondrian) +force(sp2WB) +force(sp2tmap) +force(ArcObj2SLDF) +force(CCmaps) +force(checkPolygonsHoles) +force(ContourLines2SLDF) +force(gcDestination) +library(maptools) +source("~/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r") +source("~/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r") +source("~/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r") +source("~/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r") +source("~/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r") +source("~/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r") +source("~/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r") +chemin <- '/home/tlabrosse/Bureau/maestro/irrigation-R-codes/Irrigation/Shapes' # chemin shape +shp_file <- 'hrus_irriguees_sur_Rhone.shp' 
+shp_file <- 'hrus_irriguees_sur_Rhone.shp' +# Code canton des HRUs irriguées +HRUs <- readShapeSpatial(paste0(chemin, shp_file), proj4string=CRS ("+init=epsg:2154"), verbose=FALSE, repair=FALSE, IDvar=NULL, force_ring=FALSE, delete_null_obj=FALSE, retrieve_ABS_null=FALSE) +source("~/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r") +chemin <- '/home/tlabrosse/Bureau/maestro/irrigation-R-codes/Irrigation/Shapes/' # chemin shape +shp_file <- 'hrus_irriguees_sur_Rhone.shp' +# Code canton des HRUs irriguées +HRUs <- readShapeSpatial(paste0(chemin, shp_file), proj4string=CRS ("+init=epsg:2154"), verbose=FALSE, repair=FALSE, IDvar=NULL, force_ring=FALSE, delete_null_obj=FALSE, retrieve_ABS_null=FALSE) +View(HRUs) +regroup <- cbind(HRUs$CAT,HRUs$CODE_CAN_1) +View(regroup) +View(regroup) +View(regroup) +View(regroup) +cantons <- regroup[order(regroup[,2]),] +View(cantons) +View(cantons) +source("~/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r") +source("~/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r") +library(raster) +un_canton <- unique(cantons[,2]) +source("~/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r") +source("~/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r") +source("~/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r") +shp_file <- 'hrus_irriguees_sur_Rhone.dbf' +# Code canton des HRUs irriguées +HRUs <- read.dbf(paste0(chemin, shp_file)) +regroup <- cbind(HRUs$CAT,HRUs$CODE_CAN_1) # join CAT - CODE_CAN_1 (deux id) +cantons <- regroup[order(regroup[,2]),] # sort +un_canton <- unique(cantons[,2]) # remove all duplicates +source("~/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r") +source("~/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r") +library(rgeos) +library(foreign) +library(sp) +library(raster) +source("lib/readwrite_functions_J2000.R") +source("lib/criteria_functions.R") +source("lib/zoo_functions.r") +source ('lib/Analyse_hrus_function.r') +source ('lib/Soil_proportion_function_library.r') +source ("lib/Soil_proportion_function_francois.r") +# source('~/exec/hydrotools/R/Ivan/stage - old/Barre_de_progression_boucle.r') +source('lib/aggregateZoo_functions.r') +source('lib/MDR_AERMCprelev.r') +source('lib/MDR_utilitaires.r') +source('lib/J2000_postprocessing_functions.R') +source("~/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r") +source('lib/first.R') +source('lib/first.R') +source("~/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r") +source('lib/first.R') +source('lib/first.R') +source("~/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r") +source('lib/first.R') +#test3 +config <- 'test4_MA' #== Nom_simu +chemin <- '/home/tlabrosse/Bureau/maestro/irrigation-R-codes/Irrigation/Shapes/' # chemin shape +shp_file <- 'hrus_irriguees_sur_Rhone.dbf' +chemin_sortie <- '~/JAMS/modeldata/J2K_Rhone_Irrigation/output/' # simus +source("~/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r") +chemin_sortie <- '/home/tlabrosse/Bureau/maestro/irrigation-R-codes/resultats OUT/' +source("~/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r") +pdfname <- paste0("/home/tlabrosse/Bureau/maestro/irrigation-R-codes/resultats OUT/Comparaison_Irrig_", config, ".pdf") +chemin_sortie <- '/home/tlabrosse/Bureau/maestro/irrigation-R-codes/resultats OUT/' +pdfname <- paste0("/home/tlabrosse/Bureau/maestro/irrigation-R-codes/resultats OUT/Comparaison_Irrig_", config, ".pdf") +library(rgeos) +library(foreign) +library(sp) 
+library(raster) +source('lib/first.R') +#test3 +config <- 'test4_MA' #== Nom_simu +chemin <- '/home/tlabrosse/Bureau/maestro/irrigation-R-codes/Irrigation/Shapes/' # chemin shape +shp_file <- 'hrus_irriguees_sur_Rhone.dbf' +chemin_sortie <- '/home/tlabrosse/Bureau/maestro/irrigation-R-codes/resultats OUT/' +pdfname <- paste0("/home/tlabrosse/Bureau/maestro/irrigation-R-codes/resultats OUT/Comparaison_Irrig_", config, ".pdf") +Nom_simu <- config +HRULoop <- ReadLoopDaily(paste0(chemin_sortie, Nom_simu, '/'), "HRULoop.dat", TRUE) +Dates <- HRULoop$dates +# Code canton des HRUs irriguées +HRUs <- read.dbf(paste0(chemin, shp_file)) +regroup <- cbind(HRUs$CAT,HRUs$CODE_CAN_1) # join CAT - CODE_CAN_1 (deux id) +cantons <- regroup[order(regroup[,2]),] # sort +un_canton <- unique(cantons[,2]) # remove all duplicates +# Calcul des chroniques journalières : cantonXXX_Demande, cantonXXX_Transfert, de dimensions : ncol=nb_HRUS_in_canton, nrows=Ntime +for (cant in un_canton){ +Nom <- paste0('canton', cant) +HRUs_irr <- cantons[which(cantons[,2]== cant),1] # HRUs irriguées du canton "cant" +Dem <- NULL +Transf <- NULL +for (k in HRUs_irr){ +Dem <- cbind(Dem,HRULoop$Data[which(HRULoop$Data[,1]==k),which(colnames(HRULoop$Data)=='irrigationDemand')]) # L +Transf <- cbind(Transf,HRULoop$Data[which(HRULoop$Data[,1]==k),which(colnames(HRULoop$Data)=='irrigationTotal')]) # L +} +assign(paste0(Nom, '_Demande'), Dem) +assign(paste0(Nom, '_Transfert'), Transf) +} +# Calcul des Demande et Transferts annuels interannuels par canton +Demande_interannuelle <- NULL +Transfert_interannuel <- NULL +for (cant in un_canton){ +Nom <- paste0('canton', cant) +obj1 <- aggregateZoo(na.omit(zoo(apply(get(paste0(Nom, '_Demande')), 1, sum), Dates)), 'y', 'sum')/1000. # m3 +obj2 <- aggregateZoo(na.omit(zoo(apply(get(paste0(Nom, '_Transfert')), 1, sum), Dates)), 'y', 'sum')/1000. # m3 +z_dem <- mean(obj1 [-c(1,2,24:28)] ) #Valeur sur 1987 - 2007 +z_transf <- mean( obj2 [-c(1,2,24:28)] ) #Valeur sur 1987 - 2007 +Demande_interannuelle <- rbind( Demande_interannuelle, z_dem) +Transfert_interannuel <- rbind( Transfert_interannuel, z_transf) +} +irrig_interannuelle_simu <- cbind(canton=un_canton,demande=Demande_interannuelle,transfert=Transfert_interannuel) +rownames(irrig_interannuelle_simu) <- NULL +colnames(irrig_interannuelle_simu) <- c('canton','demande','tranfert') +# *** PRELEVEMENTS AERMC *** +# -------------------------- +Prelev <- Prelev8182_1987_2007() # m3 . 81: GRAV ; 82 : non-grav +# Pour certains des cantons agricoles modélisés, les prélèvements sont nuls (eg le canton n'apparait pas dans les prélèvements de l'AERMC) +# => ajouter cette colonne aux prélèvements avec pour valeur 0 +prelev <- NULL +for (cant in un_canton){ +if (length(Prelev[which(Prelev[,1] == cant)])>0) { +prelev <- rbind(prelev,Prelev[which(Prelev[,1] == cant),2]) +} else { +prelev <- rbind(prelev,0.) +} +} +PrelevAll <- prelev # m3/yr +colnames(PrelevAll) <- 'PrelevAll' +Prelev <- Prelev82_1987_2007() +prelev <- NULL +for (cant in un_canton){ +if (length(Prelev[which(Prelev[,1] == cant)])>0) { +prelev <- rbind(prelev,Prelev[which(Prelev[,1] == cant),2]) +} else { +prelev <- rbind(prelev,0.) +} +} +PrelevNonGrav <- prelev # m3/yr +colnames(PrelevNonGrav) <- 'PrelevNonGrav' +Prelev <- Prelev8182_2008_2012() # +prelev <- NULL +for (cant in un_canton){ +if (length(Prelev[which(Prelev[,1] == cant)])>0) { +prelev <- rbind(prelev,Prelev[which(Prelev[,1] == cant),2]) +} else { +prelev <- rbind(prelev,0.) 
+} +} +PrelevAll_post2008 <- prelev # m3/yr +colnames(PrelevAll_post2008) <- 'PrelevAll_post2008' +# tous les cantons +comparaison <- cbind(irrig_interannuelle_simu, PrelevAll, PrelevNonGrav, PrelevAll_post2008) +save(comparaison,file= paste0('~/Documents/MDR/irrigation/RDATA/Comparaison_Irrig_', config, '.Rdata')) +save(comparaison,file= paste0('~/Documents/MDR/irrigation/RDATA/Comparaison_Irrig_', config, '.Rdata')) +save(comparaison,file= paste0('~/Documents/MDR/irrigation/RDATA/Comparaison_Irrig_', config, '.Rdata')) +# seuls ceux présents à >99% sur notre domaine +Cantons_Rhone <- c(101,117,118,119,120,140,518,717,722,724,2602,2604,2607,2611,2613,2615,2616,2619,2621,2623,2625,2626,2628,2629,2632,2634,3006,3023,3026,3802,3807,3808,3815,3819,3822,3824,3825,3830,3837,3846,3853,4213,4233,6907,6924,6931,6937,6938,6944,6945,6948,6949,7405,8405,8406,8409,8413,8415,8416,8418,8423) +# +Cantons_Durance <- c(410,413,414,416,419,420,421,427,429,430,505,509,512,515,516,522,523,524,1326,1327,8319,8408,8411) +# +Cantons_Saone <- c(102,126,135,2103,2114,2134,2138,3909,6905,6910,6925,7116,7151) +# fichier à charger pour avoir les cultures dominantes par canton +canton_cult <- read.dbf('~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig_CultureNew/AleatoirIrrig_CN_hrus_decoupees.dbf') +canton_cult <- canton_cult[, c('CODE_CAN_1','LANDUSEID')] +canton_cult <- canton_cult[!duplicated(canton_cult$CODE_CAN_1),] +par (pty="m") +# Rhone +mat_Rhone <- as.matrix(comparaison[which(comparaison[,1] %in% Cantons_Rhone),2:6]) +cantonlist <- comparaison[which(comparaison[,1] %in% Cantons_Rhone),1] +culturelist <- substr(luid2cult(canton_cult[match(cantonlist,canton_cult$CODE_CAN_1),2]), start=1, stop=3) +row.names(mat_Rhone) <- paste(cantonlist, culturelist) +petits <- (which(mat_Rhone[, 3]/1000000<10)) +barplot(t(mat_Rhone[petits,])/1000000., beside = TRUE, col = c("red", "blue", "green","forestgreen", "black"), legend.text = TRUE, main="RHONE : Demande,Transfert et Prélèvements AERMC sur 1987-2007", xlab='cantons', ylab='Mm3', ylim=c(0,10),border=NA,las=2, cex.names=0.65) +barplot(t(mat_Rhone[-petits,])/1000000., beside = TRUE, col = c("red", "blue", "green","forestgreen","black"), legend.text = TRUE, main="", xlab='cantons', ylab='Mm3', ylim=c(0,150),las=2, cex.names=0.65) +par (pty="m") +# Durance +mat_Durance <- as.matrix(comparaison[which(comparaison[, 1] %in%Cantons_Durance), 2:6]) +cantonlist <- comparaison[which(comparaison[, 1] %in% Cantons_Durance), 1] +culturelist <- substr(luid2cult(canton_cult[match(cantonlist, canton_cult$CODE_CAN_1), 2]), start=1, stop=3) +row.names(mat_Durance) <- paste(cantonlist, culturelist) +petits <- (which(mat_Durance[, 3]/1000000<20)) +barplot(t(mat_Durance[petits,])/1000000, beside = TRUE, col = c("red", "blue", "green","forestgreen", "black"), legend.text = TRUE, main="DURANCE : Demande,Transfert et Prélèvements AERMC sur 1987-2007", xlab='cantons', ylab='Mm3', las=2, ylim=c(0,20),cex.names=0.65) +barplot(t(mat_Durance[-petits,])/1000000, beside = TRUE, col = c("red", "blue", "green","forestgreen", "black"), legend.text = TRUE, main="", xlab='cantons', ylab='Mm3', las=2, ylim=c(0,150),cex.names=0.65) +par (pty="m") +# Saone +mat_Saone <- as.matrix(comparaison[which(comparaison[, 1] %in%Cantons_Saone), 2:6]) +cantonlist <- comparaison[which(comparaison[, 1] %in% Cantons_Saone), 1] +culturelist <- substr(luid2cult(canton_cult[match(cantonlist, canton_cult$CODE_CAN_1), 2]), start=1, stop=3) +row.names(mat_Saone) <- paste(cantonlist, culturelist) +barplot(t(mat_Saone)/1000000, 
beside = TRUE, col = c("red", "blue", "green","forestgreen", "black"), legend.text = TRUE, main="SAONE : Demande,Transfert et Prélèvements AERMC sur 1987-2007", xlab='cantons', ylab='Mm3', las=2,cex.names=0.65) +barplot(t(mat_Saone)/1000000, beside = TRUE, col = c("red", "blue", "green","forestgreen", "black"), legend.text = TRUE, main="SAONE : Demande,Transfert et Prélèvements AERMC sur 1987-2007", xlab='cantons', ylab='Mm3', las=2,cex.names=0.65) +graphics.off() +graphics.off() +# -- Irrigation par hectare irrigué, pour détection des erreurs de surface ou pb de choix de méthode d'irrigation +culture_hru <- luid2cult(HRUs$LANDUSEID) +irrigarea_cant <- NULL # SAU irriguée par canton dans notre modélisation (proche valeurs du RGA) +culture_cant <- NULL +for (cant in un_canton){ +irrigarea_cant <- c(irrigarea_cant, sum(HRUs$AREA[which(HRUs$CODE_CAN_1 ==cant)])/10000.) #hectares +culture_cant <- c(culture_cant, unique(culture_hru[which(HRUs$CODE_CAN_1 ==cant)])) +} +comparaison_surf <- cbind(un_canton, irrig_interannuelle_simu[, 2:3]%/%irrigarea_cant, PrelevAll%/%irrigarea_cant) +Rhone <- comparaison_surf[which(comparaison_surf[, 1] %in% Cantons_Rhone),] +cultures_Rhone <- culture_cant[which(un_canton %in% Cantons_Rhone )] +N <- length(unique(cultures_Rhone)) #==> 5 types de culture : "Mais" "Prairies" "Vergers" "Vigne" "maraichage" +culture_locale <- unique(cultures_Rhone) +pdfname <- paste0("~/Documents/MDR/irrigation/ComparaisonSurf_Rhone_", config, ".pdf") +pdf(pdfname,paper="special",width=8,height=14) +layout(matrix(1:N, N, 1)) +par (pty="m") +for (cult in 1:N){ +mat <- as.matrix(Rhone[which(cultures_Rhone==culture_locale[cult]), 2:4]) +row.names(mat) <- Rhone[which(cultures_Rhone==culture_locale[cult]), 1] +barplot(t(mat), beside = TRUE, col = c("red", "blue", "green"), legend.text = TRUE, main= paste0('Rhone , ', culture_locale[cult]), xlab='cantons', ylab='m3 / hectare', las=2) +} +graphics.off() +mat_Rhone <- as.matrix(comparaison[which(comparaison[, 1] %in% Cantons_Rhone), 2:5]) +row.names(mat_Rhone) <- comparaison[which(comparaison[, 1] %in% Cantons_Rhone), 1] +petits <- (which(mat_Rhone[, 3]/1000000<10)) +barplot(t(mat_Rhone[petits,])/1000000., beside = TRUE, col = c("red", "blue", "green","forestgreen"), legend.text = TRUE, main="RHONE : Demande,Transfert et Prélèvements AERMC sur 1987-2007", xlab='cantons', ylab='Mm3', ylim=c(0,10),border=NA,las=2, cex.names=0.75) +barplot(t(mat_Rhone[-petits,])/1000000., beside = TRUE, col = c("red", "blue", "green","forestgreen"), legend.text = TRUE, main="", xlab='cantons', ylab='Mm3', ylim=c(0,150),las=2) +# fichiers pour extraction des MA : +simufile <- '~/JAMS/modeldata/J2K_Rhone_Barrages/output/BAR/' +filename <- 'ReachLoop.dat' +# le vieux et nouveau Reach.par +paramdir <- '~/JAMS/modeldata/J2K_Rhone_Irrigation/parameter/' +oldparfile <- 'reach.par' +newparfile <- 'reach_MA.par' # avec nouveau param MA +# code +rloop <- ReadLoopDaily(simufile, filename, FALSE) +MA <- NULL +Nbreach <- dim(rloop$Data)[1]/length(rloop$dates) +Nbtime <- length(rloop$dates) +Ntot <- dim(rloop$Data)[1] +for (i in (1:Nbreach)){ # 1 to 3075 reaches +index <- seq(i, Ntot, Nbreach) +chronique <- rloop$Data[index, 2] +MA_tmp <- mean(chronique) +MA <- c(MA, MA_tmp) # L/d +} +ID <- rloop$Data[1:Nbreach, 1] +MA <- rbind(ID, MA) +reaches <- Chargement_param(oldReachParfile, parfile) +order <- match(ID, reaches$V1) # l'ordre des simus est inversé par rapport à l'ordre du reach.par... 
+MA <- MA[, order] +newparamName <- "MA" +newparamVal <- round(MA[2,]) +newparamUnit <- "L/d" +add_param(paramdir,oldparfile,newparfile,newparamName,newparamVal,newparamUnit) +# ex : cantons (retenu) 3909 (reach6222) : forte baisse de la demande suite à introduction de cette paramétrisation. +simnewdir <- paste0(chemin_sortie, config, "/") +filename <- 'ReachLoop.dat' +simrefdir <- '~/JAMS/modeldata/J2K_Rhone_Natural_Hydrology/output/newREF/' +simolddir <- paste0(chemin_sortie, 'test3', "/") +myreach <- 6222 +# avec MA +rloopnew <- ReadLoopDaily(simnewdir, filename, FALSE) +runoffnew <- rloopnew$Data[which(rloopnew$Data[, 1]==myreach), 'simRunoff'] +runoffnew <- xts(runoffnew, as.POSIXct(rloopnew$dates, format='%Y-%m-%d')) +# Hydro Nat +rloopref <- ReadLoopDaily(simrefdir, filename, FALSE) +runoffref <- rloopref$Data[which(rloopref$Data[, 1]==myreach), 'simRunoff'] +runoffref <- xts(runoffref, as.POSIXct(rloopref$dates, format='%Y-%m-%d')) +# irrig sans MA +rloopold <- ReadLoopDaily(simolddir, filename, FALSE) +runoffold <- rloopold$Data[which(rloopold$Data[, 1]==myreach), 'simRunoff'] +runoffold <- xts(runoffold, as.POSIXct(rloopold$dates, format='%Y-%m-%d')) +MA <- mean(runoffref) +MA <- mean(runoffref) +MAts <- xts(rep(MA, length(runoffref)), as.POSIXct(rloopref$dates, format='%Y-%m-%d')) +MA10ts <- xts(rep(MA*.1, length(runoffref)), as.POSIXct(rloopref$dates, format='%Y-%m-%d')) +year <- '2009' +period <- paste0(year, '-05-01/', year, '-10-31') +legend("topright",legend=names(variablesfut),y.intersp = 1, lty= 1,bty="n",col = colors,xpd=NA,cex=0.8) +source("~/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r") +source("~/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r") +library(combinat) +library(combinat) +library(combinat) +library(combinat) +library(combinat) +library(combinat) +library(combinat) +library(combinat) +library(combinat) +library(combinat) +library(combinat) +library(foreign) +# ------------------------------------------------------------------------ +# index_of_nearest <- function(x, number){ +# Finds the index of the element in x that is closest to number. +# Args: +# x: A vector of numbers +# number: A number +# Returns: +# The index of the element in x that is closest to number +# ------------------------------------------------------------------------ +index_of_nearest <- function(x, number){ +return (which(abs(x-number)==min(abs(x-number))))} +# ------------------------------------------------------------------------ +#value_of_nearest(c(5,2,1),6) +# value_of_nearest <- function(x, number) +# Finds the value of the element in x that is closest to number. +# Args: +# x: A vector of numbers +# number: A number +# Returns: +# The value of the element in x that is closest to number +# ------------------------------------------------------------------------ +value_of_nearest <- function(x, number){ +return (x[which(abs(x-number)==min(abs(x-number)))])} +# Args: +# n: The current number of HRUs to be added to the combination +# S_HRUs: A vector of HRUs +# S_irr_Canton: The target irrigation area +# tolerance: The maximum error tolerated by the user +# Returns: +# The index of the HRUs that best fit the target irrigation area, +# " continue " if the current combination does not work but a smaller combination might, +# " non convergence " if the current combination does not work and neither does a smaller combination. 
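+# Illustration only (not part of the original session): with S_HRUs = c(120, 80, 310),
+# S_irr_Canton = 195 and tolerance = 10 (%), try_combination(2, S_HRUs, 195, 10) builds
+# the pairs (120,80), (120,310), (80,310), keeps the sum closest to 195 (200, error ~2.6 % < 10 %)
+# and returns the indices c(1, 2); with tolerance = 1 it would return "non convergence",
+# since every candidate pair already overshoots the target.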
+# ------------------------------------------------------------------------ +try_combination <- function(n, S_HRUs, S_irr_Canton, tolerance){ +# { +if (n < length(S_HRUs)){ +combi <- combn(S_HRUs, n) +} else { +combi <- t(t(S_HRUs)) # TODO is transposing it twice a good idea? isn't it useless? +} +sumcombi <- apply(combi, 2, sum) +# } TODO from testing, all this part is useless. need confirmation tho ~~~~ +nearestarea <- value_of_nearest(sumcombi, S_irr_Canton) +error_nearest <- abs(1-nearestarea/S_irr_Canton)*100. +if (error_nearest[1] < tolerance){ +combi_selected <- index_of_nearest(sumcombi, S_irr_Canton) +index_selected <- NULL +for (i in 1:n){ +index_selected <- c(index_selected, which(S_HRUs==combi[, combi_selected][i])) +} +return (index_selected) +} else if (min(sumcombi) > S_irr_Canton){ +if (n==1){ +return(which(sumcombi==min(sumcombi))) +} else { +return ("non convergence") +} +} else { +return ("continue") +} +} +# ------------------------------------------------------------------------ +# main <- function(hrus_irrig_cantons_filePath, cantons_irrigues_filePath) +# Main function of the irrigation assignment process. +# Args: +# hrus_irrig_cantons_filePath: The path to the HRUs irrigated cantons data file +# cantons_irrigues_filePath: The path to the cantons irrigated data file +# Returns: +# A file with the irrigation status of every HRU +# ------------------------------------------------------------------------ +main <- function(hrus_irrig_cantons_filePath, cantons_irrigues_filePath) { +hrus_irrig_cantons <- read.dbf(hrus_irrig_cantons_filePath) +cantons_irrigues <- read.dbf(cantons_irrigues_filePath) +N_hru <- dim(hrus_irrig_cantons)[1] +# creates two vector of the size of the number of currently irrigated HRUs +irrigated <- rep(0, N_hru) +area_error <- rep(0, N_hru) +# creates a vector of the size of the number irrigated cantons +canton_traite <- rep(0, dim(cantons_irrigues)[1]) +tolerances <- c(10, 30, 100) +for(tolerance in tolerances) { +for (numcanton in cantons_irrigues$CODE_CAN_1[which(canton_traite==0)]){ +# TODO is this useful in any way ? indice_canton is just the index of numcanton rn, there must be a less heavy method +indice_canton <- which(cantons_irrigues$CODE_CAN_1==numcanton) +# Find the HRU of the current canton +hrus <- hrus_irrig_cantons[which(hrus_irrig_cantons$CODE_CAN_1==numcanton), ] +if (dim(hrus)[1]<=0){ +canton_traite[indice_canton] <- 1 +} else { +indices <- which(hrus_irrig_cantons$CODE_CAN_1==numcanton) # trouve le(s) HRU(s) associe au canton etudie +S_HRUs <- hrus$AREA # surface du/des HRU(s) en m2 +S_irr_Canton <- cantons_irrigues[which(cantons_irrigues$CODE_CAN_1==numcanton), ]$SAU_IRR*100. # le "*100" lie au fait que les donnees du RGA sont en ares = 100m2 +index_of_HRUs <- "continue" +n_elements_combi <- 1 +while ((index_of_HRUs=="continue") && (n_elements_combi <= length(S_HRUs))){ +index_of_HRUs <- try_combination(n_elements_combi, S_HRUs, S_irr_Canton, tolerance) +n_elements_combi <- n_elements_combi+1 +} +if (index_of_HRUs=="non convergence" || index_of_HRUs=="continue") { +irrigated[indices] <- NA +} else { +irrigated[indices] <- 0 +irrigated[indices[index_of_HRUs]] <- 1 +area_error[indices] <- (sum(S_HRUs[index_of_HRUs])/S_irr_Canton-1)*100. +canton_traite[indice_canton] <- 1 +} +} +} +} +irrig_type <- rep(0, N_hru) +# il est entrain de mettre un vector dans chaque case du vector là , je me trompe ? 
+irrig_type[which(irrigated >0)] <- hrus_irrig_cantons$IRRIG_TYPE[which(irrigated >0)] +# TODO put it in the right folder +write.table(cbind(hrus_irrig_cantons$CAT,hrus_irrig_cantons$AREA, irrigated, irrig_type, area_error),'/home/tlabrosse/Bureau/maestro/irrigation-R-codes/resultats OUT/Irrigated_AleatoireHRUselect.csv',append=F, sep="\t", row.names=FALSE, col.names=c('HRUnum', 'HRUarea', 'irrigated', 'irrig_type', 'area_error')) +} +main('/home/tlabrosse/Bureau/maestro/irrigation-R-codes/Irrigation/Shapes/hrus_irriguees_sur_Rhone.dbf', +'/home/tlabrosse/Bureau/maestro/irrigation-R-codes/Irrigation/Shapes/Cantons_irrigues.dbf' +) +# TODO what is this ? -> +# thats is probably not the right file +# thats is probably not the right file +# HRU_Aleatoir <- read.dbf('~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig/hrus_irriguees_Aleatoires.dbf') +# thats is probably not the right file +# HRU_Aleatoir <- read.dbf('~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig/hrus_irriguees_Aleatoires.dbf') +# SHRUirr_can <- NULL +# thats is probably not the right file +# HRU_Aleatoir <- read.dbf('~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig/hrus_irriguees_Aleatoires.dbf') +# SHRUirr_can <- NULL +# for (un_canton in sort(unique(HRU_Aleatoir$CODE_CAN_1))){ +# thats is probably not the right file +# HRU_Aleatoir <- read.dbf('~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig/hrus_irriguees_Aleatoires.dbf') +# SHRUirr_can <- NULL +# for (un_canton in sort(unique(HRU_Aleatoir$CODE_CAN_1))){ +# SHRUirr_can <- c(SHRUirr_can, sum(HRU_Aleatoir[which(HRU_Aleatoir$CODE_CAN_1 == un_canton),]$AREA/100)) #ares +# thats is probably not the right file +# HRU_Aleatoir <- read.dbf('~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig/hrus_irriguees_Aleatoires.dbf') +# SHRUirr_can <- NULL +# for (un_canton in sort(unique(HRU_Aleatoir$CODE_CAN_1))){ +# SHRUirr_can <- c(SHRUirr_can, sum(HRU_Aleatoir[which(HRU_Aleatoir$CODE_CAN_1 == un_canton),]$AREA/100)) #ares +# } +# thats is probably not the right file +# HRU_Aleatoir <- read.dbf('~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig/hrus_irriguees_Aleatoires.dbf') +# SHRUirr_can <- NULL +# for (un_canton in sort(unique(HRU_Aleatoir$CODE_CAN_1))){ +# SHRUirr_can <- c(SHRUirr_can, sum(HRU_Aleatoir[which(HRU_Aleatoir$CODE_CAN_1 == un_canton),]$AREA/100)) #ares +# } +# # TODO put it in the right folder +# thats is probably not the right file +# HRU_Aleatoir <- read.dbf('~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig/hrus_irriguees_Aleatoires.dbf') +# SHRUirr_can <- NULL +# for (un_canton in sort(unique(HRU_Aleatoir$CODE_CAN_1))){ +# SHRUirr_can <- c(SHRUirr_can, sum(HRU_Aleatoir[which(HRU_Aleatoir$CODE_CAN_1 == un_canton),]$AREA/100)) #ares +# } +# # TODO put it in the right folder +# write.table(cbind(sort(unique(HRU_Aleatoir$CODE_CAN_1)),SHRUirr_can),'/home/tlabrosse/Bureau/maestro/irrigation-R-codes/resultats OUT/Bilan_HRU_Aleatoir.txt',append=F, sep="\t", row.names=FALSE, col.names=c('canton', 'HRUirrig_area')) +# thats is probably not the right file +# HRU_Aleatoir <- read.dbf('~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig/hrus_irriguees_Aleatoires.dbf') +# SHRUirr_can <- NULL +# for (un_canton in sort(unique(HRU_Aleatoir$CODE_CAN_1))){ +# SHRUirr_can <- c(SHRUirr_can, sum(HRU_Aleatoir[which(HRU_Aleatoir$CODE_CAN_1 == un_canton),]$AREA/100)) #ares +# } +# # TODO put it in the right folder +# write.table(cbind(sort(unique(HRU_Aleatoir$CODE_CAN_1)),SHRUirr_can),'/home/tlabrosse/Bureau/maestro/irrigation-R-codes/resultats OUT/Bilan_HRU_Aleatoir.txt',append=F, sep="\t", row.names=FALSE, 
col.names=c('canton', 'HRUirrig_area')) +main('/home/tlabrosse/Bureau/maestro/irrigation-R-codes/Irrigation/Shapes/hrus_irriguees_sur_Rhone.dbf', +'/home/tlabrosse/Bureau/maestro/irrigation-R-codes/Irrigation/Shapes/Cantons_irrigues.dbf' +) +# The point of this function is to matdch a vector of numbers with a vector of strings. +# ------------------------------------------------------------------------------ +luid2cult <- function(vect_luid) { +cultures <- c('Vigne', 'Mais', 'Tournesol', 'Blé dur', 'Maraichage', 'PdT', 'Vergers', 'Prairies', 'Protéagineux', 'Riz', "Jachère","Divers", "Industrielles") +numJ2000_cultures <- c(19:31) +res <- apply(as.matrix(vect_luid), 2, function(X){cultures[match(X, numJ2000_cultures)]}) +as.vector(res) # Not sure this is necessary... +} +vec <- luid2cult(c(1:5)) +vec <- luid2cult(c(1:5)) diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/pcs/debug-breakpoints.pper b/irrigation-R-codes/.Rproj.user/4644B31D/pcs/debug-breakpoints.pper new file mode 100644 index 0000000000000000000000000000000000000000..4893a8a7c977f8819a9ea9bff0ed5b7b2deca480 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/pcs/debug-breakpoints.pper @@ -0,0 +1,5 @@ +{ + "debugBreakpointsState": { + "breakpoints": [] + } +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/pcs/files-pane.pper b/irrigation-R-codes/.Rproj.user/4644B31D/pcs/files-pane.pper new file mode 100644 index 0000000000000000000000000000000000000000..d8d46b31e05bacd206656866ae2ceac8af8da301 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/pcs/files-pane.pper @@ -0,0 +1,9 @@ +{ + "sortOrder": [ + { + "columnIndex": 2, + "ascending": true + } + ], + "path": "~/Bureau/maestro/irrigation-R-codes/lib" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/pcs/find-replace-in-files.pper b/irrigation-R-codes/.Rproj.user/4644B31D/pcs/find-replace-in-files.pper new file mode 100644 index 0000000000000000000000000000000000000000..793a28c18217178156123d518c157e5ec11e8e3b --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/pcs/find-replace-in-files.pper @@ -0,0 +1,15 @@ +{ + "dialog-state": { + "query": "AleatoirIrrig_hrus", + "path": "", + "regex": false, + "caseSensitive": false, + "wholeWord": false, + "filePatterns": [], + "excludeFilePatterns": [], + "resultsCount": 3, + "errorCount": 0, + "replaceErrors": "", + "projectRelative": true + } +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/pcs/source-pane.pper b/irrigation-R-codes/.Rproj.user/4644B31D/pcs/source-pane.pper new file mode 100644 index 0000000000000000000000000000000000000000..902cc6f8a1c4c64f44276e0450d167eb64c2e4a3 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/pcs/source-pane.pper @@ -0,0 +1,3 @@ +{ + "activeTab": 0 +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/pcs/windowlayoutstate.pper b/irrigation-R-codes/.Rproj.user/4644B31D/pcs/windowlayoutstate.pper new file mode 100644 index 0000000000000000000000000000000000000000..0b310c03b954e75552881296eebb71eb39f0f482 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/pcs/windowlayoutstate.pper @@ -0,0 +1,14 @@ +{ + "left": { + "splitterpos": 181, + "topwindowstate": "NORMAL", + "panelheight": 609, + "windowheight": 646 + }, + "right": { + "splitterpos": 387, + "topwindowstate": "NORMAL", + "panelheight": 609, + "windowheight": 646 + } +} \ No newline at end of file diff --git 
a/irrigation-R-codes/.Rproj.user/4644B31D/pcs/workbench-pane.pper b/irrigation-R-codes/.Rproj.user/4644B31D/pcs/workbench-pane.pper new file mode 100644 index 0000000000000000000000000000000000000000..75e70e94fd86ec52381e5df6ebd227a3255fe736 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/pcs/workbench-pane.pper @@ -0,0 +1,5 @@ +{ + "TabSet1": 0, + "TabSet2": 0, + "TabZoom": {} +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/persistent-state b/irrigation-R-codes/.Rproj.user/4644B31D/persistent-state new file mode 100644 index 0000000000000000000000000000000000000000..0eb3fb5d54369801ed3cb22c2401c916f89a2718 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/persistent-state @@ -0,0 +1 @@ +activeEnvironmentName="R_GlobalEnv" diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/rmd-outputs b/irrigation-R-codes/.Rproj.user/4644B31D/rmd-outputs new file mode 100644 index 0000000000000000000000000000000000000000..3f2ff2d6cc8f257ffcade7ead1ca4042c0e884b9 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/rmd-outputs @@ -0,0 +1,5 @@ + + + + + diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/saved_source_markers b/irrigation-R-codes/.Rproj.user/4644B31D/saved_source_markers new file mode 100644 index 0000000000000000000000000000000000000000..2b1bef112ac6921abda6162a65dbfcd8c6d55c80 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/saved_source_markers @@ -0,0 +1 @@ +{"active_set":"","sets":[]} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/0648EED4 b/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/0648EED4 new file mode 100644 index 0000000000000000000000000000000000000000..3bd021ff17a3990f3544f90557e34e32021dfada --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/0648EED4 @@ -0,0 +1,26 @@ +{ + "id": "0648EED4", + "path": "~/Bureau/maestro/irrigation-R-codes/MDR_select_culture_irriguée.r", + "project_path": "MDR_select_culture_irriguée.r", + "type": "r_source", + "hash": "2290479314", + "contents": "", + "dirty": false, + "created": 1652863293873.0, + "source_on_save": false, + "relative_order": 4, + "properties": { + "source_window_id": "", + "Source": "Source", + "cursorPosition": "146,0", + "scrollLine": "0" + }, + "folds": "", + "lastKnownWriteTime": 1652876534, + "encoding": "UTF-8", + "collab_server": "", + "source_window": "", + "last_content_update": 1652876534965, + "read_only": false, + "read_only_alternatives": [] +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/0648EED4-contents b/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/0648EED4-contents new file mode 100644 index 0000000000000000000000000000000000000000..a3edeea91c2b08b2c95e344b5bf3ff0902a2610d --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/0648EED4-contents @@ -0,0 +1,202 @@ +#***** SCRIPT déterminant la culture dominante à affecter à une HRU ******* +# *** en fonction des données de culture irriguées du RGA et *** +# * du besoin en eau théorique de chaque culture présente * + + +# auteur : IG +# date : 15-12-2015 + +# -------------------------------------------------------------------------- +library(gdata) +library(lubridate) + +source('lib/utilitaire_irrigation.R') + + +# 1. 
Rassembler les différentes sources de données +# ************************************************ + +# RGA +#-------------- + +RGA <- read.xls('~/Documents/MDR/irrigation/RGACultures2010_Cantons_BVRhone_sanssecret_20131001.xlsx', sheet = 3) +cantons <- RGA[, 1] +cultures <- c('Vigne', 'Mais', 'Tournesol', 'Blé dur', 'maraichage', 'PdT', 'Vergers', 'Prairies', 'Protéagineux', 'Riz', "Jachère", "Divers", "Industrielles") +RGA <- RGA[, c(19:26, 28, 30, 31, 29, 27)]# colonnes irriguées, 13 types de culture. ACGTUNG !!! les colonnes ne sont pas dans le meme ordre que dans J2000 !! +rownames(RGA) <- cantons +colnames(RGA) <- cultures + + +# param de J2000 +#----------------- + +# nom abrégé et numéro des cultures +# cette manière de faire est fragile, si les données venait à changer, il faudrait changer le code +numJ2000_cultures <- 19:31 +numnomcultures <- rbind(cultures, numJ2000_cultures) + +# Kc mensuels par cultures +luparam <- Chargement_param('~/JAMS/modeldata/J2K_Rhone_Irrigation/parameter/', 'landuse.par') +kc <- luparam[19:31, 3:14] +colnames(kc) <- 1:12 +rownames(kc) <- cultures + +# Période d'irrigation, transformées en fraction mensuelles +irrigstart <- luparam[19:31, 36] +irrigend <- luparam[19:31, 37] + # on met des valeurs réalistes là où pas d'info de base +# on dirait que cette partie de code à été fait pour des données très précise, pas sur que ça marche avec autre chose +irrigstart[9] <- irrigstart[6]; irrigend[9] <- irrigend[6] # prot==pdt +irrigstart[10] <- 100; irrigend[10] <- 250 # riz +irrigstart[11] <- irrigstart[2]; irrigend[11] <- irrigend[2] # jach et autres = mais +irrigstart[12] <- irrigstart[7]; irrigend[12] <- irrigend[7] # jardins et autres == vergers +irrigstart[13] <- irrigstart[7]; irrigend[13] <- irrigend[7] # industrielles == prairies + +date1 <- ymd_hms("2000/01/01 00:00:00") +irrigperiod <- interval(as.Date(irrigstart, date1), as.Date(irrigend, date1)) +debutmois <- c(date1, date1+months(1:11)) +finmois <- date1+months(1:12) +monthsperiods <- interval(debutmois, finmois) + +wheightedperiod <- NULL +for (cult in seq_along(cultures)){ + wheightedperiod <- rbind(wheightedperiod, as.period(intersect(monthsperiods, irrigperiod[cult]))/months(1)) +} +wheightedperiod[which(is.na(wheightedperiod))] <- 0 # rmqs : le calcul n'est pas tout à fait exact en raison de la conversion imprécise JulianDay -> Date + # pour la période d'irrigation ==> à améliorer. + + +# ETO mensuelle interannuelle par hru irriguée +# -------------------------------------------- +# This part of the program reads in a file of data on irrigated HRUs and loops through each HRU to calculate the monthly reference evapotranspiration. +# It outputs a file called "HRULoop.dat" with the monthly reference evapotranspiration for each HRU. +# It also assigns a variable to each HRU's monthly reference evapotranspiration, with the variable name being "refET_" followed by the HRU number. +# So, for example, if HRU 1 had a monthly reference evapotranspiration of 3 mm/month, the program would output a file with a single column and 12 rows, and would also create a variable called "refET_1" with the value 3. +# If HRU 2 had a monthly reference evapotranspiration of 4 mm/month, the program would output a file with a single column and 12 rows, and would also create a variable called "refET_2" with the value 4. +# And so on. +# The program does not produce any visual output. +# It is important to note that this program requires the source('lib/aggregateZoo_functions.r') in order to run properly. 
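+# In practice, after the loop below each irrigated HRU gets a variable named
+# refET_<CAT> holding what appear to be 12 interannual monthly mean refET values,
+# which is what the Kc x irrigation-period weighting in part 2 of this script
+# multiplies against.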
+# +# - liste des HRUs irriguées +hrus_all <- read.csv('~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig/Irrigated_AleatoireHRUselect.csv') +irrigated <- hrus_all[which(hrus_all$irrigated ==1),] + +HRULoop <- ReadLoopDaily('~/JAMS/modeldata/J2K_Rhone_Irrigation/output/refET/',"HRULoop.dat",TRUE) +Dates <- HRULoop$dates + +# - ET0 mensuelles interannuelles +for (myhru in irrigated[,1]){ + myrefET <- HRULoop$Data[which(HRULoop$Data[, 1]==myhru), which(colnames(HRULoop$Data)=='refET')] + myrefET <- aggregateZoo(zoo(myrefET, Dates), 'm', 'mean') + assign(paste0('refET_', myhru), myrefET) +} + +# 2. Comparaison des besoins théoriques sur chaque HRU et affectation du type de culture irrigué +# *********************************************************************************************** + +hrus_et_cantons <- read.dbf('~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig/AleatoirIrrig_hrus_decoupees.dbf') +culture_finale <- NULL +for (hrus in hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)]){ + + un_canton <- hrus_et_cantons$CODE_CAN_1[which(hrus_et_cantons$CAT==hrus)] + sommeprod <- (as.matrix(kc) *as.matrix(wheightedperiod)) %*% as.vector(get(paste0('refET_', hrus))) # une valeur par culture + refETmoyyear_ponderee <- t(sommeprod)*RGA[as.character(un_canton), 1:13] # .. pondérée par la surface en culture sur le canton. + culture_retenue <- cultures[which(refETmoyyear_ponderee==max(refETmoyyear_ponderee))] + numculture_retenue <- numJ2000_cultures[which(refETmoyyear_ponderee==max(refETmoyyear_ponderee))] + culture_finale <- c(culture_finale, numculture_retenue) +} + + + +# 3. modifications du fichiers hrus.par et des .dbf +# *************************************************** + +# hrus.par +#------------- +hruparam <- Chargement_param('~/JAMS/modeldata/J2K_Rhone_Irrigation/parameter/', 'hrus.par') +culture_init <- hruparam[which(hruparam$V1 %in% hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)] ), 11] +#culture_finale[which(culture_init!=culture_finale)] +corresp_irrigtype <- c(2, 1, 1, 1, 2, 1, 2, 1, 2, 3, 1, 1, 2) +irrigtype <- NULL +for (cult in culture_finale){ + ind <- which(numJ2000_cultures==cult) + irrigtype <- c(irrigtype, corresp_irrigtype[ind]) +} +# 11: landuseID +hruparam[which(hruparam$V1 %in% hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)] ), 11] <-culture_finale +# 15: irrigated +hruparam[which(hruparam$V1 %in% hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)] ), 15] <- 1 +hruparam[which(!(hruparam$V1 %in% hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)] )), 15] <- 0 +# 16: irrig_type +hruparam[which(hruparam$V1 %in% hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)] ), 16] <- irrigtype +hruparam[which(!(hruparam$V1 %in% hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)] )), 16] <- 0 + + +# on remet à 4 les landuseID agricoles des HRUs qui ne sont plus irriguées maintenant (proposition pour plus tard : on met la culture dominante non-irriguée, pour prendre en compte des Kc améliorés) +# c'est un peu compliqué car on n'a plus le hrus.par de référence sans irrigation.... 
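# --- Illustrative sketch only (toy values, not RGA or landuse.par data) --------
# The dominant-crop rule applied in section 2 above, reduced to 3 crops and
# 2 months: the crop kept for an HRU maximises kc x monthly irrigated fraction
# x monthly refET, summed over the months and weighted by the canton's
# irrigated area for that crop.
kc_toy    <- matrix(c(0.6, 0.9,  0.4, 1.1,  0.3, 0.5), nrow = 3, byrow = TRUE)  # crops x months
frac_toy  <- matrix(c(0.5, 1.0,  1.0, 1.0,  0.0, 0.8), nrow = 3, byrow = TRUE)  # irrigated fraction of month
refET_toy <- c(80, 120)                              # mm/month for one HRU
area_toy  <- c(Mais = 40, Vergers = 10, Vigne = 5)   # irrigated area per crop in the canton (ha)
besoin    <- (kc_toy * frac_toy) %*% refET_toy       # theoretical need per crop
names(which.max(as.vector(besoin) * area_toy))       # crop that would be assigned to the HRU
# --------------------------------------------------------------------------------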
+hruparam[which(!(hruparam$V1 %in% hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)] ) & (hruparam$V11>18) ), 11] <-4 +# step 1: Montagne (V4 : slope ; V11: landuseID) +indices <- which((hruparam$V4 > 10) & (hruparam$V11== 4)) +if (length(indices !=0)){ + hruparam[indices,11] <-12} + +# step 2: Dombes +Dombes_Chalaronne <- 6832 +Dombes_Veyle <- 6800 + +reach <- Chargement_param ('~/JAMS/modeldata/J2K_Rhone_Irrigation/parameter/','reach.par') +indID <- 1 +indLand <- 11 +indSub <- 9 + +brins_chala <- Topologie(Dombes_Chalaronne, reach) +brins_veyle <- Topologie(Dombes_Veyle, reach) + +Total_hru_Chala <- NULL +for (k in brins_chala){ + Total_hru_Chala <- c (Total_hru_Chala,hruparam[hruparam[,indSub] == k,indID])} +Total_hru_Veyle <- NULL +for (k in brins_veyle){ + Total_hru_Veyle <- c (Total_hru_Veyle,hruparam[hruparam[,indSub] == k,indID])} + +for (k in Total_hru_Chala){ + if(length(which(k == hruparam[which(hruparam[,indLand] == 4 ), indID ])) != 0) {hruparam[which(k == hruparam[,indID]),indLand] <- 18} +} +for (k in Total_hru_Veyle){ + if(length(which(k == hruparam[which(hruparam[,indLand] == 4 ), indID ]))!= 0) {hruparam[which(k == hruparam[,indID]),indLand] <- 18} +} +#test : which(hruparam$V11 == 18) +write_new_paramfile('~/JAMS/modeldata/J2K_Rhone_Irrigation/parameter/hrus.par', hruparam, '~/JAMS/modeldata/J2K_Rhone_Irrigation/parameter/hrus_AleatoirIrrig_NewCult.par') + + + +# .dbf +#------------- + +# dbf decoupee sur irrig +hrus_et_cantons$LANDUSEID[order(hrus_et_cantons$CAT)]<-culture_finale +hrus_et_cantons$IRRIG_TYPE[order(hrus_et_cantons$CAT)]<-irrigtype + +# dbf de toutes les hrus +hrus_irrigation_all <- read.dbf('~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig/AleatoirIrrig_hrus.dbf') +for (hrus in hrus_et_cantons$CAT){ + hrus_irrigation_all$LANDUSEID[which(hrus_irrigation_all$CAT == hrus)] <- hrus_et_cantons$LANDUSEID[which(hrus_et_cantons$CAT == hrus)] + hrus_irrigation_all$IRRIG_TYPE[which(hrus_irrigation_all$CAT == hrus)] <- hrus_et_cantons$IRRIG_TYPE[which(hrus_et_cantons$CAT == hrus)] +} +hrus_irrigation_all$IRRIGATED[which(hrus_irrigation_all$CAT %in% hrus_et_cantons$CAT)] <-1 +hrus_irrigation_all$IRRIGATED[which(!(hrus_irrigation_all$CAT %in% hrus_et_cantons$CAT))] <-0 + +# 4/1/2015 correction ex-post pour rétablir 4, 12 ou 18 selon agri plaine, montagne, dombes: +hruparam <- Chargement_param('~/JAMS/modeldata/J2K_Rhone_Irrigation/parameter/','hrus_AleatoirIrrig_NewCult.par') +for (hrus in hruparam$V1){ + hrus_irrigation_all$LANDUSEID[which(hrus_irrigation_all$CAT == hrus)] <- hruparam[which(hruparam$V1==hrus),11] + hrus_irrigation_all$IRRIGATED[which(hrus_irrigation_all$CAT == hrus)] <- hruparam[which(hruparam$V1==hrus),15] + hrus_irrigation_all$IRRIG_TYPE[which(hrus_irrigation_all$CAT == hrus)] <- hruparam[which(hruparam$V1==hrus),16] +} +write.dbf(hrus_irrigation_all, '~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig_CultureNew/AleatoirIrrig_CN_hrus_corr.dbf') + + +# écriture +write.dbf(hrus_et_cantons, '~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig_CultureNew/AleatoirIrrig_CN_hrus_decoupees.dbf') +write.dbf(hrus_irrigation_all, '~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig_CultureNew/AleatoirIrrig_CN_hrus.dbf') + diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/19A163EA b/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/19A163EA new file mode 100644 index 0000000000000000000000000000000000000000..be8af6105ece51aca750258c6ee699a43d59b7e3 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/19A163EA @@ -0,0 +1,26 @@ +{ + "id": 
"19A163EA", + "path": "~/Bureau/maestro/irrigation-R-codes/lib/utilitaire_irrigation.R", + "project_path": "lib/utilitaire_irrigation.R", + "type": "r_source", + "hash": "1135473089", + "contents": "", + "dirty": false, + "created": 1652798796097.0, + "source_on_save": false, + "relative_order": 1, + "properties": { + "source_window_id": "", + "Source": "Source", + "cursorPosition": "177,48", + "scrollLine": "169" + }, + "folds": "", + "lastKnownWriteTime": 1652948120, + "encoding": "UTF-8", + "collab_server": "", + "source_window": "", + "last_content_update": 1652948120, + "read_only": false, + "read_only_alternatives": [] +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/19A163EA-contents b/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/19A163EA-contents new file mode 100644 index 0000000000000000000000000000000000000000..bb249835e43ad61269626a7c0bec441040c0d48e --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/19A163EA-contents @@ -0,0 +1,455 @@ +# ============================================================================== +# By Theo L. intern at INRAE +# CREATED on May 16, 2022 +# +# +# ------------------------------------------------------------------------------ +# the objective is to regroup the essential function of the MDR_irrigated project +# in an easy to maintain and well documented file +# +# most of the functions that are in this file are comming from another R source +# file, to keep track of those the "From" boxes indicate the name of the original +# file +# ============================================================================== + +library(zoo) +library(xts) + + + +# ========================== +# ** From MDR_utilitaires ** +# ========================== + + +# -------------------------------add_param-------------------------------------- +# **** add an extra parameter to reach.par + +# add_param <- function(inputdir, oldreachfile, newreachfile, newparamName, newparamVal, newparamUnit) +# Adds a new parameter to the given reach file, with the given value and unit. +# Args: +# inputdir: The input directory +# oldreachfile: The old reach file +# newreachfile: The new reach file +# newparamName: The name of the new parameter +# newparamVal: The value of the new parameter +# newparamUnit: The unit of the new parameter + +# The point of this function is to add a new parameter to an oldreachfile and create a newreachfile with the new parameter included. +# The function takes the inputdir (input directory), oldreachfile, newreachfile, newparamName, newparamVal, and newparamUnit as arguments. +# it then starts by finding the number of lines in the oldreachfile, then reads the header line and finds the line where the ID is located. +# Then the function reads in the oldreachfile, adds the new paramName to the file, and creates a new Min, Max, and Unit file with the new parameter included. +# Finally, the function writes the newreachfile with the new parameter included. 
+# ------------------------------------------------------------------------------ +add_param <- function(inputdir, oldreachfile, newreachfile, newparamName, newparamVal, newparamUnit) { + + nbLines <- skip_lines(inputdir,oldreachfile) + headerReach <- readLines(paste0(inputdir, oldreachfile), n = nbLines) + LinesNames <- which(substr(headerReach,1,2)=="ID") + Names <- read.table(paste0(inputdir, oldreachfile), nr=1, skip=LinesNames-1) + Names <- cbind(Names,newparamName) + Min <- read.table(paste0(inputdir, oldreachfile), nr=1, skip=LinesNames) + Min <- cbind(Min,0,0) + Max <- read.table(paste0(inputdir, oldreachfile), nr=1, skip=LinesNames+1) + Max <- cbind(Max,9999999,9999999) + Unit <- read.table(paste0(inputdir, oldreachfile), nr=1, skip=LinesNames+2) + Unit <- cbind(Unit,newparamUnit) + reach <- Chargement_param(inputdir,oldreachfile) + reach <- cbind(reach,newparamVal) + + write.table (Names, paste0(inputdir, newreachfile), col.names=F, row.names=F, quote=F, sep='\t', append=F) + write.table (Min, paste0(inputdir, newreachfile), col.names=F, row.names=F, quote=F, sep='\t', append=T) + write.table (Max, paste0(inputdir, newreachfile), col.names=F, row.names=F, quote=F, sep='\t', append=T) + write.table (Unit, paste0(inputdir, newreachfile), col.names=F, row.names=F, quote=F, sep='\t', append=T) + write.table (reach, paste0(inputdir, newreachfile), col.names=F, row.names=F, quote=F, sep='\t', append=T) +} + + +# -------------------------------Chargement_param------------------------------- +# Chargement_param <- function(chemin,Name) +# Loads all parameters from a given file. +# Args: +# chemin: The path to the file +# Name: The name of the file +# Returns: +# The parameters as a data frame + +# - The code is able to identify the line with the first values and skip the initial text lines +# Caveats: it may not work for files with less than 3 lines of data. +# ------------------------------------------------------------------------------ +Chargement_param <- function(chemin, Name) { + # initialization + k <- 0 + obj <- NULL; obj2 <- NULL; obj3 <- NULL + + # loop until we find a line with 3 numeric value + while(length(na.omit(obj))==0 | length(na.omit(obj2))==0 | length(na.omit(obj3))==0) { + + obj <- as.numeric(read.table(paste0(chemin, Name), nrow=1, skip=k, colClasses="character"))[1] + obj2 <- as.numeric(read.table(paste0(chemin, Name), nrow=1, skip=k+1, colClasses="character"))[1] + obj3 <- as.numeric(read.table(paste0(chemin, Name), nrow=1, skip=k+2, colClasses="character"))[1] + k <- k+1 + } + + # get the number of line to skip to get the data + nbLines <- k - 1 + + # load the data + data <- read.table(paste0(chemin, Name), skip=nbLines) + mycolnames <- apply(read.table(paste0(chemin, Name), nrow=1)[1,], 1, as.character) + colnames(data) <- mycolnames + + return(data) +} + + +# -------------------------------write_new_paramfile---------------------------- +# write_new_paramfile=<-(oldfile, newvalues ,newfile) +# Writes the new combination of HRUs to the original parameter file. 
+# Args:
+# oldfile: The original parameter file
+# newvalues: A vector of the HRUs to write to the new parameter file
+# newfile: The new parameter file
+
+# write a modified params file using the header of the old one
+# ------------------------------------------------------------------------------
+write_new_paramfile <- function(oldfile, newvalues, newfile) {
+  # get the header
+  nb_lines <- skip_lines(oldfile)
+  header <- readLines(oldfile, n = nb_lines)
+
+  # write
+  write.table(header, newfile, sep = '\t', col.names = F, row.names = F, quote = F)
+  write.table(newvalues, newfile, col.names = F, row.names = F, quote = F, append = TRUE, sep = '\t')
+}
+
+
+# -------------------------------skip_lines-------------------------------------
+# skip_lines <- function(chemin, Name = "")
+# Finds the number of lines to skip before the data starts in a parameter file.
+# Can be called either with a full path, skip_lines(file), or with a directory
+# and a file name, skip_lines(chemin, Name); the two parts are pasted together.
+# Args:
+# chemin: The full path of the file, or the directory containing it
+# Name: The file name (optional, empty by default)
+# Returns:
+# The number of lines to skip before the data starts
+# ------------------------------------------------------------------------------
+skip_lines <- function(chemin, Name = "") {
+  file <- paste0(chemin, Name)
+  k <- 0
+  obj <- NULL; obj2 <- NULL; obj3 <- NULL
+  while (length(na.omit(obj)) == 0 | length(na.omit(obj2)) == 0 | length(na.omit(obj3)) == 0) {
+
+    obj <- as.numeric(read.table(file, nrow = 1, skip = k, colClasses = "character"))[1]
+    obj2 <- as.numeric(read.table(file, nrow = 1, skip = k + 1, colClasses = "character"))[1]
+    obj3 <- as.numeric(read.table(file, nrow = 1, skip = k + 2, colClasses = "character"))[1]
+
+    k <- k + 1
+  }
+  return(k - 1)
+}
+
+
+# -------------------------------luid2cult--------------------------------------
+# luid2cult <- function(vect_luid)
+# Converts a vector of J2000 culture codes to their corresponding names.
+# Args:
+# vect_luid: A vector of J2000 culture codes
+# Returns:
+# A vector containing the corresponding names
+
+# The point of this function is to match a vector of numbers with a vector of strings.
+# ------------------------------------------------------------------------------
+luid2cult <- function(vect_luid) {
+  cultures <- c('Vigne', 'Mais', 'Tournesol', 'Blé dur', 'Maraichage', 'PdT', 'Vergers', 'Prairies', 'Protéagineux', 'Riz', "Jachère", "Divers", "Industrielles")
+  numJ2000_cultures <- 19:31
+  res <- apply(as.matrix(vect_luid), 2, function(X){cultures[match(X, numJ2000_cultures)]})
+  return(as.vector(res)) # Not sure this is necessary...
+}
+
+# vec <- luid2cult(18:25)   # quick manual check (19:25 map to crop names; 18 has no J2000 crop and gives NA)
+
+
+# ===================================
+# ** From readwritefunctions_J2000 **
+# ===================================
+
+# -------------------------------ReadLoopDaily----------------------------------
+# ReadLoopDaily <- function(folder, file, filtre)
+# Reads a "daily file" and returns the data and the corresponding dates.
+# Args:
+# folder: The folder where the file is located
+# file: The name of the file
+# filtre: A logical indicating whether to filter the data or not
+# Returns:
+# A list containing the dates and the data
+
+# 1. it starts by reading the length of the first data block and the number of blocks in the file
+# 2.
then it reads the file's headers (column names) +# 3. finally it reads the data block by block, adding a day to the date at each block, until the end of the file +# ------------------------------------------------------------------------------ +ReadLoopDaily <- function(folder, file, filtre) { + # Open the file + con <- file(paste0(folder, file)) + open(con) + + # Be careful as we remain in the same connection we must count the lines from the current line read (not from the beginning of the file) + + # Read the nb of elements (HRUs or Reaches) (length of the blocks) + Lblocks <- read.table(con, nrows = 1, sep = "\t", skip = 1) + Lblocks <- Lblocks[,3] + + # Get the nb of time steps fo the simulation (nb of blocks of the file) + Nblocks <- read.table(con, nrows = 1, sep = "\t", skip = 1) + Nblocks <- Nblocks[,3] + + # Get the col names (names of the simulated variables) + if (filtre == T) {Colnames <- scan(con, what = "", nlines = 1, sep = "", skip = 3)} else {Colnames <- scan(con, what = "", nlines = 1, sep = "", skip = 2)} + #Colnames <- scan(con, what = "", nlines = 1, sep = "", skip = 2) + + # Get the start date of the simulation (we consider only the date -> daily time step) + Datestart <- read.table(con, as.is = TRUE, nrows = 1, sep = "", skip = 3) + Datestart <- Datestart[,2] + if (filtre == T) { + read.table(con, nrows = 1, sep = "\t") + count <- length(Colnames)+1 + compt <- 0 + while (count == (length(Colnames)+1)) { + obj <- read.table(con, nrows = 1, sep = "\t") + count <- dim(obj)[2] + compt <- compt + 1 + } + Lblocks <- compt-1 + con<-file(paste0(folder, file)) + open(con) + read.table(con, nrows = 1, sep = "\t", skip = 1) + read.table(con, nrows = 1, sep = "\t", skip = 1) + Colnames <- scan(con, what = "", nlines = 1, sep = "", skip = 3) + # Get the start date of the simulation (we consider only the date -> daily time step) + read.table(con, as.is = TRUE, nrows = 1, sep = "", skip = 3) + } + + # Read the data + # Initialisation of a matrix of the correct size to store the data + # nrow = nb of time steps * nb of elts + # ncol = nb of simulated variables + ncol <-length(Colnames) + Data <- matrix(nrow=Nblocks*Lblocks,ncol=ncol) + # Loop on the nb of blocks + for (i in 0:(Nblocks -1)) + { + # Read the block of data + # if i=0 (first block skip only 1 line) + if(i==0) + Datatemp <- read.table(con, nrows = Lblocks, sep = "", skip = 1, colClasses="numeric") + # else skip 3 lines + else + Datatemp <- read.table(con, nrows = Lblocks, sep = "", skip = 3, colClasses="numeric") + + # Add the values to the matrix + Data[(i*Lblocks+1):((i+1)*Lblocks),1:ncol] <- as.matrix(Datatemp) + } + + # close the file + close(con) + + # Add the colnames + colnames(Data) <- Colnames + # Create the corresponding vector of dates + dates <- as.character(seq(from = as.Date(Datestart), length.out = Nblocks, by = "day")) + + # Return the vector of dates and the data as a list + list(dates=dates, Data=Data) +} + + + +# ======================== +# ** From zoo_functions ** +# ======================== + + +# -------------------------------aggregateZoo----------------------------------- +# aggregateZoo <- function (z, timeStep, sumOrMeanFunction) +# Aggregates the given zoo object over the given time step. 
+# Args: +# z: A zoo object +# timeStep: The time step over which to aggregate the zoo object +# Possible values: ["dmy","my","m","sy","s"] +# sumOrMeanFunction: The function to apply to the aggregated zoo object +# Possible values: [sum,mean] +# Returns: +# The aggregated zoo object + +# The point of this R function is to aggregate data over different time steps. +# The different time steps that are supported are "dmy", "my", "m", "sy", and "s". +# For each time step, the function will either take the sum or mean of the data. +# ------------------------------------------------------------------------------ +aggregateZoo <- function (z, timeStep, sumOrMeanFunction) { + #Retourne un nouveau objet zoo aggr?g? sur le pas de temps timeStep + #(["dmy","my","m","sy","s"]) en faisant la somme ou la moyenne ([sum,mean]) + + if(timeStep == "dmy"){ + return (aggregate(z, time(z) - as.numeric(time(z)) %% 1, sumOrMeanFunction,na.rm = TRUE)) + } + if(timeStep == "my"){ + return (aggregate(z, as.Date(as.yearmon(time(z))), sumOrMeanFunction,na.rm = TRUE)) + } + if(timeStep == "y"){ + return (aggregate(z, format(as.Date(index(z)), '%y'), sumOrMeanFunction,na.rm = TRUE)) + } + if(timeStep == "m"){ + return (aggregate(z, format(as.Date(index(z)), '%m'), sumOrMeanFunction,na.rm = TRUE)) + } + if(timeStep == "sy"){ + return (aggregate(z, as.Date(as.yearqtr(time(z))), sumOrMeanFunction,na.rm = TRUE)) + } + if(timeStep == "s"){ + return (aggregate(z, quarters(time(z)), sumOrMeanFunction,na.rm = TRUE)) + } + + print("Type not understood") +} + + + +# =================================== +# ** From functions_post_treatment ** +# =================================== + + +# -------------------------------Topologie-------------------------------------- +# Topologie <- function (brin,reach) +# find all of the reachable nodes from a given node in a graph +# Args: +# brin: The index of the given HRU | brin pour lequel on veut la topologie +# reach: A matrix containing the indices of all HRUs and their downstream HRUs | le fichier parametre reach.par charge +# Returns: +# A vector containing the indices of all HRUs upstream of the given HRU | la liste des brins en amont du brin choisi + +# take in a vector of reachable nodes from a given node, and return a vector of all nodes that can be reached from the original node. 
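# --- Illustrative sketch only (invented reach IDs) ------------------------------
# A toy network showing what Topologie() below returns. Only columns 1 (reach ID)
# and 2 (ID of the downstream reach) are used, as in reach.par.
toy_reach <- data.frame(V1 = c(1, 2, 3, 4, 5),
                        V2 = c(3, 3, 5, 5, 0))   # 1 and 2 flow into 3; 3 and 4 flow into 5
# Topologie(5, toy_reach)   # returns 5 3 4 1 2: reach 5 plus everything upstream of it
# ---------------------------------------------------------------------------------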
+# Remontee depuis le brin choisi jusqu'a l'amont du bassin +# ------------------------------------------------------------------------------ +Topologie <- function (brin, reach) { + IDs <- NULL + Brin0 <- brin + + for (indice in 1:1000){ + assign(paste0('Brin', indice), NULL) + } + k <- 0 + + while (length(get(paste0('Brin', k)))!=0){ + for (i in seq_along(get(paste0('Brin', k)))){ + assign(paste0('Brin', k + 1), c(get(paste0('Brin', k + 1)), reach[which(reach[, 2]== get(paste0('Brin', k))[i]), 1])) + } + k <- k+1 + } + Total <- brin + for (l in 1:k){ + Total <- unique(c(Total,get(paste0('Brin', l)))) + } + Total +} + + + +# ========================== +# ** From MDR_AERMCprelev ** +# ========================== + + +# -------------------------------Prelev82_1987_2007----------------------------- +# Prelev82_1987_2007 <- function() +# Calculates the mean annual water withdrawal across all cantons over 1987-2007 +# Args: +# None +# Returns: +# A dataframe containing the canton and the corresponding mean annual water withdrawal +# ------------------------------------------------------------------------------ +Prelev82_1987_2007 <- function() { # m3/yr + Prelev <- read.table('~/Documents/MDR/irrigation/Chronique_PrelevRMC_Cantons.txt',header=T) + + Prelev82 <- NULL + for (cant in Prelev$Canton[order(unique(Prelev$Canton))]){ + Prelev82_ann <- Prelev[which(Prelev[, 1] == cant), 5] * 1000 #(m3) + Prelev_date <- as.Date(as.character(Prelev[which(Prelev[, 1] == cant), 2]), format="%Y") + Prelev82_ann <- xts(Prelev82_ann, Prelev_date) + Prelev82 <- rbind(Prelev82, mean(Prelev82_ann["1987/2007"])) # prélèvement annuel moyen sur 1987-2007 + } + Prelev82data <- cbind(Prelev$Canton[order(unique(Prelev$Canton))], Prelev82) + colnames(Prelev82data) <- c('canton', 'Prelev82') + + return(Prelev82data) +} + + +# -------------------------------Prelev8182_1987_2007--------------------------- +# Prelev8182_1987_2007 <- function() +# Calculates the mean annual water withdrawals from 1981 to 1982 for each canton. +# Args: +# None +# Returns: +# A dataframe with the canton in the first column and the mean annual water withdrawals in the second column +# ------------------------------------------------------------------------------ +Prelev8182_1987_2007 <- function(){ # m3/yr + Prelev <- read.table('~/Documents/MDR/irrigation/Chronique_PrelevRMC_Cantons.txt',header=T) + + Prelev8182 <- NULL + for (cant in Prelev$Canton[order(unique(Prelev$Canton))]){ + Prelev82_ann <- Prelev[which(Prelev[, 1] == cant), 5] * 1000 #(m3) + Prelev81_ann <- Prelev[which(Prelev[, 1] == cant), 4] * 1000 #(m3) + Prelev_date <- as.Date(as.character(Prelev[which(Prelev[, 1] == cant), 2]), format="%Y") + + Prelev82_ann <- xts(Prelev82_ann, Prelev_date) + Prelev81_ann <- xts(Prelev81_ann, Prelev_date) + + Prelev8182 <- rbind(Prelev8182, mean(Prelev81_ann["1987/2007"])+mean(Prelev82_ann["1987/2007"])) # prélèvement annuel moyen sur 1987-2007 + } + Prelev8182data <- cbind(Prelev$Canton[order(unique(Prelev$Canton))], Prelev8182) + colnames(Prelev8182data) <- c('canton', 'Prelev8182') + + return(Prelev8182data) +} + + +# -------------------------------Prelev8182_2008_2012--------------------------- +# Prelev8182_2008_2012 <- function() +# Finds the average annual water withdrawals for all cantons from 2008-2012. 
+# Args: +# None +# Returns: +# A dataframe containing the canton and the corresponding average annual water withdrawals +# ------------------------------------------------------------------------------ +# m3/yr +Prelev8182_2008_2012 <- function(){ + Prelev <- read.table('~/Documents/MDR/irrigation/Chronique_PrelevRMC_Cantons.txt',header=T) # create a data.frame from the .txt file + + Prelev8182 <- NULL + for (cant in Prelev$Canton[order(unique(Prelev$Canton))]){ + Prelev82_ann <- Prelev[which(Prelev[,1] == cant),5] * 1000 #(m3) create a vector with all the annual data of the first canal for the canton cant + Prelev81_ann <- Prelev[which(Prelev[,1] == cant),4] * 1000 #(m3) same but with the second canal + Prelev_date <- as.Date(as.character(Prelev[which(Prelev[,1] == cant),2]), format="%Y") # create a vector with the date of all the data + + Prelev82_ann <- xts(Prelev82_ann, Prelev_date) # create a time series with the data of the first canal and their date + Prelev81_ann <- xts(Prelev81_ann, Prelev_date) # same with the second canal + + Prelev8182 <- rbind(Prelev8182,mean(Prelev81_ann["2008/2012"])+mean(Prelev82_ann["2008/2012"])) # add to a vector the mean of the time series between 2008 and 2012 (5 years) + } + Prelev8182data <- cbind(Prelev$Canton[order(unique(Prelev$Canton))], Prelev8182) # create a data.frame with the canton and the mean of 5 years + colnames(Prelev8182data) <- c('canton','Prelev8182') # give a name to the columns + + return(Prelev8182data) +} + diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/E82EBE19 b/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/E82EBE19 new file mode 100644 index 0000000000000000000000000000000000000000..6a79953a26bd43e8ddc5044801e90c8d55b8a1f0 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/E82EBE19 @@ -0,0 +1,26 @@ +{ + "id": "E82EBE19", + "path": "~/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r", + "project_path": "MDR_Analyse_Irrigation_IG.r", + "type": "r_source", + "hash": "1299428435", + "contents": "", + "dirty": false, + "created": 1652863291409.0, + "source_on_save": false, + "relative_order": 2, + "properties": { + "source_window_id": "", + "Source": "Source", + "cursorPosition": "265,0", + "scrollLine": "0" + }, + "folds": "", + "lastKnownWriteTime": 1652874349, + "encoding": "UTF-8", + "collab_server": "", + "source_window": "", + "last_content_update": 1652874349712, + "read_only": false, + "read_only_alternatives": [] +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/E82EBE19-contents b/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/E82EBE19-contents new file mode 100644 index 0000000000000000000000000000000000000000..f69406e26f86f34a84ffb2eabb6a33a493c13c3f --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/E82EBE19-contents @@ -0,0 +1,311 @@ +######################################################################################## +####### Comparaison des DEMANDE et TRANSFERTS modélisés aux PRELEVEMENTS AERMC ######### +######################################################################################## +# WARNING : since 17/05/2022, now using .dbf files in "user configuration" instead of .shp files +# the objectif being to not have to use maptools anymore + + +library(rgeos) +library(foreign) +library(sp) +library(raster) + +source('lib/utilitaire_irrigation.R') + + +# *** CONFIGURATION UTILISATEUR *** +# --------------------------------- + +# config='25MPS_Aleatoir' # == Nom_simu 
+# chemin="~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig/" # chemin shape +# shp_file <- 'AleatoirIrrig_hrus_decoupees.shp' + + +#test3 +config <- 'test4_MA' #== Nom_simu +chemin <- '/home/tlabrosse/Bureau/maestro/irrigation-R-codes/Irrigation/Shapes/' # chemin shape +shp_file <- 'hrus_irriguees_sur_Rhone.dbf' + +# +# config='25MPS_surest' # == Nom_simu +# chemin="~/DATA/SIG_MDR/irrigation/shape_HRUs_Francois/" # chemin shape +# shp_file='hrus_irriguees_decoupees.shp' + +chemin_sortie <- '/home/tlabrosse/Bureau/maestro/irrigation-R-codes/resultats OUT/' +pdfname <- paste0("/home/tlabrosse/Bureau/maestro/irrigation-R-codes/resultats OUT/Comparaison_Irrig_", config, ".pdf") + + +# *** TRAITEMENT des SORTIES DU MODELE *** +# ---------------------------------------- + +Nom_simu <- config + +HRULoop <- ReadLoopDaily(paste0(chemin_sortie, Nom_simu, '/'), "HRULoop.dat", TRUE) +Dates <- HRULoop$dates + +# Code canton des HRUs irriguées +HRUs <- read.dbf(paste0(chemin, shp_file)) +regroup <- cbind(HRUs$CAT,HRUs$CODE_CAN_1) # join CAT - CODE_CAN_1 (deux id) +cantons <- regroup[order(regroup[,2]),] # sort +un_canton <- unique(cantons[,2]) # remove all duplicates + + +# Calcul des chroniques journalières : cantonXXX_Demande, cantonXXX_Transfert, de dimensions : ncol=nb_HRUS_in_canton, nrows=Ntime +for (cant in un_canton){ + Nom <- paste0('canton', cant) + HRUs_irr <- cantons[which(cantons[,2]== cant),1] # HRUs irriguées du canton "cant" + + Dem <- NULL + Transf <- NULL + for (k in HRUs_irr){ + Dem <- cbind(Dem,HRULoop$Data[which(HRULoop$Data[,1]==k),which(colnames(HRULoop$Data)=='irrigationDemand')]) # L + Transf <- cbind(Transf,HRULoop$Data[which(HRULoop$Data[,1]==k),which(colnames(HRULoop$Data)=='irrigationTotal')]) # L + } + +assign(paste0(Nom, '_Demande'), Dem) +assign(paste0(Nom, '_Transfert'), Transf) +} + +# Calcul des Demande et Transferts annuels interannuels par canton +Demande_interannuelle <- NULL +Transfert_interannuel <- NULL + +for (cant in un_canton){ + Nom <- paste0('canton', cant) + obj1 <- aggregateZoo(na.omit(zoo(apply(get(paste0(Nom, '_Demande')), 1, sum), Dates)), 'y', 'sum')/1000. # m3 + obj2 <- aggregateZoo(na.omit(zoo(apply(get(paste0(Nom, '_Transfert')), 1, sum), Dates)), 'y', 'sum')/1000. # m3 + z_dem <- mean(obj1 [-c(1,2,24:28)] ) #Valeur sur 1987 - 2007 + z_transf <- mean( obj2 [-c(1,2,24:28)] ) #Valeur sur 1987 - 2007 + Demande_interannuelle <- rbind( Demande_interannuelle, z_dem) + Transfert_interannuel <- rbind( Transfert_interannuel, z_transf) +} + +irrig_interannuelle_simu <- cbind(canton=un_canton,demande=Demande_interannuelle,transfert=Transfert_interannuel) +rownames(irrig_interannuelle_simu) <- NULL +colnames(irrig_interannuelle_simu) <- c('canton','demande','tranfert') + + +# *** PRELEVEMENTS AERMC *** +# -------------------------- +Prelev <- Prelev8182_1987_2007() # m3 . 81: GRAV ; 82 : non-grav +# Pour certains des cantons agricoles modélisés, les prélèvements sont nuls (eg le canton n'apparait pas dans les prélèvements de l'AERMC) +# => ajouter cette colonne aux prélèvements avec pour valeur 0 +prelev <- NULL +for (cant in un_canton){ + if (length(Prelev[which(Prelev[,1] == cant)])>0) { + prelev <- rbind(prelev,Prelev[which(Prelev[,1] == cant),2]) + } else { + prelev <- rbind(prelev,0.) 
+ } +} +PrelevAll <- prelev # m3/yr +colnames(PrelevAll) <- 'PrelevAll' + +Prelev <- Prelev82_1987_2007() +prelev <- NULL +for (cant in un_canton){ + if (length(Prelev[which(Prelev[,1] == cant)])>0) { + prelev <- rbind(prelev,Prelev[which(Prelev[,1] == cant),2]) + } else { + prelev <- rbind(prelev,0.) + } +} +PrelevNonGrav <- prelev # m3/yr +colnames(PrelevNonGrav) <- 'PrelevNonGrav' + +Prelev <- Prelev8182_2008_2012() # +prelev <- NULL +for (cant in un_canton){ + if (length(Prelev[which(Prelev[,1] == cant)])>0) { + prelev <- rbind(prelev,Prelev[which(Prelev[,1] == cant),2]) + } else { + prelev <- rbind(prelev,0.) + } +} +PrelevAll_post2008 <- prelev # m3/yr +colnames(PrelevAll_post2008) <- 'PrelevAll_post2008' + +# *** GRAPHES DE COMPARAISON PRELEV - TRANSFERTS *** +# -------------------------------------------------- + +# tous les cantons +comparaison <- cbind(irrig_interannuelle_simu, PrelevAll, PrelevNonGrav, PrelevAll_post2008) +save(comparaison,file= paste0('~/Documents/MDR/irrigation/RDATA/Comparaison_Irrig_', config, '.Rdata')) + + +# Cantons_Rhone <- c(101,117,118,119,120,140,518,717,722,724,1333,2602,2604,2607,2611,2613,2615,2616,2619,2621,2623,2625,2626,2628,2629,2632,2634,3006,3009, 3016, 3023,3026,3802,3807,3808,3815,3819,3822,3824,3825,3830,3837,3846,3853,4213,4233,6907,6924,6931,6937,6938,6944,6945,6948,6949,7405,8405,8406,8409,8413,8415,8416,8418,8423) +# +# Cantons_Durance <- c(410,413,414,416,419,420,421,427,429,430,505,509,512,515,516,522,523,524,1307, 1309,1312,1326,1327,1331,8319,8408,8411) +# +# Cantons_Saone <- c(102,126,135,2103,2114,2134,2138,3909,6905,6910,6925,7116,7151) + +# seuls ceux présents à >99% sur notre domaine +Cantons_Rhone <- c(101,117,118,119,120,140,518,717,722,724,2602,2604,2607,2611,2613,2615,2616,2619,2621,2623,2625,2626,2628,2629,2632,2634,3006,3023,3026,3802,3807,3808,3815,3819,3822,3824,3825,3830,3837,3846,3853,4213,4233,6907,6924,6931,6937,6938,6944,6945,6948,6949,7405,8405,8406,8409,8413,8415,8416,8418,8423) +# +Cantons_Durance <- c(410,413,414,416,419,420,421,427,429,430,505,509,512,515,516,522,523,524,1326,1327,8319,8408,8411) +# +Cantons_Saone <- c(102,126,135,2103,2114,2134,2138,3909,6905,6910,6925,7116,7151) + +# fichier à charger pour avoir les cultures dominantes par canton +canton_cult <- read.dbf('~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig_CultureNew/AleatoirIrrig_CN_hrus_decoupees.dbf') +canton_cult <- canton_cult[, c('CODE_CAN_1','LANDUSEID')] +canton_cult <- canton_cult[!duplicated(canton_cult$CODE_CAN_1),] + + +pdf(pdfname,paper <- "special",width=8,height=14) +layout(matrix(c(1,3,5,1,3,5,1,3,5,2,4,5),3,4)) +par (pty="m") + +# Rhone +mat_Rhone <- as.matrix(comparaison[which(comparaison[,1] %in% Cantons_Rhone),2:6]) +cantonlist <- comparaison[which(comparaison[,1] %in% Cantons_Rhone),1] +culturelist <- substr(luid2cult(canton_cult[match(cantonlist,canton_cult$CODE_CAN_1),2]), start=1, stop=3) +row.names(mat_Rhone) <- paste(cantonlist, culturelist) +petits <- (which(mat_Rhone[, 3]/1000000<10)) +barplot(t(mat_Rhone[petits,])/1000000., beside = TRUE, col = c("red", "blue", "green","forestgreen", "black"), legend.text = TRUE, main="RHONE : Demande,Transfert et Prélèvements AERMC sur 1987-2007", xlab='cantons', ylab='Mm3', ylim=c(0,10),border=NA,las=2, cex.names=0.65) +barplot(t(mat_Rhone[-petits,])/1000000., beside = TRUE, col = c("red", "blue", "green","forestgreen","black"), legend.text = TRUE, main="", xlab='cantons', ylab='Mm3', ylim=c(0,150),las=2, cex.names=0.65) + +par (pty="m") +# Durance +mat_Durance <- 
as.matrix(comparaison[which(comparaison[, 1] %in%Cantons_Durance), 2:6]) +cantonlist <- comparaison[which(comparaison[, 1] %in% Cantons_Durance), 1] +culturelist <- substr(luid2cult(canton_cult[match(cantonlist, canton_cult$CODE_CAN_1), 2]), start=1, stop=3) +row.names(mat_Durance) <- paste(cantonlist, culturelist) +petits <- (which(mat_Durance[, 3]/1000000<20)) +barplot(t(mat_Durance[petits,])/1000000, beside = TRUE, col = c("red", "blue", "green","forestgreen", "black"), legend.text = TRUE, main="DURANCE : Demande,Transfert et Prélèvements AERMC sur 1987-2007", xlab='cantons', ylab='Mm3', las=2, ylim=c(0,20),cex.names=0.65) +barplot(t(mat_Durance[-petits,])/1000000, beside = TRUE, col = c("red", "blue", "green","forestgreen", "black"), legend.text = TRUE, main="", xlab='cantons', ylab='Mm3', las=2, ylim=c(0,150),cex.names=0.65) + +par (pty="m") +# Saone +mat_Saone <- as.matrix(comparaison[which(comparaison[, 1] %in%Cantons_Saone), 2:6]) +cantonlist <- comparaison[which(comparaison[, 1] %in% Cantons_Saone), 1] +culturelist <- substr(luid2cult(canton_cult[match(cantonlist, canton_cult$CODE_CAN_1), 2]), start=1, stop=3) +row.names(mat_Saone) <- paste(cantonlist, culturelist) +barplot(t(mat_Saone)/1000000, beside = TRUE, col = c("red", "blue", "green","forestgreen", "black"), legend.text = TRUE, main="SAONE : Demande,Transfert et Prélèvements AERMC sur 1987-2007", xlab='cantons', ylab='Mm3', las=2,cex.names=0.65) + +graphics.off() + + + +# *** ANALYSE PAR TYPE DE CULTURE et SOUS-BASSINS (code en cours d'écriture) *** +# ------------------------------------------------------------------------------ + +# -- Irrigation par hectare irrigué, pour détection des erreurs de surface ou pb de choix de méthode d'irrigation +culture_hru <- luid2cult(HRUs$LANDUSEID) + +irrigarea_cant <- NULL # SAU irriguée par canton dans notre modélisation (proche valeurs du RGA) +culture_cant <- NULL +for (cant in un_canton){ + irrigarea_cant <- c(irrigarea_cant, sum(HRUs$AREA[which(HRUs$CODE_CAN_1 ==cant)])/10000.) 
#hectares + culture_cant <- c(culture_cant, unique(culture_hru[which(HRUs$CODE_CAN_1 ==cant)])) +} + +comparaison_surf <- cbind(un_canton, irrig_interannuelle_simu[, 2:3]%/%irrigarea_cant, PrelevAll%/%irrigarea_cant) + +Rhone <- comparaison_surf[which(comparaison_surf[, 1] %in% Cantons_Rhone),] +cultures_Rhone <- culture_cant[which(un_canton %in% Cantons_Rhone )] + +N <- length(unique(cultures_Rhone)) #==> 5 types de culture : "Mais" "Prairies" "Vergers" "Vigne" "maraichage" +culture_locale <- unique(cultures_Rhone) + +pdfname <- paste0("~/Documents/MDR/irrigation/ComparaisonSurf_Rhone_", config, ".pdf") +pdf(pdfname,paper="special",width=8,height=14) +layout(matrix(1:N, N, 1)) +par (pty="m") + +for (cult in 1:N){ + mat <- as.matrix(Rhone[which(cultures_Rhone==culture_locale[cult]), 2:4]) + row.names(mat) <- Rhone[which(cultures_Rhone==culture_locale[cult]), 1] + barplot(t(mat), beside = TRUE, col = c("red", "blue", "green"), legend.text = TRUE, main= paste0('Rhone , ', culture_locale[cult]), xlab='cantons', ylab='m3 / hectare', las=2) +} + +graphics.off() +mat_Rhone <- as.matrix(comparaison[which(comparaison[, 1] %in% Cantons_Rhone), 2:5]) +row.names(mat_Rhone) <- comparaison[which(comparaison[, 1] %in% Cantons_Rhone), 1] +petits <- (which(mat_Rhone[, 3]/1000000<10)) +barplot(t(mat_Rhone[petits,])/1000000., beside = TRUE, col = c("red", "blue", "green","forestgreen"), legend.text = TRUE, main="RHONE : Demande,Transfert et Prélèvements AERMC sur 1987-2007", xlab='cantons', ylab='Mm3', ylim=c(0,10),border=NA,las=2, cex.names=0.75) +barplot(t(mat_Rhone[-petits,])/1000000., beside = TRUE, col = c("red", "blue", "green","forestgreen"), legend.text = TRUE, main="", xlab='cantons', ylab='Mm3', ylim=c(0,150),las=2) + +######################################################################################## +####### Ajout de la contrainte Q > 10% MA (Module Annuel) pour Prélèv Irrigation ####### +######################################################################################## + +# fichiers pour extraction des MA : +simufile <- '~/JAMS/modeldata/J2K_Rhone_Barrages/output/BAR/' +filename <- 'ReachLoop.dat' + +# le vieux et nouveau Reach.par +paramdir <- '~/JAMS/modeldata/J2K_Rhone_Irrigation/parameter/' +oldparfile <- 'reach.par' +newparfile <- 'reach_MA.par' # avec nouveau param MA + +# code +rloop <- ReadLoopDaily(simufile, filename, FALSE) +MA <- NULL +Nbreach <- dim(rloop$Data)[1]/length(rloop$dates) +Nbtime <- length(rloop$dates) +Ntot <- dim(rloop$Data)[1] +for (i in (1:Nbreach)){ # 1 to 3075 reaches + index <- seq(i, Ntot, Nbreach) + chronique <- rloop$Data[index, 2] + MA_tmp <- mean(chronique) + MA <- c(MA, MA_tmp) # L/d +} + +ID <- rloop$Data[1:Nbreach, 1] +MA <- rbind(ID, MA) + +reaches <- Chargement_param(oldReachParfile, parfile) +order <- match(ID, reaches$V1) # l'ordre des simus est inversé par rapport à l'ordre du reach.par... +MA <- MA[, order] + +newparamName <- "MA" +newparamVal <- round(MA[2,]) +newparamUnit <- "L/d" +add_param(paramdir,oldparfile,newparfile,newparamName,newparamVal,newparamUnit) + +######################################################################################## +####### Impact de la paramétrisation MA sur les débits journaliers sur qques HRUS ###### (en cours) +######################################################################################## + +# ex : cantons (retenu) 3909 (reach6222) : forte baisse de la demande suite à introduction de cette paramétrisation. 
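# --- Illustrative sketch only (invented IDs and values) --------------------------
# The re-ordering needed in the MA block above, where the simulated reach order is
# reversed with respect to reach.par: indexing by match(order_wanted, order_given)
# puts a one-value-per-reach vector back in reach.par order.
ids_par <- c(10, 20, 30, 40)                  # order of the IDs in reach.par
ids_sim <- rev(ids_par)                       # order delivered by ReachLoop.dat
ma_sim  <- c(4.4, 3.3, 2.2, 1.1)              # one MA value per simulated reach
ma_par  <- ma_sim[match(ids_par, ids_sim)]    # 1.1 2.2 3.3 4.4, aligned with reach.par
# ----------------------------------------------------------------------------------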
+simnewdir <- paste0(chemin_sortie, config, "/") +filename <- 'ReachLoop.dat' + +simrefdir <- '~/JAMS/modeldata/J2K_Rhone_Natural_Hydrology/output/newREF/' + +simolddir <- paste0(chemin_sortie, 'test3', "/") + +myreach <- 6222 + +# code + +# avec MA +rloopnew <- ReadLoopDaily(simnewdir, filename, FALSE) +runoffnew <- rloopnew$Data[which(rloopnew$Data[, 1]==myreach), 'simRunoff'] +runoffnew <- xts(runoffnew, as.POSIXct(rloopnew$dates, format='%Y-%m-%d')) + +# Hydro Nat +rloopref <- ReadLoopDaily(simrefdir, filename, FALSE) +runoffref <- rloopref$Data[which(rloopref$Data[, 1]==myreach), 'simRunoff'] +runoffref <- xts(runoffref, as.POSIXct(rloopref$dates, format='%Y-%m-%d')) + +# irrig sans MA +rloopold <- ReadLoopDaily(simolddir, filename, FALSE) +runoffold <- rloopold$Data[which(rloopold$Data[, 1]==myreach), 'simRunoff'] +runoffold <- xts(runoffold, as.POSIXct(rloopold$dates, format='%Y-%m-%d')) + +MA <- mean(runoffref) +MAts <- xts(rep(MA, length(runoffref)), as.POSIXct(rloopref$dates, format='%Y-%m-%d')) +MA10ts <- xts(rep(MA*.1, length(runoffref)), as.POSIXct(rloopref$dates, format='%Y-%m-%d')) + +year <- '2009' +period <- paste0(year, '-05-01/', year, '-10-31') +plot(runoffref[period],ylim=c(0, MA*2), main="débit à l'aval du canton 3909 (CHEMIN, cult=mais, petit affluent", ylab='L/d') +lines(runoffnew[period],col=2) +lines(runoffold[period],col=4) +lines(MAts[period], col=3) +lines(MA10ts[period], col=3, lty=4) + +legend("topright",legend=names(variablesfut),y.intersp = 1, lty= 1,bty="n",col = colors,xpd=NA,cex=0.8) + diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/EC4C7759 b/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/EC4C7759 new file mode 100644 index 0000000000000000000000000000000000000000..e89df33bffa71444d94648a6cd6624d44a376798 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/EC4C7759 @@ -0,0 +1,26 @@ +{ + "id": "EC4C7759", + "path": "~/Bureau/maestro/irrigation-R-codes/MDR_areaselect_irrigated_HRUs.r", + "project_path": "MDR_areaselect_irrigated_HRUs.r", + "type": "r_source", + "hash": "724752302", + "contents": "", + "dirty": false, + "created": 1652861704143.0, + "source_on_save": false, + "relative_order": 3, + "properties": { + "source_window_id": "", + "Source": "Source", + "cursorPosition": "191,0", + "scrollLine": "0" + }, + "folds": "", + "lastKnownWriteTime": 1652861948, + "encoding": "UTF-8", + "collab_server": "", + "source_window": "", + "last_content_update": 1652861948872, + "read_only": false, + "read_only_alternatives": [] +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/EC4C7759-contents b/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/EC4C7759-contents new file mode 100644 index 0000000000000000000000000000000000000000..304beefa32eda9594839df7c8d1d3d4da69ee634 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/per/t/EC4C7759-contents @@ -0,0 +1,215 @@ +#~****************************************************************************** +#~* Selects the HRUs where irrigation is to be deployed, based on their area +#~* , canton location (irrigated or not) and comparison to SAU_irr_in_canton +#~* +#~* The area of diverse combinations of HRUs is compared to the SAU_irr_in_canton, +#~* starting with combinations of only 1 HRU in the canton, and increasing till +#~* being over the SAU_irr_in_canton. 
+#~* If the single-element alreading brings the HRU irrigated area above the SAU_irr_in_canton +#~* , the smallest HRU in the canton is irrigated and all others are not. +#~* The eligible combinations are tested in 3 passes with increasing tolerance to +#~* error in the total HRU irrigated area : 10 %, 30 % and 100 %. +#~* +#~* The results are +#~* * a vector irrigated (value : 0 or 1) with length: N_hrus_total +#~* * a vector irrig_type (value : 0, 1 or 2) with length: N_hrus_total. +#~* 1 = asp ; 2 = gag +#~* * a vector area_error indicating the % of error in surface committed with the new HRUirr +#~* +#~* - Le programme calcule les indices des HRUs qui irriguent un canton, en utilisant l'algorithme de combinaison de nombres afin d'additionner les surfaces des HRUs pour trouver la surface la plus proche possible de la surface totale du canton. +#~* +#~* - Le programme calcule également la différence en pourcentage entre la surface totale du canton et la surface totale des HRUs irriguant le canton. +#~* +#~* - Le programme écrit les résultats dans un fichier externe. +# +#~* - Le fichier externe contient 5 colonnes. La première est le numéro du HRU, la deuxième est sa surface, la troisième indique si le HRU irrigue le canton (1) ou non (0), la quatrième donne le type d'irrigation du HRU et la cinquième donne la différence en pourcentage entre la surface totale du canton et la surface totale des HRUs irriguant le canton. +# %). + +#~****************************************************************************** +#~* PROGRAMMER: Isabelle GOUTTEVIN (Irstea Lyon) +#~****************************************************************************** +#~* CREATED/MODIFIED: +# Created 2015-12-09 by Isabelle GOUTTEVIN (Irstea Lyon) +# Modified 2022-05-04 by Theo L (INRAE Lyon) +#~****************************************************************************** + +library(combinat) +library(foreign) + + +# *** FONCTIONS *** + +# ------------------------------------------------------------------------ +# index_of_nearest <- function(x, number){ +# Finds the index of the element in x that is closest to number. +# Args: +# x: A vector of numbers +# number: A number +# Returns: +# The index of the element in x that is closest to number +# ------------------------------------------------------------------------ +index_of_nearest <- function(x, number){ + return (which(abs(x-number)==min(abs(x-number))))} + + + + +# ------------------------------------------------------------------------ +#value_of_nearest(c(5,2,1),6) +# value_of_nearest <- function(x, number) +# Finds the value of the element in x that is closest to number. +# Args: +# x: A vector of numbers +# number: A number +# Returns: +# The value of the element in x that is closest to number +# ------------------------------------------------------------------------ +value_of_nearest <- function(x, number){ + return (x[which(abs(x-number)==min(abs(x-number)))])} + + + + +# ------------------------------------------------------------------------ +# try_combination <- function(n, S_HRUs, S_irr_Canton, tolerance) +# Tries to find the combination of HRUs that best fits the given irrigation area. 
+# Args: +# n: The current number of HRUs to be added to the combination +# S_HRUs: A vector of HRUs' surface +# S_irr_Canton: The target irrigation area +# tolerance: The maximum error tolerated by the user +# Returns: +# The index of the HRUs that best fit the target irrigation area, +# " continue " if the current combination does not work but a smaller combination might, +# " non convergence " if the current combination does not work and neither does a smaller combination. +# ------------------------------------------------------------------------ +try_combination <- function(n, S_HRUs, S_irr_Canton, tolerance){ + # { + if (n < length(S_HRUs)){ + combi <- combn(S_HRUs, n) + } else { + combi <- t(t(S_HRUs)) # TODO is transposing it twice a good idea? isn't it useless? + } + sumcombi <- apply(combi, 2, sum) + # } TODO from testing, all this part is useless. need confirmation tho ~~~~ + + nearestarea <- value_of_nearest(sumcombi, S_irr_Canton) + error_nearest <- abs(1-nearestarea/S_irr_Canton)*100. + + if (error_nearest[1] < tolerance){ + + combi_selected <- index_of_nearest(sumcombi, S_irr_Canton) + index_selected <- NULL + for (i in 1:n){ + index_selected <- c(index_selected, which(S_HRUs==combi[, combi_selected][i])) + } + return (index_selected) + + } else if (min(sumcombi) > S_irr_Canton){ + + if (n==1){ + return(which(sumcombi==min(sumcombi))) + } else { + return ("non convergence") + } + } else { + return ("continue") + } +} + +# ------------------------------------------------------------------------ +# main <- function(hrus_irrig_cantons_filePath, cantons_irrigues_filePath) +# Main function of the irrigation assignment process. +# Args: +# hrus_irrig_cantons_filePath: The path to the HRUs irrigated cantons data file +# cantons_irrigues_filePath: The path to the cantons irrigated data file +# Returns: +# A file with the irrigation status of every HRU +# ------------------------------------------------------------------------ +main <- function(hrus_irrig_cantons_filePath, cantons_irrigues_filePath) { + hrus_irrig_cantons <- read.dbf(hrus_irrig_cantons_filePath) + cantons_irrigues <- read.dbf(cantons_irrigues_filePath) + + + N_hru <- dim(hrus_irrig_cantons)[1] + + # creates two vector of the size of the number of currently irrigated HRUs + irrigated <- rep(0, N_hru) + area_error <- rep(0, N_hru) + + # creates a vector of the size of the number irrigated cantons + canton_traite <- rep(0, dim(cantons_irrigues)[1]) + + + tolerances <- c(10, 30, 100) + + for(tolerance in tolerances) { + for (numcanton in cantons_irrigues$CODE_CAN_1[which(canton_traite==0)]){ + + # TODO is this useful in any way ? indice_canton is just the index of numcanton rn, there must be a less heavy method + indice_canton <- which(cantons_irrigues$CODE_CAN_1==numcanton) + + # Find the HRU of the current canton + hrus <- hrus_irrig_cantons[which(hrus_irrig_cantons$CODE_CAN_1==numcanton), ] + + if (dim(hrus)[1]<=0){ + canton_traite[indice_canton] <- 1 + } else { + indices <- which(hrus_irrig_cantons$CODE_CAN_1==numcanton) # trouve le(s) HRU(s) associe au canton etudie + S_HRUs <- hrus$AREA # surface du/des HRU(s) en m2 + + S_irr_Canton <- cantons_irrigues[which(cantons_irrigues$CODE_CAN_1==numcanton), ]$SAU_IRR*100. 
# le "*100" lie au fait que les donnees du RGA sont en ares = 100m2 + + index_of_HRUs <- "continue" + n_elements_combi <- 1 + while ((index_of_HRUs=="continue") && (n_elements_combi <= length(S_HRUs))){ + index_of_HRUs <- try_combination(n_elements_combi, S_HRUs, S_irr_Canton, tolerance) + n_elements_combi <- n_elements_combi+1 + } + + if (index_of_HRUs=="non convergence" || index_of_HRUs=="continue") { + irrigated[indices] <- NA + } else { + irrigated[indices] <- 0 + irrigated[indices[index_of_HRUs]] <- 1 + area_error[indices] <- (sum(S_HRUs[index_of_HRUs])/S_irr_Canton-1)*100. + + canton_traite[indice_canton] <- 1 + } + } + } + } + + + irrig_type <- rep(0, N_hru) + + # il est entrain de mettre un vector dans chaque case du vector là , je me trompe ? + irrig_type[which(irrigated >0)] <- hrus_irrig_cantons$IRRIG_TYPE[which(irrigated >0)] + + # TODO put it in the right folder + write.table(cbind(hrus_irrig_cantons$CAT,hrus_irrig_cantons$AREA, irrigated, irrig_type, area_error),'/home/tlabrosse/Bureau/maestro/irrigation-R-codes/resultats OUT/Irrigated_AleatoireHRUselect.csv',append=F, sep="\t", row.names=FALSE, col.names=c('HRUnum', 'HRUarea', 'irrigated', 'irrig_type', 'area_error')) +} + +# *** MAIN CODE *** +# ----------------- + +main('/home/tlabrosse/Bureau/maestro/irrigation-R-codes/Irrigation/Shapes/hrus_irriguees_sur_Rhone.dbf', + '/home/tlabrosse/Bureau/maestro/irrigation-R-codes/Irrigation/Shapes/Cantons_irrigues.dbf' +) + + + +# TODO what is this ? -> +# Annexe : creation de la table des surfaces irriguees modelisees par canton + +# library(foreign) # there is no need to load this lib twice, right? + +# thats is probably not the right file +# HRU_Aleatoir <- read.dbf('~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig/hrus_irriguees_Aleatoires.dbf') +# SHRUirr_can <- NULL +# for (un_canton in sort(unique(HRU_Aleatoir$CODE_CAN_1))){ +# SHRUirr_can <- c(SHRUirr_can, sum(HRU_Aleatoir[which(HRU_Aleatoir$CODE_CAN_1 == un_canton),]$AREA/100)) #ares +# } +# # TODO put it in the right folder +# write.table(cbind(sort(unique(HRU_Aleatoir$CODE_CAN_1)),SHRUirr_can),'/home/tlabrosse/Bureau/maestro/irrigation-R-codes/resultats OUT/Bilan_HRU_Aleatoir.txt',append=F, sep="\t", row.names=FALSE, col.names=c('canton', 'HRUirrig_area')) + diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/082D99D8 b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/082D99D8 new file mode 100644 index 0000000000000000000000000000000000000000..1270c0ef570e323130e5495e195f5b812aee62ec --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/082D99D8 @@ -0,0 +1,6 @@ +{ + "source_window_id": "", + "Source": "Source", + "cursorPosition": "191,0", + "scrollLine": "0" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/09FC9B8E b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/09FC9B8E new file mode 100644 index 0000000000000000000000000000000000000000..45c84888c32e6ec1d67ce8ca9fb3b274e751030d --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/09FC9B8E @@ -0,0 +1,6 @@ +{ + "source_window_id": "", + "Source": "Source", + "cursorPosition": "45,22", + "scrollLine": "40" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/17087C8C b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/17087C8C new file mode 100644 index 0000000000000000000000000000000000000000..223f742f7880d01f2a1e4ff4a44768365b6177d1 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/17087C8C @@ 
-0,0 +1,6 @@ +{ + "source_window_id": "", + "Source": "Source", + "cursorPosition": "40,89", + "scrollLine": "29" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/1B73E668 b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/1B73E668 new file mode 100644 index 0000000000000000000000000000000000000000..0f823fc9a5504e24c09f28b3e40237723777c670 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/1B73E668 @@ -0,0 +1,6 @@ +{ + "source_window_id": "", + "Source": "Source", + "cursorPosition": "87,32", + "scrollLine": "0" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/1F4205F2 b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/1F4205F2 new file mode 100644 index 0000000000000000000000000000000000000000..8021171411b8d46f57c218ca057e7fad35896863 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/1F4205F2 @@ -0,0 +1,6 @@ +{ + "source_window_id": "", + "Source": "Source", + "cursorPosition": "13,0", + "scrollLine": "0" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/2055E6C5 b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/2055E6C5 new file mode 100644 index 0000000000000000000000000000000000000000..e9da9418d67bd02b462c0248d945699991abfbf1 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/2055E6C5 @@ -0,0 +1,6 @@ +{ + "source_window_id": "", + "Source": "Source", + "cursorPosition": "0,24", + "scrollLine": "0" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/2511A65E b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/2511A65E new file mode 100644 index 0000000000000000000000000000000000000000..2804a9c51ca642e6044f963a2a149a4513356e03 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/2511A65E @@ -0,0 +1,6 @@ +{ + "source_window_id": "", + "Source": "Source", + "cursorPosition": "265,0", + "scrollLine": "0" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/2A75760D b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/2A75760D new file mode 100644 index 0000000000000000000000000000000000000000..e9da9418d67bd02b462c0248d945699991abfbf1 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/2A75760D @@ -0,0 +1,6 @@ +{ + "source_window_id": "", + "Source": "Source", + "cursorPosition": "0,24", + "scrollLine": "0" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/3442B7E0 b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/3442B7E0 new file mode 100644 index 0000000000000000000000000000000000000000..bb276909ee70e4175f228787abe8a4a2df0886d8 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/3442B7E0 @@ -0,0 +1,4 @@ +{ + "source_window_id": "", + "Source": "Source" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/3FCD3112 b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/3FCD3112 new file mode 100644 index 0000000000000000000000000000000000000000..8284daddf7fd1667b13ddf4177744ba13f6ee258 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/3FCD3112 @@ -0,0 +1,6 @@ +{ + "source_window_id": "", + "Source": "Source", + "cursorPosition": "64,44", + "scrollLine": "64" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/450EB367 
b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/450EB367 new file mode 100644 index 0000000000000000000000000000000000000000..920742baa8e6534c5737fac55bacca6631a83aa1 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/450EB367 @@ -0,0 +1,6 @@ +{ + "source_window_id": "", + "Source": "Source", + "cursorPosition": "161,0", + "scrollLine": "151" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/9339DA25 b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/9339DA25 new file mode 100644 index 0000000000000000000000000000000000000000..aa48e88aa3378174ed74021b8382a97fd22b775c --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/9339DA25 @@ -0,0 +1,6 @@ +{ + "source_window_id": "", + "Source": "Source", + "cursorPosition": "471,22", + "scrollLine": "460" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/93F91E7D b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/93F91E7D new file mode 100644 index 0000000000000000000000000000000000000000..bb276909ee70e4175f228787abe8a4a2df0886d8 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/93F91E7D @@ -0,0 +1,4 @@ +{ + "source_window_id": "", + "Source": "Source" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/9FEE2462 b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/9FEE2462 new file mode 100644 index 0000000000000000000000000000000000000000..04d3ea48ef87fa5577d6257da5720348669d1892 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/9FEE2462 @@ -0,0 +1,6 @@ +{ + "source_window_id": "", + "Source": "Source", + "cursorPosition": "66,46", + "scrollLine": "48" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/A316367F b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/A316367F new file mode 100644 index 0000000000000000000000000000000000000000..4ad75e10118803c290457a25a4f17f37358ee201 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/A316367F @@ -0,0 +1,6 @@ +{ + "source_window_id": "", + "Source": "Source", + "cursorPosition": "48,0", + "scrollLine": "21" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/AEC2172D b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/AEC2172D new file mode 100644 index 0000000000000000000000000000000000000000..d87ebdc62bcd53b567caddeb76119346d17963d8 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/AEC2172D @@ -0,0 +1,6 @@ +{ + "source_window_id": "", + "Source": "Source", + "cursorPosition": "389,0", + "scrollLine": "382" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/B21ABEBA b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/B21ABEBA new file mode 100644 index 0000000000000000000000000000000000000000..56434f9ef94b3963fb551d04d1350e440862f2c8 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/B21ABEBA @@ -0,0 +1,6 @@ +{ + "source_window_id": "", + "Source": "Source", + "cursorPosition": "146,0", + "scrollLine": "0" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/B25A9746 b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/B25A9746 new file mode 100644 index 0000000000000000000000000000000000000000..204b63dc6112676c2f7b917c954dad2d70553cf0 --- /dev/null +++ 
b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/B25A9746 @@ -0,0 +1,6 @@ +{ + "source_window_id": "", + "Source": "Source", + "cursorPosition": "177,48", + "scrollLine": "169" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/BEF645ED b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/BEF645ED new file mode 100644 index 0000000000000000000000000000000000000000..0ef239270e4c91e2f358747f2cec2b190aeb28fd --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/BEF645ED @@ -0,0 +1,6 @@ +{ + "source_window_id": "", + "Source": "Source", + "cursorPosition": "324,43", + "scrollLine": "317" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/D3B14105 b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/D3B14105 new file mode 100644 index 0000000000000000000000000000000000000000..6de5eb1f71bd06a8e10564aa6c230504d545aa2e --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/D3B14105 @@ -0,0 +1,6 @@ +{ + "source_window_id": "", + "Source": "Source", + "cursorPosition": "133,5", + "scrollLine": "127" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/D40324B9 b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/D40324B9 new file mode 100644 index 0000000000000000000000000000000000000000..68492b82c778b60c3aa5dc47ecd3c693321313a3 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/D40324B9 @@ -0,0 +1,6 @@ +{ + "source_window_id": "", + "Source": "Source", + "cursorPosition": "18,0", + "scrollLine": "0" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/DAAD3305 b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/DAAD3305 new file mode 100644 index 0000000000000000000000000000000000000000..bb276909ee70e4175f228787abe8a4a2df0886d8 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/DAAD3305 @@ -0,0 +1,4 @@ +{ + "source_window_id": "", + "Source": "Source" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/DF31A5D6 b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/DF31A5D6 new file mode 100644 index 0000000000000000000000000000000000000000..a9946e0cd7284e748f6ba4a9d9625b2064eaa18d --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/DF31A5D6 @@ -0,0 +1,6 @@ +{ + "source_window_id": "", + "Source": "Source", + "cursorPosition": "10,9", + "scrollLine": "126" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/E545822B b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/E545822B new file mode 100644 index 0000000000000000000000000000000000000000..bb276909ee70e4175f228787abe8a4a2df0886d8 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/E545822B @@ -0,0 +1,4 @@ +{ + "source_window_id": "", + "Source": "Source" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/F26970F3 b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/F26970F3 new file mode 100644 index 0000000000000000000000000000000000000000..685518cac94f8151fa7923dd07cd5d9a1a2440ea --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/F26970F3 @@ -0,0 +1,6 @@ +{ + "source_window_id": "", + "Source": "Source", + "cursorPosition": "3,1", + "scrollLine": "0" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/F4A41083 
b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/F4A41083 new file mode 100644 index 0000000000000000000000000000000000000000..e410bca223d5e23cdce87be03b2cc76dffb6943e --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/F4A41083 @@ -0,0 +1,6 @@ +{ + "source_window_id": "", + "Source": "Source", + "cursorPosition": "468,37", + "scrollLine": "457" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/FC1453E0 b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/FC1453E0 new file mode 100644 index 0000000000000000000000000000000000000000..0088964a3c453a4bf89dfcb62e179d5ab5dc95a9 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/FC1453E0 @@ -0,0 +1,6 @@ +{ + "source_window_id": "", + "Source": "Source", + "cursorPosition": "224,2", + "scrollLine": "217" +} \ No newline at end of file diff --git a/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/INDEX b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/INDEX new file mode 100644 index 0000000000000000000000000000000000000000..45e1b8658267e67a2b522c2cb6f9403dc6edd6a7 --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/4644B31D/sources/prop/INDEX @@ -0,0 +1,27 @@ +%2Fmedia%2Ftlabrosse%2Fdisk%2Fhydrotools%2FIvan%2FMisc%2FTestCladuegne_20200507.R="3442B7E0" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2FMDR_Analyse_Irrigation_IG.r="2511A65E" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2FMDR_areaselect_irrigated_HRUs.r="082D99D8" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2FMDR_select_culture_irrigu%C3%A9e.r="B21ABEBA" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2Flib%2FAnalyse_hrus_function.r="2A75760D" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2Flib%2FFunctions_post_treatment.r="9339DA25" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2Flib%2FMDR_AERMCprelev.r="3FCD3112" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2Flib%2FMDR_utilitaires.r="FC1453E0" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2Flib%2FSoil_proportion_function_library.r="2055E6C5" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2Flib%2FaggregateZoo_functions.r="D40324B9" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2Flib%2Fcriteria_functions.R="D3B14105" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2Flib%2Ffirst.R="450EB367" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2Flib%2Freadwrite_functions_J2000.R="BEF645ED" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2Flib%2Ftest.R="A316367F" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2Flib%2Futilitaire_irrigation.R="B25A9746" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2Flib%2Fzoo_functions.r="09FC9B8E" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2Flib_old%2FAnalyse_hrus_function.r="17087C8C" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2Flib_old%2FFunctions_post_treatment.r="AEC2172D" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2Flib_old%2FJ2000_postprocessing_functions.R="E545822B" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2Flib_old%2FMDR_AERMCprelev.r="1B73E668" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2Flib_old%2FMDR_utilitaires.r="F4A41083" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2Flib_old%2FSoil_proportion_function_francois.r="9FEE2462" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2Flib_old%2FaggregateZoo_functions.r="1F4205F2" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2Flib_old%2Fcriteria_functions.R="93F91E7D" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2Flib_old%2Freadwrite_functions_J2000.R="DF31A5D6" +~%2FBureau%2Fmaestro%2Firrigation-R-codes%2Flib_old%2Fzoo_functions.r="F26970F3" 
+~%2FBureau%2Fmaestro%2Firrigation-R-codes-bkup%2FMDR_Analyse_Irrigation_IG.r="DAAD3305" diff --git a/irrigation-R-codes/.Rproj.user/shared/notebooks/patch-chunk-names b/irrigation-R-codes/.Rproj.user/shared/notebooks/patch-chunk-names new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/irrigation-R-codes/.Rproj.user/shared/notebooks/paths b/irrigation-R-codes/.Rproj.user/shared/notebooks/paths new file mode 100644 index 0000000000000000000000000000000000000000..c706c0dcfe1e29abfcc292c0f2955cf2b715dafb --- /dev/null +++ b/irrigation-R-codes/.Rproj.user/shared/notebooks/paths @@ -0,0 +1,15 @@ +/home/tlabrosse/Bureau/maestro/irrigation-R-codes-bkup/MDR_Analyse_Irrigation_IG.r="1168B337" +/home/tlabrosse/Bureau/maestro/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r="D980930B" +/home/tlabrosse/Bureau/maestro/irrigation-R-codes/MDR_areaselect_irrigated_HRUs.r="02D5C0CB" +/home/tlabrosse/Bureau/maestro/irrigation-R-codes/MDR_select_culture_irriguée.r="32D25EDA" +/home/tlabrosse/Bureau/maestro/irrigation-R-codes/lib/utilitaire_irrigation.R="A5967C75" +/home/tlabrosse/Bureau/maestro/irrigation-R-codes/lib_old/Analyse_hrus_function.r="EC9F9EE0" +/home/tlabrosse/Bureau/maestro/irrigation-R-codes/lib_old/Functions_post_treatment.r="7FF9845B" +/home/tlabrosse/Bureau/maestro/irrigation-R-codes/lib_old/J2000_postprocessing_functions.R="A4D59AF8" +/home/tlabrosse/Bureau/maestro/irrigation-R-codes/lib_old/MDR_AERMCprelev.r="8F79E6E5" +/home/tlabrosse/Bureau/maestro/irrigation-R-codes/lib_old/MDR_utilitaires.r="963DB0BA" +/home/tlabrosse/Bureau/maestro/irrigation-R-codes/lib_old/Soil_proportion_function_francois.r="6EAFF84F" +/home/tlabrosse/Bureau/maestro/irrigation-R-codes/lib_old/aggregateZoo_functions.r="70F799E3" +/home/tlabrosse/Bureau/maestro/irrigation-R-codes/lib_old/criteria_functions.R="FFE23EAC" +/home/tlabrosse/Bureau/maestro/irrigation-R-codes/lib_old/readwrite_functions_J2000.R="153BC8E8" +/home/tlabrosse/Bureau/maestro/irrigation-R-codes/lib_old/zoo_functions.r="F05A8209" diff --git a/irrigation-R-codes/.idea/.gitignore b/irrigation-R-codes/.idea/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..26d33521af10bcc7fd8cea344038eaaeb78d0ef5 --- /dev/null +++ b/irrigation-R-codes/.idea/.gitignore @@ -0,0 +1,3 @@ +# Default ignored files +/shelf/ +/workspace.xml diff --git a/irrigation-R-codes/.idea/inspectionProfiles/Project_Default.xml b/irrigation-R-codes/.idea/inspectionProfiles/Project_Default.xml new file mode 100644 index 0000000000000000000000000000000000000000..de1f6a7a2961bcb75b01559c403f05dfe65910f5 --- /dev/null +++ b/irrigation-R-codes/.idea/inspectionProfiles/Project_Default.xml @@ -0,0 +1,12 @@ +<component name="InspectionProjectProfileManager"> + <profile version="1.0"> + <option name="myName" value="Project Default" /> + <inspection_tool class="PyUnresolvedReferencesInspection" enabled="true" level="WARNING" enabled_by_default="true"> + <option name="ignoredIdentifiers"> + <list> + <option value="set.__getitem__" /> + </list> + </option> + </inspection_tool> + </profile> +</component> \ No newline at end of file diff --git a/irrigation-R-codes/.idea/inspectionProfiles/profiles_settings.xml b/irrigation-R-codes/.idea/inspectionProfiles/profiles_settings.xml new file mode 100644 index 0000000000000000000000000000000000000000..105ce2da2d6447d11dfe32bfb846c3d5b199fc99 --- /dev/null +++ b/irrigation-R-codes/.idea/inspectionProfiles/profiles_settings.xml @@ -0,0 +1,6 @@ +<component 
name="InspectionProjectProfileManager"> + <settings> + <option name="USE_PROJECT_PROFILE" value="false" /> + <version value="1.0" /> + </settings> +</component> \ No newline at end of file diff --git a/irrigation-R-codes/.idea/irrigation-R-codes.iml b/irrigation-R-codes/.idea/irrigation-R-codes.iml new file mode 100644 index 0000000000000000000000000000000000000000..d0876a78d06ac03b5d78c8dcdb95570281c6f1d6 --- /dev/null +++ b/irrigation-R-codes/.idea/irrigation-R-codes.iml @@ -0,0 +1,8 @@ +<?xml version="1.0" encoding="UTF-8"?> +<module type="PYTHON_MODULE" version="4"> + <component name="NewModuleRootManager"> + <content url="file://$MODULE_DIR$" /> + <orderEntry type="inheritedJdk" /> + <orderEntry type="sourceFolder" forTests="false" /> + </component> +</module> \ No newline at end of file diff --git a/irrigation-R-codes/.idea/misc.xml b/irrigation-R-codes/.idea/misc.xml new file mode 100644 index 0000000000000000000000000000000000000000..d1e22ecb89619a9c2dcf51a28d891a196d2462a0 --- /dev/null +++ b/irrigation-R-codes/.idea/misc.xml @@ -0,0 +1,4 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project version="4"> + <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.8" project-jdk-type="Python SDK" /> +</project> \ No newline at end of file diff --git a/irrigation-R-codes/.idea/modules.xml b/irrigation-R-codes/.idea/modules.xml new file mode 100644 index 0000000000000000000000000000000000000000..bd8dc09315bf01b8de8f35108b89376428d9a7d4 --- /dev/null +++ b/irrigation-R-codes/.idea/modules.xml @@ -0,0 +1,8 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project version="4"> + <component name="ProjectModuleManager"> + <modules> + <module fileurl="file://$PROJECT_DIR$/.idea/irrigation-R-codes.iml" filepath="$PROJECT_DIR$/.idea/irrigation-R-codes.iml" /> + </modules> + </component> +</project> \ No newline at end of file diff --git a/irrigation-R-codes/.idea/rGraphicsSettings.xml b/irrigation-R-codes/.idea/rGraphicsSettings.xml new file mode 100644 index 0000000000000000000000000000000000000000..94499d797256bfd6e29b52ef89d4935740c783c0 --- /dev/null +++ b/irrigation-R-codes/.idea/rGraphicsSettings.xml @@ -0,0 +1,9 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project version="4"> + <component name="RGraphicsSettings"> + <option name="height" value="450" /> + <option name="resolution" value="75" /> + <option name="version" value="2" /> + <option name="width" value="720" /> + </component> +</project> \ No newline at end of file diff --git a/irrigation-R-codes/.idea/rSettings.xml b/irrigation-R-codes/.idea/rSettings.xml new file mode 100644 index 0000000000000000000000000000000000000000..97e6f1f160db88405e9db01b0ae6ba45adbed3f4 --- /dev/null +++ b/irrigation-R-codes/.idea/rSettings.xml @@ -0,0 +1,6 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project version="4"> + <component name="RSettings"> + <option name="interpreterPath" value="/usr/bin/R" /> + </component> +</project> \ No newline at end of file diff --git a/irrigation-R-codes/Irrigation.tar.gz b/irrigation-R-codes/Irrigation.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..c0880c51e4c71d0c8f88b3ae3fa1731f59af6986 Binary files /dev/null and b/irrigation-R-codes/Irrigation.tar.gz differ diff --git a/irrigation-R-codes/Irrigation/Bilan_irrigation.xlsx b/irrigation-R-codes/Irrigation/Bilan_irrigation.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..325e424ac8210aa3ae822dda4c50fc25df91b30e Binary files /dev/null and b/irrigation-R-codes/Irrigation/Bilan_irrigation.xlsx differ 
diff --git a/irrigation-R-codes/Irrigation/Bilan_simulation_irrigation.txt b/irrigation-R-codes/Irrigation/Bilan_simulation_irrigation.txt new file mode 100644 index 0000000000000000000000000000000000000000..a1ef4b5b63e201294c3b5862dc03b3c2eb66e65b --- /dev/null +++ b/irrigation-R-codes/Irrigation/Bilan_simulation_irrigation.txt @@ -0,0 +1,5701 @@ +C:/jamsmodeldata/J2K_Buech/output/ - 20151013_162319 +Canton Demande Transfert Prélèvement +427 7.00E+07 3.32E+06 2.68E+07 +509 5.74E+07 2.63E+06 1.32E+08 +512 6.11E+07 3.98E+06 8.29E+07 +515 1.41E+07 1.03E+06 1.77E+06 +516 2.41E+07 1.26E+06 2.37E+07 +522 1.19E+08 1.36E+07 2.08E+07 +524 4.52E+07 7.27E+06 3.06E+07 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20150929_103735 +Canton Demande Transfert Prélèvement +101 4.30E+07 4.80E+06 6.50E+06 6.61 +102 1.87E+08 6.27E+06 2.41E+06 77.63 +117 2.16E+08 1.86E+07 9.19E+07 2.35 +118 6.86E+07 1.54E+06 2.45E+06 27.96 +119 6.12E+08 1.36E+07 4.31E+07 14.22 +120 4.20E+08 2.54E+06 3.84E+07 10.96 +126 1.81E+08 1.40E+07 NA +135 9.35E+08 5.65E+06 2.30E+06 407.39 +140 1.12E+08 4.22E+05 4.46E+06 25.03 +410 2.54E+08 1.72E+07 1.87E+08 1.36 +413 4.86E+08 8.34E+07 1.25E+07 38.82 +414 1.80E+08 2.66E+07 2.23E+08 0.81 +416 2.59E+07 1.03E+07 7.48E+07 0.35 +419 5.54E+07 5.94E+06 8.26E+07 0.67 +420 2.06E+08 9.10E+06 1.90E+05 1080.9 +421 1.82E+09 5.26E+07 3.35E+05 5420.4 +427 8.59E+07 1.04E+07 2.68E+07 3.2 +429 9.34E+08 5.48E+07 2.80E+07 33.4 +430 1.09E+08 3.13E+07 4.05E+08 0.27 +505 2.86E+08 3.08E+06 1.62E+07 17.68 +509 4.42E+08 6.68E+06 1.32E+08 3.34 +512 1.18E+08 1.94E+07 8.29E+07 1.42 +515 1.99E+07 1.20E+06 1.77E+06 11.26 +516 2.56E+07 2.67E+06 2.37E+07 1.08 +518 5.10E+07 2.93E+07 4.13E+07 1.23 +522 1.89E+08 1.51E+07 2.08E+07 9.08 +523 1.99E+08 1.11E+07 6.81E+05 291.76 +524 6.82E+07 8.77E+06 3.06E+07 2.23 +717 2.29E+08 6.50E+06 3.65E+06 62.75 +722 2.08E+08 1.17E+07 3.87E+06 53.59 +724 2.17E+08 1.51E+07 1.35E+06 160.29 +1307 5.51E+08 7.65E+06 1.19E+08 4.64 +1309 8.53E+08 7.62E+06 1.10E+09 0.78 +1312 3.03E+08 1.09E+07 3.66E+08 0.83 +1326 1.41E+09 1.39E+07 3.19E+08 4.42 +1327 5.51E+08 5.15E+07 7.52E+08 0.73 +1331 2.66E+08 2.17E+05 4.62E+08 0.58 +1333 1.80E+08 3.56E+05 4.65E+05 387.41 +2103 1.32E+08 1.31E+07 2.44E+06 53.87 +2114 3.86E+08 1.60E+07 5.05E+06 76.36 +2134 1.54E+08 1.78E+07 2.11E+06 72.77 +2138 1.28E+08 2.11E+06 9.46E+05 135.72 +2602 7.59E+08 4.35E+07 1.43E+08 5.29 +2604 9.53E+08 1.04E+07 1.36E+07 70.11 +2607 5.94E+08 4.38E+07 6.36E+07 9.34 +2611 6.40E+08 1.61E+07 2.06E+07 31 +2613 2.92E+08 2.69E+07 3.80E+07 7.67 +2615 4.36E+08 1.68E+07 5.03E+07 8.67 +2616 4.82E+08 3.07E+07 6.16E+07 7.82 +2619 6.65E+08 3.08E+07 3.25E+07 20.48 +2621 1.49E+09 1.89E+07 3.85E+07 38.68 +2623 6.33E+08 1.07E+07 1.60E+07 39.54 +2625 4.49E+08 1.08E+07 4.59E+07 9.79 +2626 1.07E+09 2.64E+07 4.27E+07 25.09 +2628 2.42E+08 2.19E+07 6.88E+07 3.51 +2629 4.88E+07 3.45E+06 2.97E+07 1.64 +2632 6.34E+07 1.80E+06 6.04E+06 10.51 +2634 3.70E+08 9.26E+06 7.22E+07 5.12 +3006 6.73E+08 1.95E+07 1.19E+06 565.85 +3009 2.32E+08 5.80E+06 5.98E+07 3.88 +3016 1.53E+08 1.83E+05 1.67E+05 911.57 +3023 6.91E+08 1.66E+07 1.39E+07 49.57 +3026 5.07E+08 1.59E+07 5.80E+06 87.39 +3802 8.09E+08 1.35E+07 1.79E+07 45.24 +3807 8.91E+08 8.34E+06 2.09E+07 42.54 +3808 1.01E+09 1.33E+07 3.13E+07 32.29 +3815 4.04E+08 5.80E+06 2.50E+06 161.8 +3819 8.77E+08 2.25E+07 3.22E+07 27.26 +3822 4.75E+06 4.37E+06 2.23E+08 0.02 +3824 4.18E+08 1.40E+07 5.36E+07 7.8 +3825 3.20E+08 1.28E+07 6.70E+06 47.7 +3830 3.12E+08 8.18E+06 5.01E+07 6.23 +3837 2.97E+08 8.83E+06 7.87E+06 37.66 +3846 
1.47E+08 6.42E+06 2.01E+07 7.32 +3853 4.00E+08 1.10E+07 3.38E+06 118.08 +3909 4.51E+08 1.25E+07 5.98E+06 75.34 +4213 2.37E+08 7.30E+06 8.68E+06 27.27 +4233 1.15E+08 4.29E+06 1.48E+06 77.47 +6905 3.14E+08 1.65E+07 1.42E+06 221.4 +6907 3.90E+07 1.39E+07 2.29E+06 17.04 +6910 7.22E+07 3.85E+06 0.00E+00 #DIV/0! +6924 3.12E+08 3.42E+06 0.00E+00 #DIV/0! +6925 2.12E+08 6.27E+06 8.10E+05 261.5 +6931 2.25E+08 3.25E+06 1.88E+05 1198.77 +6937 5.06E+08 4.30E+06 1.91E+07 26.42 +6938 8.65E+08 5.64E+06 1.37E+07 63.28 +6944 2.58E+08 6.08E+05 5.22E+07 4.95 +6945 6.02E+07 3.85E+05 8.52E+04 705.68 +6948 1.71E+06 9.08E+05 0.00E+00 #DIV/0! +6949 4.56E+06 4.56E+06 1.14E+06 3.99 +7116 1.46E+08 1.08E+07 6.47E+05 225.59 +7151 2.89E+08 3.13E+07 1.13E+06 254.64 +7405 3.10E+07 2.18E+06 9.62E+02 32182.93 +8319 7.10E+08 3.96E+07 5.83E+08 1.22 +8405 9.57E+08 1.83E+07 4.60E+06 207.92 +8406 7.47E+08 2.04E+07 3.92E+07 19.08 +8408 6.39E+08 4.16E+07 2.79E+06 228.71 +8409 1.14E+09 1.87E+07 3.83E+06 297.36 +8411 3.59E+08 2.91E+07 1.02E+09 0.35 +8413 9.46E+08 2.69E+06 8.79E+08 1.08 +8415 2.02E+08 4.95E+06 1.45E+04 13928.07 +8416 1.58E+09 1.98E+07 6.32E+07 25.05 +8418 4.47E+08 1.58E+07 1.02E+06 436.07 +8423 1.35E+08 2.15E+07 3.62E+08 0.37 +C:/jamsmodeldata/J2K_Buech/output/ - 20151014_100039 +Canton Demande Transfert Prélèvement +427 1.04E+08 3.63E+06 2.68E+07 3.87 +509 7.21E+07 2.73E+06 1.32E+08 0.54 +512 1.16E+08 4.34E+06 8.29E+07 1.4 +515 2.63E+07 1.12E+06 1.77E+06 14.83 +516 4.58E+07 1.37E+06 2.37E+07 1.94 +522 2.10E+08 1.51E+07 2.08E+07 10.07 +524 8.13E+07 8.16E+06 3.06E+07 2.65 +C:/jamsmodeldata/J2K_Buech/output/ - 20151014_100521 +Canton Demande Transfert Prélèvement +427 7.00E+07 3.32E+06 2.68E+07 2.61 +509 5.74E+07 2.63E+06 1.32E+08 0.43 +512 6.11E+07 3.98E+06 8.29E+07 0.74 +515 1.41E+07 1.03E+06 1.77E+06 7.94 +516 2.41E+07 1.26E+06 2.37E+07 1.02 +522 1.19E+08 1.36E+07 2.08E+07 5.7 +524 4.52E+07 7.27E+06 3.06E+07 1.48 +C:/jamsmodeldata/J2K_Buech/output/ - 20151014_100653 +Canton Demande Transfert Prélèvement +427 7.00E+07 3.32E+06 2.68E+07 2.61 +509 5.74E+07 2.63E+06 1.32E+08 0.43 +512 6.11E+07 3.98E+06 8.29E+07 0.74 +515 1.41E+07 1.03E+06 1.77E+06 7.94 +516 2.41E+07 1.26E+06 2.37E+07 1.02 +522 1.19E+08 1.36E+07 2.08E+07 5.7 +524 4.52E+07 7.27E+06 3.06E+07 1.48 +C:/jamsmodeldata/J2K_Buech/output/ - 20151014_104303 +Canton Demande Transfert Prélèvement +427 7.88E+06 2.14E+06 2.68E+07 0.29 +509 3.46E+06 1.45E+06 1.32E+08 0.03 +512 9.13E+06 2.66E+06 8.29E+07 0.11 +515 2.09E+06 8.38E+05 1.77E+06 1.18 +516 2.99E+06 8.81E+05 2.37E+07 0.13 +522 1.68E+07 7.33E+06 2.08E+07 0.81 +524 6.81E+06 3.86E+06 3.06E+07 0.22 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151015_085135 +Canton Demande Transfert Prélèvement rapport surface irriguée SAU/HRU demande/prel demande corrigée demande corrigée / prélèvement +101 4.30E+07 4.80E+06 6.50E+06 0.249596028 6.61 10727422.65 1.65 +102 1.87E+08 6.27E+06 2.41E+06 0.055491088 77.63 10396772.5 4.31 +117 2.16E+08 1.86E+07 9.19E+07 0.378252502 2.35 81724933 0.89 +118 6.86E+07 1.54E+06 2.45E+06 0.13657754 27.96 9368333 3.82 +119 5.89E+08 1.36E+07 4.31E+07 0.174539025 13.67 102769315.3 2.39 +120 4.20E+08 2.54E+06 3.84E+07 0.190541427 10.96 80113939.87 2.09 +126 1.81E+08 1.40E+07 NA 0.010049469 #VALEUR! 1814621.18 #VALEUR! 
+135 5.58E+08 2.16E+06 2.30E+06 0.033745233 243.21 18838477.32 8.21 +140 1.12E+08 4.22E+05 4.46E+06 0.289970031 25.03 32387908.35 7.26 +410 2.54E+08 1.72E+07 1.87E+08 0.120467626 1.36 30608695.56 0.16 +413 4.86E+08 8.34E+07 1.25E+07 0.117228565 38.82 56938075.46 4.55 +414 1.80E+08 2.66E+07 2.23E+08 0.173669886 0.81 31259393.12 0.14 +416 2.59E+07 1.03E+07 7.48E+07 0.161018248 0.35 4169693.94 0.06 +419 5.54E+07 5.94E+06 8.26E+07 0.173702091 0.67 9623157.83 0.12 +420 1.91E+08 1.04E+07 1.90E+05 0.036320225 1004.11 6939529.6 36.47 +421 1.82E+09 5.26E+07 3.35E+05 0.120775356 5420.4 219485978.1 654.65 +427 8.30E+07 1.11E+07 2.68E+07 0.274085974 3.09 22743616.86 0.85 +429 9.34E+08 5.48E+07 2.80E+07 0.137717342 33.4 128598926.7 4.6 +430 1.09E+08 3.13E+07 4.05E+08 0.139879125 0.27 15240719.61 0.04 +505 2.90E+08 3.29E+06 1.62E+07 0.07095875 17.93 20596060.47 1.27 +509 4.42E+08 6.68E+06 1.32E+08 0.118758812 3.34 52502289.46 0.4 +512 1.17E+08 1.91E+07 8.29E+07 0.179555246 1.41 21022766.32 0.25 +515 1.99E+07 1.20E+06 1.77E+06 0.282920488 11.26 5641453.77 3.18 +516 2.49E+07 2.29E+06 2.37E+07 0.347237654 1.05 8645704.03 0.37 +518 5.10E+07 2.93E+07 4.13E+07 0.085233124 1.23 4342639.49 0.11 +522 1.85E+08 1.54E+07 2.08E+07 0.082344681 8.89 15245155.22 0.73 +523 1.99E+08 1.11E+07 6.81E+05 0.104271227 291.62 20699494.99 30.41 +524 6.82E+07 8.77E+06 3.06E+07 0.126302356 2.23 8609110.36 0.28 +717 2.04E+08 7.36E+06 3.65E+06 0.082639225 55.8 16821770.45 4.61 +722 2.10E+08 1.19E+07 3.87E+06 0.081213368 54.26 17071536.2 4.41 +724 1.98E+08 1.83E+07 1.35E+06 0.07483458 146.68 14829630.54 10.98 +1307 5.51E+08 7.65E+06 1.19E+08 1.634377892 4.64 900427960.7 7.59 +1309 8.53E+08 7.62E+06 1.10E+09 1.41429639 0.78 1206600523 1.1 +1312 3.03E+08 1.09E+07 3.66E+08 1.188447712 0.83 359918003.6 0.98 +1326 1.41E+09 1.39E+07 3.19E+08 0.228797801 4.42 322466525.6 1.01 +1327 5.49E+08 5.22E+07 7.52E+08 0.134069999 0.73 73576202.41 0.1 +1331 2.66E+08 2.17E+05 4.62E+08 3.190897358 0.58 848854513 1.84 +1333 1.80E+08 3.56E+05 4.65E+05 5.968340342 387.41 1074240432 2312.2 +2103 1.31E+08 1.31E+07 2.44E+06 0.044345546 53.82 5830428.16 2.39 +2114 3.71E+08 1.66E+07 5.05E+06 0.031000628 73.38 11488677.98 2.27 +2134 1.52E+08 1.79E+07 2.11E+06 0.027993582 72.1 4257853.07 2.02 +2138 1.28E+08 2.20E+06 9.46E+05 0.014821848 135.53 1900085.89 2.01 +2602 7.59E+08 4.35E+07 1.43E+08 0.272631148 5.29 206828012.1 1.44 +2604 9.53E+08 1.04E+07 1.36E+07 0.269047297 70.11 256414494.3 18.86 +2607 5.87E+08 4.46E+07 6.36E+07 0.135447858 9.24 79574400.41 1.25 +2611 6.40E+08 1.61E+07 2.06E+07 0.141425268 31 90452019.26 4.38 +2613 2.87E+08 2.44E+07 3.80E+07 0.204172542 7.54 58499675.71 1.54 +2615 4.25E+08 1.78E+07 5.03E+07 0.266901886 8.44 113300746.7 2.25 +2616 4.73E+08 3.15E+07 6.16E+07 0.135568649 7.69 64189238.78 1.04 +2619 6.65E+08 3.08E+07 3.25E+07 0.142163677 20.48 94498772.19 2.91 +2621 1.49E+09 1.89E+07 3.85E+07 0.154983376 38.68 230524644.2 6 +2623 6.33E+08 1.07E+07 1.60E+07 0.156885761 39.54 99341644 6.2 +2625 4.44E+08 1.09E+07 4.59E+07 0.039260481 9.68 17438244.1 0.38 +2626 1.07E+09 2.64E+07 4.27E+07 0.151990228 25.09 162681017.2 3.81 +2628 2.42E+08 2.19E+07 6.88E+07 0.230600242 3.51 55748605.01 0.81 +2629 4.88E+07 3.45E+06 2.97E+07 0.509528875 1.64 24847911.88 0.84 +2632 6.34E+07 1.80E+06 6.04E+06 0.44132792 10.51 27983188.93 4.64 +2634 3.70E+08 9.26E+06 7.22E+07 0.271745311 5.12 100472685.3 1.39 +3006 6.68E+08 1.91E+07 1.19E+06 0.121900985 561.26 81399904.62 68.42 +3009 2.31E+08 5.86E+06 5.98E+07 3.775101419 3.86 870177559.1 14.55 +3016 1.53E+08 
1.83E+05 1.67E+05 1.76515484 911.57 269473090.2 1609.06 +3023 6.87E+08 1.73E+07 1.39E+07 0.025410876 49.28 17451636.75 1.25 +3026 4.98E+08 1.65E+07 5.80E+06 0.021576355 85.88 10741234.87 1.85 +3802 8.09E+08 1.35E+07 1.79E+07 0.071158739 45.24 57571972.82 3.22 +3807 8.91E+08 8.34E+06 2.09E+07 0.093440724 42.54 83278720.61 3.98 +3808 1.01E+09 1.33E+07 3.13E+07 0.100031874 32.29 101176693.2 3.23 +3815 4.04E+08 5.80E+06 2.50E+06 0.059207428 161.8 23918416.16 9.58 +3819 8.77E+08 2.25E+07 3.22E+07 0.068159225 27.26 59791619.21 1.86 +3822 4.75E+06 4.37E+06 2.23E+08 0.223681176 0.02 1061561.56 0 +3824 4.18E+08 1.40E+07 5.36E+07 0.189709427 7.8 79374948.89 1.48 +3825 3.20E+08 1.28E+07 6.70E+06 0.095966822 47.7 30681774.39 4.58 +3830 3.09E+08 8.37E+06 5.01E+07 0.167033284 6.16 51537303.3 1.03 +3837 2.97E+08 8.83E+06 7.87E+06 0.085301418 37.66 25292025.99 3.21 +3846 1.47E+08 6.42E+06 2.01E+07 0.396131322 7.32 58159300.75 2.9 +3853 4.00E+08 1.10E+07 3.38E+06 0.053590099 118.08 21415832.78 6.33 +3909 4.51E+08 1.26E+07 5.98E+06 0.089317487 75.35 40278682.17 6.73 +4213 2.48E+08 6.57E+06 8.68E+06 0.056189146 28.63 13961619.64 1.61 +4233 1.15E+08 4.29E+06 1.48E+06 0.048069618 77.47 5528036.94 3.72 +6905 3.29E+08 1.59E+07 1.42E+06 0.02579479 231.89 8483995.64 5.98 +6907 3.90E+07 1.39E+07 2.29E+06 0.030364173 17.04 1183203.17 0.52 +6910 5.71E+07 2.49E+06 0.00E+00 0.0628197 #DIV/0! 3587967.08 #DIV/0! +6924 3.15E+08 3.09E+06 0.00E+00 0.103199558 #DIV/0! 32491680.53 #DIV/0! +6925 1.98E+08 6.92E+06 8.10E+05 0.057894553 244.13 11446551.05 14.13 +6931 2.27E+08 3.31E+06 1.88E+05 0.064119373 1204.89 14527227.67 77.26 +6937 5.06E+08 4.30E+06 1.91E+07 0.198696653 26.42 100466402.3 5.25 +6938 8.65E+08 5.64E+06 1.37E+07 0.062261605 63.28 53833257.98 3.94 +6944 2.58E+08 6.34E+05 5.22E+07 0.383441877 4.94 98819798.44 1.89 +6945 6.02E+07 3.85E+05 8.52E+04 0.569981343 705.68 34284692.43 402.22 +6948 1.71E+06 9.08E+05 0.00E+00 0.238430233 #DIV/0! 407680.89 #DIV/0! 
+6949 4.56E+06 4.56E+06 1.14E+06 0.222809281 3.99 1016387.76 0.89 +7116 1.46E+08 1.08E+07 6.47E+05 0.048757905 224.91 7099785.99 10.97 +7151 2.44E+08 3.33E+07 1.13E+06 0.024355087 215.23 5941859.77 5.24 +7405 3.10E+07 2.18E+06 9.62E+02 0.021405075 32182.93 662700.64 688.88 +8319 7.11E+08 3.90E+07 5.83E+08 0.216726992 1.22 154114248.7 0.26 +8405 8.73E+08 1.89E+07 4.60E+06 0.056996021 189.6 49758681.15 10.81 +8406 7.48E+08 2.11E+07 3.92E+07 0.086834152 19.08 64922949.38 1.66 +8408 6.20E+08 4.22E+07 2.79E+06 0.083540245 222 51820135.74 18.55 +8409 1.12E+09 1.89E+07 3.83E+06 0.111183394 291.99 124327140 32.46 +8411 1.76E+08 3.86E+07 1.02E+09 0.268864859 0.17 47374851.51 0.05 +8413 9.39E+08 4.19E+06 8.79E+08 0.160511247 1.07 150689859.9 0.17 +8415 2.02E+08 4.97E+06 1.45E+04 0.083200478 13912.43 16803724.26 1157.52 +8416 1.58E+09 2.05E+07 6.32E+07 0.057697208 25.03 91240675.37 1.44 +8418 4.46E+08 1.59E+07 1.02E+06 0.106926675 435.46 47697975.41 46.56 +8423 1.35E+08 2.15E+07 3.62E+08 0.189643651 0.37 25657771.45 0.07 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20150929_104857 +Canton Demande Transfert Prélèvement +101 1996158 1783185 6499877 +102 7687473 3400833 2413366 +117 9455593 7258844 91905910 +118 1680197 927009 2453297 +119 14913805 6312142 43064810 +120 9237155 1711280 38370381 +126 11232811 6685477 NA +135 17869980 4641881 2295350 +140 3314343 270645 4462465 +410 23752815 11540980 186718228 +413 66140247 35286946 12512294 +414 17931836 13574601 222687403 +416 7107337 6059189 74806347 +419 5845312 3662314 82630600 +420 17520059 6171854 190284 +421 75720778 26433874 335272 +427 10278137 7630313 26832053 +429 49227482 22740889 27954453 +430 33127516 16188766 404505420 +505 14600968 2274441 16188929 +509 25129456 4695078 132436999 +512 18912310 9735314 82909973 +515 2471809 1012424 1771588 +516 3074600 1865905 23653875 +518 10473109 11717838 41345088 +522 21412517 9366498 20826822 +523 18264312 6340220 680735 +524 8074271 4877621 30607462 +717 10031680 5085912 3648035 +722 13948734 6801148 3874220 +724 16619700 9426993 1350980 +1307 17064116 4277259 118665741 +1309 36650504 4117045 1095579683 +1312 12619219 4506874 366161408 +1326 46989427 6548293 318905499 +1327 68286983 26937707 752308731 +1331 9451031 208286 461656691 +1333 7396113 159803 464597 +2103 9961064 6399730 2442947 +2114 16205546 8467377 5050277 +2134 12843182 8562008 2109520 +2138 5076429 1024085 945883 +2602 49359090 23575813 143483131 +2604 38660028 7771437 13594550 +2607 54079207 26195595 63587596 +2611 16775211 10616882 20631386 +2613 17621885 11509789 38013023 +2615 36739152 9617466 50325536 +2616 31442824 16668205 61577479 +2619 33273439 15718516 32457630 +2621 37289396 12905994 38450839 +2623 13850243 7507610 16015650 +2625 30732207 6535981 45874726 +2626 30097727 17562311 42665939 +2628 14259424 10142105 68835227 +2629 3077127 1943318 29721243 +2632 3385242 1185780 6035656 +2634 18301410 6272122 72171351 +3006 27950055 9408741 1189743 +3009 16492248 2748523 59789500 +3016 6634166 132071 167472 +3023 49016518 9031538 13936608 +3026 38721423 10067815 5796988 +3802 18739994 9207440 17882199 +3807 19260472 5236084 20948722 +3808 29503393 7550335 31321341 +3815 8873286 3972679 2496776 +3819 28402029 9938117 32182113 +3822 1866608 1939115 222929443 +3824 11516666 7299471 53644868 +3825 8447658 6913428 6702247 +3830 7561324 5178746 50108807 +3837 8031664 4500270 7873546 +3846 5134188 3235131 20060195 +3853 10386788 6057212 3384292 +3909 12733975 7597939 5984921 +4213 11272915 3709961 8678187 +4233 4757650 2976539 1484500 
+6905 15762446 8522970 1418378 +6907 7306081 6905993 2286260 +6910 3758336 1720766 0 +6924 13162946 2749781 0 +6925 8741147 3229220 809862 +6931 10203010 2441791 188038 +6937 11804344 2213880 19138972 +6938 21645031 3468583 13662895 +6944 5674570 454360 52156948 +6945 1381959 194632 85238 +6948 560169 569808 0 +6949 2188410 2739301 1142848 +7116 9540910 5070259 647440 +7151 21781650 16114095 1133515 +7405 1635286 1121283 962 +8319 48927329 17824441 583051055 +8405 37675022 8264317 4604623 +8406 40588871 13847918 39176247 +8408 42493477 20031575 2794154 +8409 42588632 12716137 3829598 +8411 28770502 15244993 1023324262 +8413 44355339 2610670 879267009 +8415 18259665 4183593 14517 +8416 54608233 11430216 63175054 +8418 28969145 10270372 1024393 +8423 12822393 9148334 361745512 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151015_085135 +Canton Demande Transfert Prélèvement +101 42979140 4801520 6499877 +102 187359319 6273717 2413366 +117 216059200 18648976 91905910 +118 68593511 1538618 2453297 +119 588804225 13604335 43064810 +120 420454183 2541609 38370381 +126 180568860 13952856 NA +135 558255954 2155895 2295350 +140 111693985 421917 4462465 +410 254082334 17158183 186718228 +413 485701376 83434054 12512294 +414 179993169 26584063 222687403 +416 25895785 10299408 74806347 +419 55400357 5942310 82630600 +420 191065162 10427296 190284 +421 1817307647 52587979 335272 +427 82979864 11088314 26832053 +429 933788908 54814805 27954453 +430 108956355 31264702 404505420 +505 290253991 3290760 16188929 +509 442091738 6684442 132436999 +512 117082440 19097835 82909973 +515 19940068 1202159 1771588 +516 24898521 2287386 23653875 +518 50950139 29276102 41345088 +522 185138312 15404135 20826822 +523 198515886 11101468 680735 +524 68162706 8771838 30607462 +717 203556730 7355357 3648035 +722 210205986 11906495 3874220 +724 198165482 18254973 1350980 +1307 550930091 7650632 118665741 +1309 853145445 7624919 1095579683 +1312 302847151 10919747 366161408 +1326 1409395213 13855045 318905499 +1327 548789459 52152364 752308731 +1331 266023760 216994 461656691 +1333 179989808 355754 464597 +2103 131477200 13073364 2442947 +2114 370595007 16642733 5050277 +2134 152101047 17859510 2109520 +2138 128194939 2199824 945883 +2602 758636766 43491934 143483131 +2604 953046165 10382413 13594550 +2607 587491021 44568087 63587596 +2611 639574672 16130373 20631386 +2613 286520779 24381716 38013023 +2615 424503358 17829166 50325536 +2616 473481438 31527336 61577479 +2619 664718120 30836336 32457630 +2621 1487415296 18915236 38450839 +2623 633210070 10733625 16015650 +2625 444167864 10850438 45874726 +2626 1070338661 26387767 42665939 +2628 241754321 21895140 68835227 +2629 48766445 3450860 29721243 +2632 63406795 1802414 6035656 +2634 369731073 9257434 72171351 +3006 667754280 19105243 1189743 +3009 230504419 5859455 59789500 +3016 152662579 182571 167472 +3023 686778255 17290748 13936608 +3026 497824357 16481216 5796988 +3802 809063984 13525368 17882199 +3807 891246521 8335006 20948722 +3808 1011444546 13348402 31321341 +3815 403976615 5795513 2496776 +3819 877234438 22542101 32182113 +3822 4745869 4367639 222929443 +3824 418402765 13973297 53644868 +3825 319712312 12763988 6702247 +3830 308545112 8365927 50108807 +3837 296501822 8834594 7873546 +3846 146818233 6416504 20060195 +3853 399622939 11027480 3384292 +3909 450960765 12577269 5984921 +4213 248475384 6572661 8678187 +4233 115000641 4289516 1484500 +6905 328903453 15904361 1418378 +6907 38967080 13931640 2286260 +6910 57115317 2490163 0 +6924 314843214 3087119 
0 +6925 197713782 6923700 809862 +6931 226565341 3310428 188038 +6937 505627049 4303772 19138972 +6938 864630105 5639857 13662895 +6944 257717804 633639 52156948 +6945 60150552 385322 85238 +6948 1709854 907649 0 +6949 4561694 4561694 1142848 +7116 145613023 10808483 647440 +7151 243967915 33333109 1133515 +7405 30959977 2175478 962 +8319 711098545 39019974 583051055 +8405 873020258 18932685 4604623 +8406 747666074 21054244 39176247 +8408 620301455 42150720 2794154 +8409 1118216804 18925529 3829598 +8411 176203211 38645753 1023324262 +8413 938811842 4190309 879267009 +8415 201966679 4966839 14517 +8416 1581370729 20465273 63175054 +8418 446081162 15901210 1024393 +8423 135294650 21459288 361745512 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151015_085135 +Canton Demande Transfert Prélèvement +101 42979140 4801520 6499877 +102 187359319 6273717 2413366 +117 216059200 18648976 91905910 +118 68593511 1538618 2453297 +119 588804225 13604335 43064810 +120 420454183 2541609 38370381 +126 180568860 13952856 0 +135 558255954 2155895 2295350 +140 111693985 421917 4462465 +410 254082334 17158183 186718228 +413 485701376 83434054 12512294 +414 179993169 26584063 222687403 +416 25895785 10299408 74806347 +419 55400357 5942310 82630600 +420 191065162 10427296 190284 +421 1817307647 52587979 335272 +427 82979864 11088314 26832053 +429 933788908 54814805 27954453 +430 108956355 31264702 404505420 +505 290253991 3290760 16188929 +509 442091738 6684442 132436999 +512 117082440 19097835 82909973 +515 19940068 1202159 1771588 +516 24898521 2287386 23653875 +518 50950139 29276102 41345088 +522 185138312 15404135 20826822 +523 198515886 11101468 680735 +524 68162706 8771838 30607462 +717 203556730 7355357 3648035 +722 210205986 11906495 3874220 +724 198165482 18254973 1350980 +1307 550930091 7650632 118665741 +1309 853145445 7624919 1095579683 +1312 302847151 10919747 366161408 +1326 1409395213 13855045 318905499 +1327 548789459 52152364 752308731 +1331 266023760 216994 461656691 +1333 179989808 355754 464597 +2103 131477200 13073364 2442947 +2114 370595007 16642733 5050277 +2134 152101047 17859510 2109520 +2138 128194939 2199824 945883 +2602 758636766 43491934 143483131 +2604 953046165 10382413 13594550 +2607 587491021 44568087 63587596 +2611 639574672 16130373 20631386 +2613 286520779 24381716 38013023 +2615 424503358 17829166 50325536 +2616 473481438 31527336 61577479 +2619 664718120 30836336 32457630 +2621 1487415296 18915236 38450839 +2623 633210070 10733625 16015650 +2625 444167864 10850438 45874726 +2626 1070338661 26387767 42665939 +2628 241754321 21895140 68835227 +2629 48766445 3450860 29721243 +2632 63406795 1802414 6035656 +2634 369731073 9257434 72171351 +3006 667754280 19105243 1189743 +3009 230504419 5859455 59789500 +3016 152662579 182571 167472 +3023 686778255 17290748 13936608 +3026 497824357 16481216 5796988 +3802 809063984 13525368 17882199 +3807 891246521 8335006 20948722 +3808 1011444546 13348402 31321341 +3815 403976615 5795513 2496776 +3819 877234438 22542101 32182113 +3822 4745869 4367639 222929443 +3824 418402765 13973297 53644868 +3825 319712312 12763988 6702247 +3830 308545112 8365927 50108807 +3837 296501822 8834594 7873546 +3846 146818233 6416504 20060195 +3853 399622939 11027480 3384292 +3909 450960765 12577269 5984921 +4213 248475384 6572661 8678187 +4233 115000641 4289516 1484500 +6905 328903453 15904361 1418378 +6907 38967080 13931640 2286260 +6910 57115317 2490163 0 +6924 314843214 3087119 0 +6925 197713782 6923700 809862 +6931 226565341 3310428 188038 +6937 505627049 
4303772 19138972 +6938 864630105 5639857 13662895 +6944 257717804 633639 52156948 +6945 60150552 385322 85238 +6948 1709854 907649 0 +6949 4561694 4561694 1142848 +7116 145613023 10808483 647440 +7151 243967915 33333109 1133515 +7405 30959977 2175478 962 +8319 711098545 39019974 583051055 +8405 873020258 18932685 4604623 +8406 747666074 21054244 39176247 +8408 620301455 42150720 2794154 +8409 1118216804 18925529 3829598 +8411 176203211 38645753 1023324262 +8413 938811842 4190309 879267009 +8415 201966679 4966839 14517 +8416 1581370729 20465273 63175054 +8418 446081162 15901210 1024393 +8423 135294650 21459288 361745512 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151015_085135 +Canton Demande Transfert Prélèvement +101 42979140 4801520 6499877 +102 187359319 6273717 2413366 +117 216059200 18648976 91905910 +118 68593511 1538618 2453297 +119 588804225 13604335 43064810 +120 420454183 2541609 38370381 +126 180568860 13952856 0 +135 558255954 2155895 2295350 +140 111693985 421917 4462465 +410 254082334 17158183 186718228 +413 485701376 83434054 12512294 +414 179993169 26584063 222687403 +416 25895785 10299408 74806347 +419 55400357 5942310 82630600 +420 191065162 10427296 0 +421 1817307647 52587979 335272 +427 82979864 11088314 26832053 +429 933788908 54814805 27954453 +430 108956355 31264702 404505420 +505 290253991 3290760 16188929 +509 442091738 6684442 132436999 +512 117082440 19097835 82909973 +515 19940068 1202159 1771588 +516 24898521 2287386 23653875 +518 50950139 29276102 41345088 +522 185138312 15404135 20826822 +523 198515886 11101468 0 +524 68162706 8771838 30607462 +717 203556730 7355357 3648035 +722 210205986 11906495 3874220 +724 198165482 18254973 1350980 +1307 550930091 7650632 118665741 +1309 853145445 7624919 1095579683 +1312 302847151 10919747 366161408 +1326 1409395213 13855045 318905499 +1327 548789459 52152364 752308731 +1331 266023760 216994 461656691 +1333 179989808 355754 0 +2103 131477200 13073364 2442947 +2114 370595007 16642733 5050277 +2134 152101047 17859510 2109520 +2138 128194939 2199824 945883 +2602 758636766 43491934 143483131 +2604 953046165 10382413 13594550 +2607 587491021 44568087 63587596 +2611 639574672 16130373 20631386 +2613 286520779 24381716 38013023 +2615 424503358 17829166 50325536 +2616 473481438 31527336 61577479 +2619 664718120 30836336 32457630 +2621 1487415296 18915236 38450839 +2623 633210070 10733625 16015650 +2625 444167864 10850438 45874726 +2626 1070338661 26387767 42665939 +2628 241754321 21895140 68835227 +2629 48766445 3450860 29721243 +2632 63406795 1802414 6035656 +2634 369731073 9257434 72171351 +3006 667754280 19105243 1189743 +3009 230504419 5859455 59789500 +3016 152662579 182571 167472 +3023 686778255 17290748 13936608 +3026 497824357 16481216 5796988 +3802 809063984 13525368 17882199 +3807 891246521 8335006 20948722 +3808 1011444546 13348402 31321341 +3815 403976615 5795513 2496776 +3819 877234438 22542101 32182113 +3822 4745869 4367639 222929443 +3824 418402765 13973297 53644868 +3825 319712312 12763988 6702247 +3830 308545112 8365927 50108807 +3837 296501822 8834594 7873546 +3846 146818233 6416504 20060195 +3853 399622939 11027480 3384292 +3909 450960765 12577269 5984921 +4213 248475384 6572661 8678187 +4233 115000641 4289516 1484500 +6905 328903453 15904361 1418378 +6907 38967080 13931640 2286260 +6910 57115317 2490163 0 +6924 314843214 3087119 0 +6925 197713782 6923700 809862 +6931 226565341 3310428 188038 +6937 505627049 4303772 19138972 +6938 864630105 5639857 13662895 +6944 257717804 633639 52156948 +6945 
60150552 385322 85238 +6948 1709854 907649 0 +6949 4561694 4561694 1142848 +7116 145613023 10808483 647440 +7151 243967915 33333109 1133515 +7405 30959977 2175478 0 +8319 711098545 39019974 583051055 +8405 873020258 18932685 4604623 +8406 747666074 21054244 39176247 +8408 620301455 42150720 0 +8409 1118216804 18925529 3829598 +8411 176203211 38645753 1023324262 +8413 938811842 4190309 879267009 +8415 201966679 4966839 0 +8416 1581370729 20465273 63175054 +8418 446081162 15901210 1024393 +8423 135294650 21459288 361745512 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151016_135624 +Canton Demande Transfert Prélèvement +101 1996158 1783185 6499877 +102 7687473 3400833 2413366 +117 9455593 7258844 91905910 +118 1680197 927009 2453297 +119 14308619 6294197 43064810 +120 9237155 1711280 38370381 +126 11232811 6685477 0 +135 4874977 1866152 2295350 +140 3314343 270645 4462465 +410 23752815 11540980 186718228 +413 66140247 35286946 12512294 +414 17931836 13574601 222687403 +416 7107337 6059189 74806347 +419 5845312 3662314 82630600 +420 16995266 6732905 0 +421 75720778 26433874 335272 +427 10174119 7716130 26832053 +429 49227482 22740889 27954453 +430 33127516 16188766 404505420 +505 14636429 2380740 16188929 +509 25129456 4695078 132436999 +512 18878906 9768820 82909973 +515 2471809 1012424 1771588 +516 3051004 1887093 23653875 +518 10473109 11717838 41345088 +522 21323816 9462266 20826822 +523 18245639 6359154 0 +524 8074271 4877621 30607462 +717 9697337 5297804 3648035 +722 14018373 6615031 3874220 +724 16195728 9852300 1350980 +1307 17064116 4277259 118665741 +1309 36650504 4117045 1095579683 +1312 12619219 4506874 366161408 +1326 46989427 6548293 318905499 +1327 67880440 27565364 752308731 +1331 9451031 208286 461656691 +1333 7396113 159803 0 +2103 9960646 6400223 2442947 +2114 15972432 8797688 5050277 +2134 12832918 8574757 2109520 +2138 5047575 1094448 945883 +2602 49359090 23575813 143483131 +2604 38660028 7771437 13594550 +2607 53979211 26297464 63587596 +2611 16775211 10616882 20631386 +2613 17559605 11524079 38013023 +2615 35973463 10595227 50325536 +2616 30782424 17322580 61577479 +2619 33273439 15718516 32457630 +2621 37289396 12905994 38450839 +2623 13850243 7507610 16015650 +2625 30726660 6540253 45874726 +2626 30097727 17562311 42665939 +2628 14259424 10142105 68835227 +2629 3077127 1943318 29721243 +2632 3385242 1185780 6035656 +2634 18301410 6272122 72171351 +3006 27949268 9409280 1189743 +3009 16491598 2748813 59789500 +3016 6634166 132071 167472 +3023 48536034 9525839 13936608 +3026 38223860 10643678 5796988 +3802 18739994 9207440 17882199 +3807 19260472 5236084 20948722 +3808 29503393 7550335 31321341 +3815 8873286 3972679 2496776 +3819 28402029 9938117 32182113 +3822 1866608 1939115 222929443 +3824 11516666 7299471 53644868 +3825 8447658 6913428 6702247 +3830 7519680 5272942 50108807 +3837 8031664 4500270 7873546 +3846 5134188 3235131 20060195 +3853 10386788 6057212 3384292 +3909 12744426 7614302 5984921 +4213 11693931 3077089 8678187 +4233 4757650 2976539 1484500 +6905 15992504 8146471 1418378 +6907 7306081 6905993 2286260 +6910 3696690 1762059 0 +6924 13297084 2505133 0 +6925 8767375 3305595 809862 +6931 10224646 2413292 188038 +6937 11804344 2213880 19138972 +6938 21645031 3468583 13662895 +6944 5655726 477313 52156948 +6945 1381959 194632 85238 +6948 560169 569808 0 +6949 2188410 2739301 1142848 +7116 9540910 5070259 647440 +7151 21341894 16632570 1133515 +7405 1635286 1121283 0 +8319 49352421 18303609 583051055 +8405 36910727 9006609 4604623 +8406 40577568 13841830 
39176247 +8408 42085387 20788634 0 +8409 42103161 13086249 3829598 +8411 24825734 19806428 1023324262 +8413 43633760 3450661 879267009 +8415 18247649 4199201 0 +8416 54558903 11448215 63175054 +8418 28904582 10343392 1024393 +8423 12822393 9148334 361745512 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151027_181009 +Canton Demande Transfert Prélèvement +101 11070584 3692728 6499877 +102 44564343 5488964 2413366 +117 55074493 13308500 91905910 +118 16005575 1659354 2453297 +119 135888495 10693247 43064810 +120 93686144 2419641 38370381 +126 46814214 12281660 0 +135 124056879 2155895 2295350 +140 24906011 409147 4462465 +410 60911058 16198018 186718228 +413 149914052 65144271 12512294 +414 51096119 21967354 222687403 +416 11399086 8523399 74806347 +419 14402162 5569987 82630600 +420 45369030 9708078 0 +421 426169722 48882651 335272 +427 22308633 9981772 26832053 +429 230763310 45115731 27954453 +430 49274065 31592988 404505420 +505 65137572 3016612 16188929 +509 99306072 6259180 132436999 +512 35900353 15547780 82909973 +515 4530008 1136075 1771588 +516 6004786 2381505 23653875 +518 26194425 26629562 41345088 +522 45830502 12839891 20826822 +523 49341556 9420301 0 +524 17864905 7180299 30607462 +717 44974816 7437001 3648035 +722 53066355 10820363 3874220 +724 51193292 16312425 1350980 +1307 125643290 7725479 118665741 +1309 194488804 7966318 1095579683 +1312 71510508 9032462 366161408 +1326 318108220 11605195 318905499 +1327 150347778 45499086 752308731 +1331 59116391 216994 461656691 +1333 40066967 342482 0 +2103 35800605 11557939 2442947 +2114 87703646 15701628 5050277 +2134 43312065 15441580 2109520 +2138 29488881 2027891 945883 +2602 199480243 38709884 143483131 +2604 212983618 10021750 13594550 +2607 145481175 35224646 63587596 +2611 146358305 16602640 20631386 +2613 74375742 23204634 38013023 +2615 102263341 17565049 50325536 +2616 120768194 27641798 61577479 +2619 161470572 25435995 32457630 +2621 335143087 17598952 38450839 +2623 145930986 11515535 16015650 +2625 102354257 10396889 45874726 +2626 245940570 28319537 42665939 +2628 66846580 18999377 68835227 +2629 13270580 3341151 29721243 +2632 15714685 2142686 6035656 +2634 83906566 10147820 72171351 +3006 159846772 15545791 1189743 +3009 53804382 4810260 59789500 +3016 33925039 182564 167472 +3023 162387067 15505485 13936608 +3026 115518134 15349396 5796988 +3802 181602196 13271583 17882199 +3807 199360202 7866183 20948722 +3808 229511003 12940014 31321341 +3815 90346002 5858714 2496776 +3819 202334103 17822221 32182113 +3822 3614582 3614455 222929443 +3824 96235861 12275769 53644868 +3825 74156504 11923861 6702247 +3830 70952811 7896863 50108807 +3837 69137389 8969147 7873546 +3846 34525919 6407858 20060195 +3853 91575764 11559502 3384292 +3909 105446301 12636385 5984921 +4213 58590406 4769086 8678187 +4233 25806256 4529951 1484500 +6905 80405499 14094624 1418378 +6907 16107584 11970816 2286260 +6910 13917426 3539921 0 +6924 70216445 3047392 0 +6925 46260774 5870017 809862 +6931 50395147 3309480 188038 +6937 114361991 3786957 19138972 +6938 193674170 5951121 13662895 +6944 57386808 614889 52156948 +6945 13613249 335263 85238 +6948 935103 1110794 0 +6949 4113879 5419771 1142848 +7116 38246077 9352110 647440 +7151 72383211 29103091 1133515 +7405 7714358 1964614 0 +8319 179475498 36300359 583051055 +8405 201423262 18631940 4604623 +8406 172756496 20230152 39176247 +8408 157846563 38320920 0 +8409 249949967 18937203 3829598 +8411 59121349 34818900 1023324262 +8413 209432823 3537892 879267009 +8415 45313901 4739437 0 +8416 
364640103 20014730 63175054
+8418 104330372 14647132 1024393
+8423 42521218 17583940 361745512
[Per-canton output tables from the J2K_Rhone_Irrigation model runs follow, one block per run. Each block opens with the run's output path, "C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - <timestamp>", then the header "Canton Demande Transfert Prélèvement" (canton ID, demand, transfer, withdrawal), then one row of values for each of roughly 100 cantons. The per-canton rows are omitted here; blocks appear for runs 20151027_181103, 20151029_112352 (twice), 20151029_140608 (three times), 20151029_172950 (twice), 20151029_173042 (twice), 20151029_173119 (twice), 20151029_173210 (twice), 20151029_173313 (twice), 20151029_173806 (three times), 20151030_165000 (twice) and 20151105_150138. The last block (20151105_190616, "cantons analyses - en m3", i.e. canton analyses in m3) adds a Demande/Prélèvement ratio and a corrected demand, Demande_corrigée(m3); it follows below.]
12170476 +2607 224628131 26059560 64529426 +2611 104353746 6791882 18864524 +2613 111158480 8497753 38455857 +2615 177654409 10339225 40220667 +2616 157722183 16002424 61813667 +2619 224344442 14254296 27135351 +2621 254841665 11614199 34567976 +2623 126052256 3454972 15321317 +2625 160059284 7690556 50560369 +2626 207498996 11364828 34589649 +2628 56668054 8746064 68828429 +2629 14255578 2008109 34945421 +2632 12259262 1503744 6013900 +2634 138054597 4195322 70727276 +3006 267490142 10850534 1149098 +3009 121204977 2981476 65387022 +3016 68447861 470753 86079 +3023 440024757 13857230 15166960 +3026 310834207 13322649 5527811 +3802 113752692 5869679 16890619 +3807 104835669 4914488 19841286 +3808 187939094 6568791 27901619 +3815 36069354 3191115 2387143 +3819 133955763 11814054 32097905 +3822 2120352 1631404 204368571 +3824 67637835 4436537 51884857 +3825 45163129 3748596 5585667 +3830 38520729 3811423 51674571 +3837 44429676 2684731 7115524 +3846 26743438 2826389 18797524 +3853 51690961 3870613 3509429 +3909 63483672 5967624 5320571 +4213 53801142 3404795 8442667 +4233 13549660 1825697 1460476 +6905 104498201 9213797 1331143 +6907 13713501 8133213 2455406 +6910 13345150 1123837 0 +6924 70515978 2719917 0 +6925 44110958 3199322 919143 +6931 52043264 2378762 167238 +6937 73639399 2720457 19007143 +6938 143624842 4424733 12220095 +6944 45052714 674276 52351790 +6945 10758933 239403 105533 +6948 566376 566376 0 +6949 2563662 2563662 1042512 +7116 61603979 5721859 0 +7151 107946733 18473020 769095 +7405 2445942 1277390 0 +8319 11103538 2994342 612180480 +8405 284424424 10840090 2173006 +8406 319173224 10998887 34718190 +8408 253151853 24273166 0 +8409 465731711 8208402 0 +8411 81678532 19734934 1198138870 +8413 395412440 6450821 1039346191 +8415 85650019 4297547 0 +8416 467202420 13141097 64785321 +8418 238709030 14269175 0 +8423 37736640 12686105 423579905 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151105_190616- cantons analyses - en m3 +Canton Demande(m3) Transfert(m3) Prélèvement(m3) Demande/Prélèvement Demande_corrigée(m3) +101 7426426 1176074 5736286 1.29 1853606 +117 36040367 5990710 84234381 0.43 13632359 +118 7315780 835391 2187571 3.34 999171 +119 51274544 6753487 41914048 1.22 8949409 +120 51793191 2421042 36697090 1.41 9868748 +140 19266094 755264 4194571 4.59 5586590 +518 9916446 8790808 39115762 0.25 845210 +717 50487756 3747753 3800524 13.28 4172269 +722 63055484 6119612 3188381 19.78 5120948 +724 57716281 8936782 1076143 53.63 4319174 +2602 187942394 28795636 137560762 1.37 51238951 +2604 281954485 6771017 12170476 23.17 75859092 +2607 224628131 26059560 64529426 3.48 30425399 +2611 104353746 6791882 18864524 5.53 14758256 +2613 111158480 8497753 38455857 2.89 22695509 +2615 177654409 10339225 40220667 4.42 47416297 +2616 157722183 16002424 61813667 2.55 21382183 +2619 224344442 14254296 27135351 8.27 31893631 +2621 254841665 11614199 34567976 7.37 39496222 +2623 126052256 3454972 15321317 8.23 19775804 +2625 160059284 7690556 50560369 3.17 6284005 +2626 207498996 11364828 34589649 6 31537820 +2628 56668054 8746064 68828429 0.82 13067667 +2629 14255578 2008109 34945421 0.41 7263628 +2632 12259262 1503744 6013900 2.04 5410354 +2634 138054597 4195322 70727276 1.95 37515689 +3006 267490142 10850534 1149098 232.78 32607312 +3023 440024757 13857230 15166960 29.01 11181414 +3026 310834207 13322649 5527811 56.23 6706669 +3802 113752692 5869679 16890619 6.73 8094498 +3807 104835669 4914488 19841286 5.28 9795921 +3808 187939094 6568791 27901619 6.74 18799900 +3815 36069354 
3191115 2387143 15.11 2135574 +3819 133955763 11814054 32097905 4.17 9130321 +3822 2120352 1631404 204368571 0.01 474283 +3824 67637835 4436537 51884857 1.3 12831535 +3825 45163129 3748596 5585667 8.09 4334162 +3830 38520729 3811423 51674571 0.75 6434244 +3837 44429676 2684731 7115524 6.24 3789914 +3846 26743438 2826389 18797524 1.42 10593914 +3853 51690961 3870613 3509429 14.73 2770124 +4213 53801142 3404795 8442667 6.37 3023040 +4233 13549660 1825697 1460476 9.28 651327 +6907 13713501 8133213 2455406 5.59 416399 +6924 70515978 2719917 0 0 7277218 +6931 52043264 2378762 167238 311.19 3336981 +6937 73639399 2720457 19007143 3.87 14631902 +6938 143624842 4424733 12220095 11.75 8942313 +6944 45052714 674276 52351790 0.86 17275097 +6945 10758933 239403 105533 101.95 6132391 +6948 566376 566376 0 0 135041 +6949 2563662 2563662 1042512 2.46 571208 +7405 2445942 1277390 0 0 52356 +8405 284424424 10840090 2173006 130.89 16211061 +8406 319173224 10998887 34718190 9.19 27715136 +8409 465731711 8208402 0 0 51781632 +8413 395412440 6450821 1039346191 0.38 63468144 +8415 85650019 4297547 0 0 7126123 +8416 467202420 13141097 64785321 7.21 26956275 +8418 238709030 14269175 0 0 25524363 +8423 37736640 12686105 423579905 0.09 7156514 +410 139128347 14070098 220069619 0.63 16760462 +413 15608098 4876513 10848952 1.44 1829715 +414 68823316 12195963 257086476 0.27 11952538 +416 11069543 6153800 78595143 0.14 1782398 +419 32178382 4015681 100257476 0.32 5589452 +420 110202884 8475810 0 0 4002593 +421 22481057 2500606 242860 92.57 2715158 +427 34581292 6864435 23294190 1.48 9478247 +429 11856241 2598172 30074878 0.39 1632810 +430 4938810 1873127 477117709 0.01 690836 +505 79153962 2824103 16082048 4.92 5616666 +509 125196422 5473465 134828333 0.93 14868178 +512 54008372 9781866 78728476 0.69 9697487 +515 8421958 965850 2000810 4.21 2382744 +516 10670537 1612129 27357286 0.39 3705212 +522 66706239 11376658 20441000 3.26 5492904 +523 74632355 7294126 0 0 7782007 +524 22536653 5348926 31558048 0.71 2846432 +1326 645482830 6553090 342713757 1.88 147685052 +1327 22320298 4044902 851938524 0.03 2992482 +8319 11103538 2994342 612180480 0.02 2406436 +8408 253151853 24273166 0 0 21148368 +8411 81678532 19734934 1198138870 0.07 21960487 +102 24634604 5832310 2381571 10.34 1367001 +126 62465080 7146419 0 0 627741 +135 8679818 349606 2680238 3.24 292902 +2103 41359289 6813648 2094190 19.75 1834100 +2114 106202622 7355393 5030476 21.11 3292348 +2134 60890619 9640859 1899524 32.06 1704547 +2138 55287075 945755 1029048 53.73 819457 +3909 63483672 5967624 5320571 11.93 5670202 +6905 104498201 9213797 1331143 78.5 2695509 +6910 13345150 1123837 0 0 838338 +6925 44110958 3199322 919143 47.99 2553784 +7116 61603979 5721859 0 0 3003681 +7151 107946733 18473020 769095 140.36 2629052 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151105_190853- cantons analyses - en m3 +Canton Demande(m3) Transfert(m3) Prélèvement(m3) Demande/Prélèvement Demande_corrigée(m3) +101 3420629 904098 5736286 0.6 853775 +117 16371486 4279784 84234381 0.19 6192555 +118 3275600 673350 2187571 1.5 447373 +119 24689371 5118239 41914048 0.59 4309259 +120 25390229 2004126 36697090 0.69 4837890 +140 10789429 692571 4194571 2.57 3128611 +518 8330235 7910280 39115762 0.21 710012 +717 38222613 3670475 3800524 10.06 3158687 +722 46816167 6179544 3188381 14.68 3802099 +724 50544848 8744615 1076143 46.97 3782502 +2602 119321675 25437706 137560762 0.87 32530805 +2604 164484747 6718477 12170476 13.52 44254176 +2607 184252951 25859687 64529426 2.86 24956667 +2611 
49815829 5925494 18864524 2.64 7045217 +2613 57382995 7553336 38455857 1.49 11716032 +2615 143176095 9726104 40220667 3.56 38213970 +2616 92347532 14094083 61813667 1.49 12519430 +2619 101357999 12481765 27135351 3.74 14409426 +2621 112785388 10006579 34567976 3.26 17479860 +2623 49252971 3011733 15321317 3.21 7727090 +2625 119923535 7510043 50560369 2.37 4708256 +2626 93386599 10536242 34589649 2.7 14193850 +2628 32261462 7433194 68828429 0.47 7439501 +2629 7904817 1790253 34945421 0.23 4027733 +2632 6830973 1409711 6013900 1.14 3014699 +2634 80773107 4182888 70727276 1.14 21949713 +3006 101706901 9395906 1149098 88.51 12398171 +3023 242990267 12440814 15166960 16.02 6174595 +3026 191939888 12956690 5527811 34.72 4141363 +3802 51609314 5218123 16890619 3.06 3672454 +3807 46514743 4327572 19841286 2.34 4346371 +3808 85335558 5806084 27901619 3.06 8536276 +3815 15858686 2763665 2387143 6.64 938952 +3819 63870795 9454426 32097905 1.99 4353384 +3822 1365771 1279050 204368571 0.01 305497 +3824 36440000 3831261 51884857 0.7 6913012 +3825 19808057 3252036 5585667 3.55 1900916 +3830 19929486 3167813 51674571 0.39 3328887 +3837 19023771 2438040 7115524 2.67 1622755 +3846 11234857 1956756 18797524 0.6 4450479 +3853 22035143 3466321 3509429 6.28 1180865 +4213 42024474 3418986 8442667 4.98 2361319 +4233 11247694 1829025 1460476 7.7 540672 +6907 13371378 8098285 2455406 5.45 406011 +6924 56948351 2720285 0 0 5877045 +6931 41888471 2338518 167238 250.47 2685862 +6937 31757829 2180322 19007143 1.67 6310174 +6938 60326246 3902751 12220095 4.94 3756009 +6944 18097371 610613 52351790 0.35 6939290 +6945 4280343 239403 105533 40.56 2439716 +6948 566373 566373 0 0 135041 +6949 1726000 1726000 1042512 1.66 384569 +7405 1666233 1098927 0 0 35666 +8405 122304588 9211486 2173006 56.28 6970875 +8406 152935501 9884113 34718190 4.41 13280025 +8409 256746333 7904263 0 0 28545929 +8413 190138236 6385197 1039346191 0.18 30519325 +8415 80599453 4337190 0 0 6705913 +8416 217486679 11586297 64785321 3.36 12548374 +8418 149555765 13793208 0 0 15991501 +8423 23341883 10752012 423579905 0.06 4426640 +410 119005697 13759444 220069619 0.54 14336334 +413 13031174 4871648 10848952 1.2 1527626 +414 59759943 11210264 257086476 0.23 10378503 +416 11128201 6212458 78595143 0.14 1791843 +419 28332789 3760971 100257476 0.28 4921465 +420 90085251 8483050 0 0 3271917 +421 10185543 1991723 242860 41.94 1230163 +427 32494973 6851755 23294190 1.39 8906416 +429 6001692 2222708 30074878 0.2 826537 +430 4694654 1628975 477117709 0.01 656684 +505 60234753 2825464 16082048 3.75 4274183 +509 97560574 5426185 134828333 0.72 11586178 +512 52953580 9038599 78728476 0.67 9508093 +515 8421958 965850 2000810 4.21 2382744 +516 10442128 1613340 27357286 0.38 3625900 +522 63453427 11379645 20441000 3.1 5225052 +523 58503681 7048867 0 0 6100251 +524 21874278 5346393 31558048 0.69 2762773 +1326 253777371 5659430 342713757 0.74 58063705 +1327 18384206 4039810 851938524 0.02 2464771 +8319 7841519 2418923 612180480 0.01 1699469 +8408 157440720 21080452 0 0 13152636 +8411 60957142 16667112 1198138870 0.05 16389233 +102 13820517 5365809 2381571 5.8 766916 +126 39110349 6623292 0 0 393038 +135 2400743 316019 2680238 0.9 81014 +2103 30036922 6372985 2094190 14.34 1332004 +2114 59950686 6946768 5030476 11.92 1858509 +2134 44106133 9199992 1899524 23.22 1234689 +2138 31745289 925284 1029048 30.85 470524 +3909 28214662 4921179 5320571 5.3 2520063 +6905 60592201 8756573 1331143 45.52 1562963 +6910 10867975 1059435 0 0 682723 +6925 28163210 2815230 919143 30.64 
1630496 +7116 39613015 5495679 0 0 1931448 +7151 67099156 17945021 769095 87.24 1634206 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151105_191636- cantons analyses - en m3 +Canton Demande(m3) Transfert(m3) Prélèvement(m3) Demande/Prélèvement Demande_corrigée(m3) +101 5488846 989374 5736286 0.96 1369994 +117 26992413 4805907 84234381 0.32 10209948 +118 4704512 763282 2187571 2.15 642531 +119 38632144 5733936 41914048 0.92 6742817 +120 40420623 2084069 36697090 1.1 7701803 +140 15150080 672868 4194571 3.61 4393069 +518 8231492 7808833 39115762 0.21 701596 +717 37548587 3647082 3800524 9.88 3102986 +722 49407550 5505537 3188381 15.5 4012554 +724 44026531 8154158 1076143 40.91 3294707 +2602 148214101 25263414 137560762 1.08 40407781 +2604 222881988 6504485 12170476 18.31 59965796 +2607 179271289 24349506 64529426 2.78 24281912 +2611 82227878 6056720 18864524 4.36 11629100 +2613 85816937 7403271 38455857 2.23 17521462 +2615 143179143 9343905 40220667 3.56 38214783 +2616 124801550 14343736 61813667 2.02 16919177 +2619 174465417 12944116 27135351 6.43 24802645 +2621 195640581 10560016 34567976 5.66 30321038 +2623 97830535 3143527 15321317 6.39 15348218 +2625 123055826 7297489 50560369 2.43 4831231 +2626 160570997 10585888 34589649 4.64 24405222 +2628 45266044 7475131 68828429 0.66 10438361 +2629 11303570 1754410 34945421 0.32 5759495 +2632 9684106 1418869 6013900 1.61 4273866 +2634 109659563 3991245 70727276 1.55 29799472 +3006 211056318 9797023 1149098 183.67 25727973 +3023 349980911 12544763 15166960 23.08 8893321 +3026 241303466 12565208 5527811 43.65 5206449 +3802 88099218 5351575 16890619 5.22 6269029 +3807 79795861 4644976 19841286 4.02 7456183 +3808 143728078 6030760 27901619 5.15 14377389 +3815 26768369 2920014 2387143 11.21 1584886 +3819 103076375 9843342 32097905 3.21 7025606 +3822 1560593 1361644 204368571 0.01 349075 +3824 52972249 3861787 51884857 1.02 10049335 +3825 33276073 3371667 5585667 5.96 3193399 +3830 31288015 3336115 51674571 0.61 5226140 +3837 32812545 2599940 7115524 4.61 2798957 +3846 20343828 2219756 18797524 1.08 8058827 +3853 38826078 3608545 3509429 11.06 2080693 +4213 42037063 2907599 8442667 4.98 2362027 +4233 9971210 1719780 1460476 6.83 479312 +6907 10952164 7135921 2455406 4.46 332553 +6924 53061915 2619630 0 0 5475966 +6931 38997780 2256719 167238 233.19 2500513 +6937 56558008 2479100 19007143 2.98 11237887 +6938 111796707 3985421 12220095 9.15 6960642 +6944 34171787 673293 52351790 0.65 13102894 +6945 8091638 239403 105533 76.67 4612083 +6948 487991 487991 0 0 116352 +6949 1969215 1969215 1042512 1.89 438759 +7405 1700147 1096726 0 0 36392 +8405 224137604 9573590 2173006 103.15 12774952 +8406 249515176 10181013 34718190 7.19 21666439 +8409 358060386 7763633 0 0 39810369 +8413 311306811 6091815 1039346191 0.3 49968245 +8415 67525121 4106369 0 0 5618122 +8416 349542897 12315655 64785321 5.4 20167649 +8418 184814799 13215933 0 0 19761632 +8423 32050831 11007630 423579905 0.08 6078237 +410 108535642 13172040 220069619 0.49 13075031 +413 12655886 4558039 10848952 1.17 1483631 +414 53852525 11007420 257086476 0.21 9352562 +416 9440499 5529298 78595143 0.12 1520093 +419 24956730 3776615 100257476 0.25 4335036 +420 86864443 7988818 0 0 3154936 +421 16522839 2168430 242860 68.03 1995552 +427 27568439 6240524 23294190 1.18 7556122 +429 8611077 2272992 30074878 0.29 1185895 +430 4021620 1596096 477117709 0.01 562541 +505 62155343 2686735 16082048 3.86 4410465 +509 99287371 5127984 134828333 0.74 11791250 +512 44074307 8657783 78728476 0.56 7913773 +515 6553053 
950162 2000810 3.28 1853993 +516 8419556 1514088 27357286 0.31 2923587 +522 54650639 10629425 20441000 2.67 4500189 +523 59961976 6609884 0 0 6252309 +524 18273572 5032995 31558048 0.58 2307995 +1326 509265308 5985864 342713757 1.49 116518783 +1327 17981604 3649884 851938524 0.02 2410794 +8319 8543597 2530891 612180480 0.01 1851628 +8408 206559437 21539050 0 0 17256026 +8411 64330087 17625106 1198138870 0.05 17296100 +102 17196717 5084727 2381571 7.22 954265 +126 47076706 6539129 0 0 473096 +135 5963279 349607 2680238 2.22 201232 +2103 30897371 6145673 2094190 14.75 1370161 +2114 77705243 7131507 5030476 15.45 2408911 +2134 45756932 8388077 1899524 24.09 1280900 +2138 41122707 926209 1029048 39.96 609515 +3909 47536705 5144151 5320571 8.93 4245859 +6905 78258053 8148854 1331143 58.79 2018650 +6910 9942445 1100250 0 0 624581 +6925 34062388 2782443 919143 37.06 1972027 +7116 46370117 5116567 0 0 2260910 +7151 82552113 16362161 769095 107.34 2010564 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151105_194342- cantons analyses - en m3 +Canton Demande(m3) Transfert(m3) Prélèvement(m3) Demande/Prélèvement Demande_corrigée(m3) +101 3420629 904098 5736286 0.6 853775 +117 16371486 4279784 84234381 0.19 6192555 +118 3264400 673350 2187571 1.49 445844 +119 24705662 5113532 41914048 0.59 4312102 +120 25319899 1997728 36697090 0.69 4824490 +140 11075785 669000 4194571 2.64 3211646 +518 7839638 7604648 39115762 0.2 668197 +717 33824897 3640763 3800524 8.9 2795263 +722 42933329 5506792 3188381 13.47 3486760 +724 42432698 8076818 1076143 39.43 3175433 +2602 114001185 24854541 137560762 0.83 31080274 +2604 158764617 6471800 12170476 13.05 42715191 +2607 164356260 24303083 64529426 2.55 22261703 +2611 49882189 5917307 18864524 2.64 7054602 +2613 55285085 7168600 38455857 1.44 11287696 +2615 130055460 9220540 40220667 3.23 34712048 +2616 88832177 13983447 61813667 1.44 12042858 +2619 99244091 12468696 27135351 3.66 14108905 +2621 112475252 10008715 34567976 3.25 17431794 +2623 49299257 3008823 15321317 3.22 7734351 +2625 106753731 7259130 50560369 2.11 4191203 +2626 92865207 10382373 34589649 2.68 14114604 +2628 32115943 7236502 68828429 0.47 7405944 +2629 7729921 1747090 34945421 0.22 3938618 +2632 6713616 1417259 6013900 1.12 2962906 +2634 78995932 3986498 70727276 1.12 21466774 +3006 101250982 9079980 1149098 88.11 12342594 +3023 233148305 12222436 15166960 15.37 5924503 +3026 177895768 12473824 5527811 32.18 3838342 +3802 51769149 5190604 16890619 3.06 3683827 +3807 46457943 4336214 19841286 2.34 4341064 +3808 82553133 5692205 27901619 2.96 8257945 +3815 15858686 2763682 2387143 6.64 938952 +3819 61044092 9229945 32097905 1.9 4160718 +3822 1323796 1239385 204368571 0.01 296108 +3824 36271882 3766387 51884857 0.7 6881118 +3825 19759345 3243141 5585667 3.54 1896242 +3830 19867058 3150535 51674571 0.38 3318460 +3837 19023771 2438062 7115524 2.67 1622755 +3846 11234857 1956807 18797524 0.6 4450479 +3853 22035143 3466334 3509429 6.28 1180865 +4213 37690253 2909674 8442667 4.46 2117783 +4233 9659779 1716627 1460476 6.61 464342 +6907 10925460 7135725 2455406 4.45 331743 +6924 49244174 2617659 0 0 5081977 +6931 37046050 2256981 167238 221.52 2375370 +6937 31757829 2180412 19007143 1.67 6310174 +6938 59531348 3842509 12220095 4.87 3706517 +6944 18097371 610613 52351790 0.35 6939290 +6945 4280343 239403 105533 40.56 2439716 +6948 487991 487991 0 0 116352 +6949 1726000 1726000 1042512 1.66 384569 +7405 1600232 1052548 0 0 34253 +8405 120698449 9107050 2173006 55.54 6879331 +8406 143759706 9751902 34718190 4.14 
12483252 +8409 242944749 7641259 0 0 27011422 +8413 176036022 6075358 1039346191 0.17 28255761 +8415 66444287 4153873 0 0 5528196 +8416 209611630 11592077 64785321 3.24 12094006 +8418 140282161 13086647 0 0 14999905 +8423 23173181 10707854 423579905 0.05 4394647 +410 103914317 13089837 220069619 0.47 12518311 +413 11735524 4552541 10848952 1.08 1375739 +414 52105824 10680649 257086476 0.2 9049212 +416 9451342 5540141 78595143 0.12 1521838 +419 24329562 3707815 100257476 0.24 4226096 +420 81091313 7990515 0 0 2945255 +421 10185543 1991723 242860 41.94 1230163 +427 27071383 6237607 23294190 1.16 7419886 +429 5886358 2102696 30074878 0.2 810654 +430 3935143 1509619 477117709 0.01 550444 +505 54284295 2696148 16082048 3.38 3851946 +509 86345063 5113561 134828333 0.64 10254237 +512 43763770 8349432 78728476 0.56 7858014 +515 6553053 950162 2000810 3.28 1853993 +516 8429560 1515243 27357286 0.31 2927061 +522 54196483 10628981 20441000 2.65 4462792 +523 50798881 6568210 0 0 5296862 +524 18272055 5030115 31558048 0.58 2307804 +1326 253574505 5659238 342713757 0.74 58017289 +1327 16728575 3649946 851938524 0.02 2242800 +8319 7412835 2342332 612180480 0.01 1606561 +8408 153202266 20798144 0 0 12798555 +8411 55697905 16511125 1198138870 0.05 14975209 +102 13446906 4992198 2381571 5.65 746183 +126 36788393 6405547 0 0 369704 +135 2400743 316019 2680238 0.9 81014 +2103 27097178 6064330 2094190 12.94 1201639 +2114 59950686 6946735 5030476 11.92 1858509 +2134 39954103 8267201 1899524 21.03 1118458 +2138 30337066 925257 1029048 29.48 449651 +3909 27983927 4789000 5320571 5.26 2499454 +6905 59789371 8102951 1331143 44.92 1542254 +6910 9412623 1056635 0 0 591298 +6925 26691397 2689139 919143 29.04 1545286 +7116 37167433 5028880 0 0 1812206 +7151 63569484 16284879 769095 82.65 1548240 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151105_195241- cantons analyses - en m3 +Canton Demande(m3) Transfert(m3) Prélèvement(m3) Demande/Prélèvement Demande_corrigée(m3) +101 2341729 761921 5736286 0.41 584486 +117 13611943 3403735 84234381 0.16 5148751 +118 2405113 520843 2187571 1.1 328484 +119 20019295 4037149 41914048 0.48 3494148 +120 19199685 1617490 36697090 0.52 3658335 +140 7470388 555417 4194571 1.78 2166189 +518 6072986 5973098 39115762 0.16 517620 +717 17094634 3165690 3800524 4.5 1412687 +722 25730671 4154819 3188381 8.07 2089674 +724 23330679 6329550 1076143 21.68 1745942 +2602 85393149 20913682 137560762 0.62 23280832 +2604 124115528 5697101 12170476 10.2 33392947 +2607 107406176 20893738 64529426 1.66 14547936 +2611 42827739 4929098 18864524 2.27 6056924 +2613 43742462 5418314 38455857 1.14 8931010 +2615 85445199 8040812 40220667 2.12 22805485 +2616 74715755 12084643 61813667 1.21 10129114 +2619 99539112 11248359 27135351 3.67 14150846 +2621 103393799 8480434 34567976 2.99 16024320 +2623 48013348 2703254 15321317 3.13 7532611 +2625 62755639 6372097 50560369 1.24 2463817 +2626 83903845 8729449 34589649 2.43 12752564 +2628 23114078 5145240 68828429 0.34 5330112 +2629 6672340 1463447 34945421 0.19 3399750 +2632 5374353 1265407 6013900 0.89 2371852 +2634 62205719 3492916 70727276 0.88 16904112 +3006 117206301 7379490 1149098 102 14287564 +3023 199980774 9832495 15166960 13.19 5081687 +3026 129195446 10509616 5527811 23.37 2787567 +3802 42553212 4340308 16890619 2.52 3028033 +3807 41321048 3407804 19841286 2.08 3861069 +3808 71917576 4810994 27901619 2.58 7194050 +3815 12622648 2388937 2387143 5.29 747354 +3819 50326678 7578489 32097905 1.57 3430227 +3822 883148 882511 204368571 0 197544 +3824 22125130 
2878897 51884857 0.43 4197346 +3825 15532646 2784122 5585667 2.78 1490619 +3830 12513456 2257665 51674571 0.24 2090164 +3837 16196735 2115515 7115524 2.28 1381604 +3846 10396251 1590815 18797524 0.55 4118281 +3853 18346389 3036171 3509429 5.23 983185 +4213 21039191 2136570 8442667 2.49 1182174 +4233 3825640 1438618 1460476 2.62 183897 +6907 6581556 5043707 2455406 2.68 199844 +6924 25538485 2125597 0 0 2635560 +6931 18537988 1836014 167238 110.85 1188644 +6937 28468180 1927969 19007143 1.5 5656532 +6938 53103503 3432937 12220095 4.35 3306309 +6944 17116491 564979 52351790 0.33 6563179 +6945 3646145 239403 105533 34.55 2078235 +6948 343819 343819 0 0 81977 +6949 1439043 1439043 1042512 1.38 320632 +7405 805970 708877 0 0 17252 +8405 131775687 8109062 2173006 60.64 7510690 +8406 133350152 8207091 34718190 3.84 11579347 +8409 175437246 6780109 0 0 19505709 +8413 168618336 5524178 1039346191 0.16 27065139 +8415 36694311 3658207 0 0 3052984 +8416 167587633 9849951 64785321 2.59 9669338 +8418 97304073 10871341 0 0 10404401 +8423 23230097 8896959 423579905 0.05 4405440 +410 57686710 10857030 220069619 0.26 6949381 +413 7678803 3625731 10848952 0.71 900175 +414 28730502 8650110 257086476 0.11 4989623 +416 6433231 4272668 78595143 0.08 1035868 +419 12915511 3112384 100257476 0.13 2243451 +420 48347902 6455606 0 0 1756007 +421 7482909 1598709 242860 30.81 903751 +427 15257691 5089449 23294190 0.65 4181919 +429 4288938 1715821 30074878 0.14 590661 +430 2565183 1185642 477117709 0.01 358815 +505 33585110 2338153 16082048 2.09 2383157 +509 55275553 4487985 134828333 0.41 6564459 +512 26709049 6555539 78728476 0.34 4795750 +515 3584295 863013 2000810 1.79 1014071 +516 4650743 1268064 27357286 0.17 1614913 +522 34405608 8979685 20441000 1.68 2833119 +523 35268063 5531416 0 0 3677444 +524 11582580 4261015 31558048 0.37 1462907 +1326 288776775 4670920 342713757 0.84 66071491 +1327 11101462 3012050 851938524 0.01 1488373 +8319 4734333 1859051 612180480 0.01 1026058 +8408 128690152 18582282 0 0 10750807 +8411 34040124 13560218 1198138870 0.03 9152193 +102 6898556 3388769 2381571 2.9 382808 +126 22003810 4507407 0 0 221127 +135 1545847 286545 2680238 0.58 52165 +2103 14937501 4321330 2094190 7.13 662412 +2114 32371059 5454727 5030476 6.43 1003523 +2134 21219385 5914196 1899524 11.17 594007 +2138 17743929 780319 1029048 17.24 262998 +3909 23781083 3908918 5320571 4.47 2124067 +6905 35379327 5988591 1331143 26.58 912602 +6910 4990309 852636 0 0 313490 +6925 17353595 2179915 919143 18.88 1004679 +7116 21791419 3639785 0 0 1062504 +7151 36787632 11936776 769095 47.83 895966 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151105_195555- cantons analyses - en m3 +Canton Demande(m3) Transfert(m3) Prélèvement(m3) Demande/Prélèvement Demande_corrigée(m3) +101 2265331 761921 5736286 0.39 565418 +117 12914545 3403740 84234381 0.15 4884959 +118 2375989 520836 2187571 1.09 324507 +119 19394242 4037171 41914048 0.46 3385052 +120 18530018 1617512 36697090 0.5 3530736 +140 7421114 555390 4194571 1.77 2151901 +518 6072986 5973098 39115762 0.16 517620 +717 17093153 3165688 3800524 4.5 1412565 +722 25550860 4154767 3188381 8.01 2075071 +724 23330679 6329550 1076143 21.68 1745942 +2602 81004048 20915785 137560762 0.59 22084227 +2604 114896429 5693677 12170476 9.44 30912574 +2607 107030368 20893281 64529426 1.66 14497034 +2611 39024318 4932032 18864524 2.07 5519025 +2613 39039836 5419086 38455857 1.02 7970862 +2615 85420838 8042025 40220667 2.12 22798983 +2616 69527070 12083086 61813667 1.12 9425691 +2619 82691108 11244710 
27135351 3.05 11755672 +2621 90829227 8478752 34567976 2.63 14077020 +2623 40575465 2701275 15321317 2.65 6365713 +2625 61856880 6374261 50560369 1.22 2428531 +2626 74398370 8736583 34589649 2.15 11307825 +2628 22214204 5149199 68828429 0.32 5122601 +2629 6194281 1462599 34945421 0.18 3156165 +2632 5008002 1265099 6013900 0.83 2210171 +2634 58526731 3496064 70727276 0.83 15904365 +3006 87730592 7379550 1149098 76.35 10694446 +3023 177428863 9832498 15166960 11.7 4508623 +3026 120238600 10506864 5527811 21.75 2594311 +3802 39269403 4340135 16890619 2.32 2794361 +3807 37516334 3407179 19841286 1.89 3505553 +3808 64390656 4810671 27901619 2.31 6441118 +3815 11843882 2387708 2387143 4.96 701246 +3819 45755104 7578287 32097905 1.43 3118632 +3822 883148 882511 204368571 0 197544 +3824 21638004 2879859 51884857 0.42 4104933 +3825 14547252 2783827 5585667 2.6 1396054 +3830 12420630 2257664 51674571 0.24 2074659 +3837 14728244 2117538 7115524 2.07 1256340 +3846 9228173 1587573 18797524 0.49 3655569 +3853 16680195 3036067 3509429 4.75 893893 +4213 20868601 2134407 8442667 2.47 1172589 +4233 3825640 1438618 1460476 2.62 183897 +6907 6581556 5043707 2455406 2.68 199844 +6924 25530832 2125595 0 0 2634771 +6931 18537983 1836015 167238 110.85 1188644 +6937 25303987 1929126 19007143 1.33 5027818 +6938 46705076 3435104 12220095 3.82 2907933 +6944 14820403 564475 52351790 0.28 5682763 +6945 3269613 239403 105533 30.98 1863618 +6948 343819 343819 0 0 81977 +6949 1439043 1439043 1042512 1.38 320632 +7405 805970 708877 0 0 17252 +8405 102709397 8086512 2173006 47.27 5854027 +8406 109622052 8208861 34718190 3.16 9518938 +8409 161084224 6789379 0 0 17909891 +8413 134874503 5525338 1039346191 0.13 21648875 +8415 36685630 3658901 0 0 3052262 +8416 148052384 9855129 64785321 2.29 8542209 +8418 91676780 10869950 0 0 9802693 +8423 21042430 8896959 423579905 0.05 3990563 +410 57686710 10857030 220069619 0.26 6949381 +413 7652014 3625731 10848952 0.71 897035 +414 28730502 8650110 257086476 0.11 4989623 +416 6433231 4272668 78595143 0.08 1035868 +419 12915511 3112384 100257476 0.13 2243451 +420 48347902 6455606 0 0 1756007 +421 7272366 1598709 242860 29.94 878323 +427 15257690 5089449 23294190 0.65 4181919 +429 4146324 1715821 30074878 0.14 571021 +430 2565183 1185642 477117709 0.01 358815 +505 33371884 2339158 16082048 2.08 2368027 +509 54477933 4486962 134828333 0.4 6469735 +512 26709049 6555539 78728476 0.34 4795750 +515 3584295 863013 2000810 1.79 1014071 +516 4650743 1268064 27357286 0.17 1614913 +522 34405608 8979684 20441000 1.68 2833119 +523 33957698 5531425 0 0 3540811 +524 11582581 4261014 31558048 0.37 1462907 +1326 221338700 4669405 342713757 0.65 50641808 +1327 11101462 3012050 851938524 0.01 1488373 +8319 4713478 1859051 612180480 0.01 1021538 +8408 118902232 18582211 0 0 9933122 +8411 33837989 13563395 1198138870 0.03 9097846 +102 6898506 3388770 2381571 2.9 382806 +126 21866704 4507407 0 0 219749 +135 1536609 286545 2680238 0.57 51853 +2103 14890091 4321331 2094190 7.11 660309 +2114 32244904 5454725 5030476 6.41 999612 +2134 21130010 5915152 1899524 11.12 591505 +2138 17361943 780319 1029048 16.87 257336 +3909 21832366 3909273 5320571 4.1 1950012 +6905 34884680 5988591 1331143 26.21 899843 +6910 4990309 852636 0 0 313490 +6925 16922190 2179934 919143 18.41 979703 +7116 21525194 3639852 0 0 1049523 +7151 36146475 11936786 769095 47 880351 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151105_222908- cantons analyses - en m3 +Canton Demande(m3) Transfert(m3) Prélèvement(m3) Demande/Prélèvement 
Demande_corrigée(m3) +101 172491 89854 5736286 0.03 43053 +117 1206778 627923 84234381 0.01 456467 +118 105066 89831 2187571 0.05 14350 +119 1265181 799671 41914048 0.03 220823 +120 1026833 437738 36697090 0.03 195654 +140 937245 207009 4194571 0.22 271773 +518 1639446 1639446 39115762 0.04 139735 +717 2012119 1196537 3800524 0.53 166280 +722 5455554 1427745 3188381 1.71 443064 +724 5030893 2312414 1076143 4.67 376485 +2602 23224079 9441881 137560762 0.17 6331607 +2604 33446098 3077182 12170476 2.75 8998582 +2607 37280872 12033809 64529426 0.58 5049614 +2611 7004935 1690281 18864524 0.37 990675 +2613 8606761 1868993 38455857 0.22 1757264 +2615 29735724 4781365 40220667 0.74 7936521 +2616 24288543 5919056 61813667 0.39 3292765 +2619 31200623 5820158 27135351 1.15 4435595 +2621 19910134 3105952 34567976 0.58 3085740 +2623 7507240 909852 15321317 0.49 1177779 +2625 14042471 3313047 50560369 0.28 551314 +2626 15837521 3227111 34589649 0.46 2407148 +2628 4128019 1479588 68828429 0.06 951922 +2629 2014108 602483 34945421 0.06 1026246 +2632 1379373 591389 6013900 0.23 608756 +2634 17785367 1859967 70727276 0.25 4833090 +3006 31669877 3097690 1149098 27.56 3860589 +3023 61139295 4736471 15166960 4.03 1553603 +3026 30925117 5537946 5527811 5.59 667251 +3802 4810027 1203546 16890619 0.28 342275 +3807 4104051 762899 19841286 0.21 383486 +3808 10426280 1678271 27901619 0.37 1042960 +3815 1194661 633110 2387143 0.5 70733 +3819 7759241 2622833 32097905 0.24 528864 +3822 165356 165356 204368571 0 36987 +3824 1651478 634216 51884857 0.03 313301 +3825 1711743 644163 5585667 0.31 164271 +3830 630581 446249 51674571 0.01 105328 +3837 1919544 538473 7115524 0.27 163740 +3846 1397585 378345 18797524 0.07 553627 +3853 2230212 734501 3509429 0.64 119517 +4213 3035305 717826 8442667 0.36 170551 +4233 336188 332422 1460476 0.23 16160 +6907 1463324 1463324 2455406 0.6 44433 +6924 3065424 843580 0 0 316350 +6931 2357736 630818 167238 14.1 151177 +6937 3091504 555157 19007143 0.16 614271 +6938 6967651 1272659 12220095 0.57 433817 +6944 2257632 190656 52351790 0.04 865671 +6945 568841 74515 105533 5.39 324229 +6948 113087 113087 0 0 26963 +6949 389401 389401 1042512 0.37 86762 +7405 100040 100040 0 0 2141 +8405 43255179 4666925 2173006 19.91 2465373 +8406 31721893 4015049 34718190 0.91 2754544 +8409 29028334 3413447 0 0 3227469 +8413 43928508 3903215 1039346191 0.04 7051020 +8415 8693156 2352870 0 0 723275 +8416 30711959 3419496 64785321 0.47 1771994 +8418 18570114 5683167 0 0 1985641 +8423 9201931 4451886 423579905 0.02 1745088 +410 12810077 5531629 220069619 0.06 1543200 +413 2393948 1479867 10848952 0.22 280639 +414 6093291 3683479 257086476 0.02 1058221 +416 2504977 2120416 78595143 0.03 403347 +419 2667897 1450613 100257476 0.03 463419 +420 12339022 3239641 0 0 448156 +421 363107 205490 242860 1.5 43854 +427 4007592 2639512 23294190 0.17 1098425 +429 569000 447665 30074878 0.02 78361 +430 1090054 606352 477117709 0 152476 +505 8135167 1280970 16082048 0.51 577261 +509 15010774 2514222 134828333 0.11 1782662 +512 8729441 3003790 78728476 0.11 1567417 +515 1008309 521134 2000810 0.5 285271 +516 1221978 688514 27357286 0.04 424317 +522 13464493 5540168 20441000 0.66 1108729 +523 11510862 3200164 0 0 1200252 +524 4475632 2472077 31558048 0.14 565283 +1326 85766923 1957587 342713757 0.25 19623283 +1327 3545637 1358932 851938524 0 475364 +8319 971269 552233 612180480 0 210500 +8408 48765502 10670544 0 0 4073882 +8411 9317835 5511339 1198138870 0.01 2505238 +102 728775 693738 2381571 0.31 40441 +126 2151944 997868 0 0 
21626 +135 0 0 2680238 0 0 +2103 1786211 914414 2094190 0.85 79211 +2114 1849585 842072 5030476 0.37 57338 +2134 2786521 1340334 1899524 1.47 78005 +2138 1757151 165977 1029048 1.71 26044 +3909 2941383 925461 5320571 0.55 262717 +6905 3226787 1301780 1331143 2.42 83234 +6910 768294 175849 0 0 48264 +6925 3079319 682412 919143 3.35 178276 +7116 2749838 869642 0 0 134076 +7151 5136608 3120501 769095 6.68 125103 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151105_232408- cantons analyses - en m3 +Canton Demande(m3) Transfert(m3) Prélèvement(m3) Demande/Prélèvement Demande_corrigée(m3) +101 172491 89854 5736286 0.03 43053 +117 1206778 627923 84234381 0.01 456467 +118 105066 89831 2187571 0.05 14350 +119 1265181 799671 41914048 0.03 220823 +120 1026833 437738 36697090 0.03 195654 +140 937245 207009 4194571 0.22 271773 +518 1639446 1639446 39115762 0.04 139735 +717 2012119 1196537 3800524 0.53 166280 +722 5455554 1427745 3188381 1.71 443064 +724 5030893 2312414 1076143 4.67 376485 +2602 23224079 9441881 137560762 0.17 6331607 +2604 33446098 3077182 12170476 2.75 8998582 +2607 37280872 12033809 64529426 0.58 5049614 +2611 7004935 1690281 18864524 0.37 990675 +2613 8606761 1868993 38455857 0.22 1757264 +2615 29735724 4781365 40220667 0.74 7936521 +2616 24288543 5919056 61813667 0.39 3292765 +2619 31200623 5820158 27135351 1.15 4435595 +2621 19910134 3105952 34567976 0.58 3085740 +2623 7507240 909852 15321317 0.49 1177779 +2625 14042471 3313047 50560369 0.28 551314 +2626 15837521 3227111 34589649 0.46 2407148 +2628 4128019 1479588 68828429 0.06 951922 +2629 2014108 602483 34945421 0.06 1026246 +2632 1379373 591389 6013900 0.23 608756 +2634 17785367 1859967 70727276 0.25 4833090 +3006 31669877 3097690 1149098 27.56 3860589 +3023 61139295 4736471 15166960 4.03 1553603 +3026 30925117 5537946 5527811 5.59 667251 +3802 4810027 1203546 16890619 0.28 342275 +3807 4104051 762899 19841286 0.21 383486 +3808 10426280 1678271 27901619 0.37 1042960 +3815 1194661 633110 2387143 0.5 70733 +3819 7759241 2622833 32097905 0.24 528864 +3822 165356 165356 204368571 0 36987 +3824 1651478 634216 51884857 0.03 313301 +3825 1711743 644163 5585667 0.31 164271 +3830 630581 446249 51674571 0.01 105328 +3837 1919544 538473 7115524 0.27 163740 +3846 1397585 378345 18797524 0.07 553627 +3853 2230212 734501 3509429 0.64 119517 +4213 3035305 717826 8442667 0.36 170551 +4233 336188 332422 1460476 0.23 16160 +6907 1463324 1463324 2455406 0.6 44433 +6924 3065424 843580 0 0 316350 +6931 2357736 630818 167238 14.1 151177 +6937 3091504 555157 19007143 0.16 614271 +6938 6967651 1272659 12220095 0.57 433817 +6944 2257632 190656 52351790 0.04 865671 +6945 568841 74515 105533 5.39 324229 +6948 113087 113087 0 0 26963 +6949 389401 389401 1042512 0.37 86762 +7405 100040 100040 0 0 2141 +8405 43246058 4666919 2173006 19.9 2464853 +8406 31721893 4015049 34718190 0.91 2754544 +8409 29028318 3413448 0 0 3227467 +8413 43928424 3903220 1039346191 0.04 7051006 +8415 8693156 2352870 0 0 723275 +8416 30711958 3419496 64785321 0.47 1771994 +8418 18570114 5683167 0 0 1985641 +8423 9201931 4451886 423579905 0.02 1745088 +410 12810077 5531629 220069619 0.06 1543200 +413 2393948 1479867 10848952 0.22 280639 +414 6093291 3683479 257086476 0.02 1058221 +416 2504977 2120416 78595143 0.03 403347 +419 2667897 1450613 100257476 0.03 463419 +420 12339022 3239641 0 0 448156 +421 363107 205490 242860 1.5 43854 +427 4007592 2639512 23294190 0.17 1098425 +429 569000 447665 30074878 0.02 78361 +430 1090054 606352 477117709 0 152476 +505 8135167 1280970 16082048 
0.51 577261 +509 15010774 2514222 134828333 0.11 1782662 +512 8729441 3003790 78728476 0.11 1567417 +515 1008309 521134 2000810 0.5 285271 +516 1221978 688514 27357286 0.04 424317 +522 13464493 5540168 20441000 0.66 1108729 +523 11510862 3200164 0 0 1200252 +524 4475632 2472077 31558048 0.14 565283 +1326 85765427 1957587 342713757 0.25 19622941 +1327 3545637 1358932 851938524 0 475364 +8319 971269 552233 612180480 0 210500 +8408 48765502 10670544 0 0 4073882 +8411 9317834 5511339 1198138870 0.01 2505238 +102 728775 693738 2381571 0.31 40441 +126 2151944 997868 0 0 21626 +135 0 0 2680238 0 0 +2103 1786211 914414 2094190 0.85 79211 +2114 1849585 842072 5030476 0.37 57338 +2134 2786521 1340334 1899524 1.47 78005 +2138 1757151 165977 1029048 1.71 26044 +3909 2941383 925461 5320571 0.55 262717 +6905 3226787 1301780 1331143 2.42 83234 +6910 768294 175849 0 0 48264 +6925 3079319 682412 919143 3.35 178276 +7116 2749838 869642 0 0 134076 +7151 5136608 3120501 769095 6.68 125103 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151106_001546- cantons analyses - en m3 +Canton Demande(m3) Transfert(m3) Prélèvement(m3) Demande/Prélèvement Demande_corrigée(m3) +101 522172 411582 5736286 0.09 130332 +117 2616097 1823662 84234381 0.03 989545 +118 387061 288226 2187571 0.18 52864 +119 3441443 2418910 41914048 0.08 600666 +120 2341645 1116235 36697090 0.06 446180 +140 1033115 399820 4194571 0.25 299572 +518 3414007 3414007 39115762 0.09 290986 +717 4664204 2750617 3800524 1.23 385446 +722 6644114 3546696 3188381 2.08 539591 +724 8124416 5435225 1076143 7.55 607987 +2602 19880342 13242173 137560762 0.14 5420000 +2604 18166319 4690361 12170476 1.49 4887599 +2607 29839216 15583051 64529426 0.46 4041658 +2611 5571213 3339141 18864524 0.3 787910 +2613 8323637 4738156 38455857 0.22 1699458 +2615 20781869 6140112 40220667 0.52 5546720 +2616 15157850 8048977 61813667 0.25 2054929 +2619 16042418 7231026 27135351 0.59 2280649 +2621 12744168 5657168 34567976 0.37 1975134 +2623 4618007 2003123 15321317 0.3 724499 +2625 16450157 5919850 50560369 0.33 645841 +2626 10678452 5821529 34589649 0.31 1623020 +2628 6035983 4305397 68828429 0.09 1391899 +2629 1387471 858079 34945421 0.04 706957 +2632 1178132 851178 6013900 0.2 519943 +2634 9429067 2776136 70727276 0.13 2562305 +3006 15700809 6431738 1149098 13.66 1913944 +3023 36118512 8782802 15166960 2.38 917803 +3026 27874565 9641810 5527811 5.04 601432 +3802 5222414 2846408 16890619 0.31 371620 +3807 4504771 2336185 19841286 0.23 420929 +3808 9432523 3265741 27901619 0.34 943553 +3815 1929213 1558635 2387143 0.81 114224 +3819 8640186 4719311 32097905 0.27 588908 +3822 670702 670702 204368571 0 150023 +3824 3913123 2470766 51884857 0.08 742356 +3825 2256066 1733493 5585667 0.4 216507 +3830 2531575 1820215 51674571 0.05 422857 +3837 2052666 1402074 7115524 0.29 175095 +3846 1463700 906108 18797524 0.08 579817 +3853 2612962 1911256 3509429 0.74 140029 +4213 4583378 1818706 8442667 0.54 257536 +4233 1504762 1212941 1460476 1.03 72333 +6907 4488847 4265743 2455406 1.83 136300 +6924 5475217 1839526 0 0 565040 +6931 4180827 1577408 167238 25 268072 +6937 3033172 1308332 19007143 0.16 602681 +6938 6566571 2415655 12220095 0.54 408845 +6944 1721548 401855 52351790 0.03 660113 +6945 417357 152257 105533 3.95 237886 +6948 290914 290914 0 0 69363 +6949 769675 769675 1042512 0.74 171491 +7405 571377 559260 0 0 12230 +8405 18071745 5524737 2173006 8.32 1030018 +8406 23834840 7417851 34718190 0.69 2069678 +8409 23314953 6075204 0 0 2592236 +8413 25268711 5301439 1039346191 0.02 4055912 
+8415 10625891 3446759 0 0 884079 +8416 25605725 8581471 64785321 0.4 1477379 +8418 19945043 9805934 0 0 2132657 +8423 6474087 5159400 423579905 0.02 1227769 +410 17250323 9813118 220069619 0.08 2078105 +413 2736102 2146039 10848952 0.25 320749 +414 10494961 7626911 257086476 0.04 1822659 +416 4408833 3859476 78595143 0.06 709903 +419 4293424 2772124 100257476 0.04 745777 +420 12463656 5815313 0 0 452683 +421 1357014 907012 242860 5.59 163894 +427 6344500 4804640 23294190 0.27 1738939 +429 1116104 938926 30074878 0.04 153707 +430 1281034 798282 477117709 0 179190 +505 6406370 1872683 16082048 0.4 454588 +509 11500028 3492374 134828333 0.09 1365730 +512 11154665 5817607 78728476 0.14 2002879 +515 1550146 804417 2000810 0.77 438568 +516 1896921 1204148 27357286 0.07 658682 +522 11503044 6456056 20441000 0.56 947215 +523 9505007 3904823 0 0 991099 +524 4222087 2932684 31558048 0.13 533259 +1326 28301015 3838777 342713757 0.08 6475210 +1327 3102727 1894825 851938524 0 415983 +8319 1288110 1065901 612180480 0 279168 +8408 24299776 11805196 0 0 2030009 +8411 15840096 11986835 1198138870 0.01 4258845 +102 3098368 2777448 2381571 1.3 171932 +126 5737440 3686015 0 0 57658 +135 156311 124320 2680238 0.06 5275 +2103 4973715 3506199 2094190 2.38 220562 +2114 7320273 4462323 5030476 1.46 226933 +2134 7002052 4909768 1899524 3.69 196013 +2138 2616114 649466 1029048 2.54 38776 +3909 3496070 2140098 5320571 0.66 312260 +6905 8236916 4906081 1331143 6.19 212470 +6910 1350596 667826 0 0 84844 +6925 3254299 1354826 919143 3.54 188406 +7116 5279821 2999940 0 0 257433 +7151 12388608 9961299 769095 16.11 301726 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151106_064031- cantons analyses - en m3 +Canton Demande(m3) Transfert(m3) Prélèvement(m3) Demande/Prélèvement Demande_corrigée(m3) +101 522172 411582 5736286 0.09 130332 +117 2616097 1823662 84234381 0.03 989545 +118 387061 288226 2187571 0.18 52864 +119 3441443 2418910 41914048 0.08 600666 +120 2341645 1116235 36697090 0.06 446180 +140 1033115 399820 4194571 0.25 299572 +518 3414007 3414007 39115762 0.09 290986 +717 4664204 2750617 3800524 1.23 385446 +722 6644114 3546696 3188381 2.08 539591 +724 8124416 5435225 1076143 7.55 607987 +2602 19880342 13242173 137560762 0.14 5420000 +2604 18166319 4690361 12170476 1.49 4887599 +2607 29839216 15583051 64529426 0.46 4041658 +2611 5571213 3339141 18864524 0.3 787910 +2613 8323637 4738156 38455857 0.22 1699458 +2615 20781869 6140112 40220667 0.52 5546720 +2616 15157850 8048977 61813667 0.25 2054929 +2619 16042418 7231026 27135351 0.59 2280649 +2621 12744168 5657168 34567976 0.37 1975134 +2623 4618007 2003123 15321317 0.3 724499 +2625 16450157 5919850 50560369 0.33 645841 +2626 10678452 5821529 34589649 0.31 1623020 +2628 6035983 4305397 68828429 0.09 1391899 +2629 1387471 858079 34945421 0.04 706957 +2632 1178132 851178 6013900 0.2 519943 +2634 9429067 2776136 70727276 0.13 2562305 +3006 15700809 6431738 1149098 13.66 1913944 +3023 36118512 8782802 15166960 2.38 917803 +3026 27874565 9641810 5527811 5.04 601432 +3802 5222414 2846408 16890619 0.31 371620 +3807 4504771 2336185 19841286 0.23 420929 +3808 9432523 3265741 27901619 0.34 943553 +3815 1929213 1558635 2387143 0.81 114224 +3819 8640186 4719311 32097905 0.27 588908 +3822 670702 670702 204368571 0 150023 +3824 3913123 2470766 51884857 0.08 742356 +3825 2256066 1733493 5585667 0.4 216507 +3830 2531575 1820215 51674571 0.05 422857 +3837 2052666 1402074 7115524 0.29 175095 +3846 1463700 906108 18797524 0.08 579817 +3853 2612962 1911256 3509429 0.74 140029 
+4213 4583378 1818706 8442667 0.54 257536 +4233 1504762 1212941 1460476 1.03 72333 +6907 4488847 4265743 2455406 1.83 136300 +6924 5475217 1839526 0 0 565040 +6931 4180827 1577408 167238 25 268072 +6937 3033172 1308332 19007143 0.16 602681 +6938 6566571 2415655 12220095 0.54 408845 +6944 1721548 401855 52351790 0.03 660113 +6945 417357 152257 105533 3.95 237886 +6948 290914 290914 0 0 69363 +6949 769675 769675 1042512 0.74 171491 +7405 571377 559260 0 0 12230 +8405 18071745 5524737 2173006 8.32 1030018 +8406 23834840 7417851 34718190 0.69 2069678 +8409 23314953 6075204 0 0 2592236 +8413 25268711 5301439 1039346191 0.02 4055912 +8415 10625891 3446759 0 0 884079 +8416 25605725 8581471 64785321 0.4 1477379 +8418 19945043 9805934 0 0 2132657 +8423 6474087 5159400 423579905 0.02 1227769 +410 17250323 9813118 220069619 0.08 2078105 +413 2736102 2146039 10848952 0.25 320749 +414 10494961 7626911 257086476 0.04 1822659 +416 4408833 3859476 78595143 0.06 709903 +419 4293424 2772124 100257476 0.04 745777 +420 12463656 5815313 0 0 452683 +421 1357014 907012 242860 5.59 163894 +427 6344500 4804640 23294190 0.27 1738939 +429 1116104 938926 30074878 0.04 153707 +430 1281034 798282 477117709 0 179190 +505 6406370 1872683 16082048 0.4 454588 +509 11500028 3492374 134828333 0.09 1365730 +512 11154665 5817607 78728476 0.14 2002879 +515 1550146 804417 2000810 0.77 438568 +516 1896921 1204148 27357286 0.07 658682 +522 11503044 6456056 20441000 0.56 947215 +523 9505007 3904823 0 0 991099 +524 4222087 2932684 31558048 0.13 533259 +1326 28301015 3838777 342713757 0.08 6475210 +1327 3102727 1894825 851938524 0 415983 +8319 1288110 1065901 612180480 0 279168 +8408 24299776 11805196 0 0 2030009 +8411 15840096 11986835 1198138870 0.01 4258845 +102 3098368 2777448 2381571 1.3 171932 +126 5737440 3686015 0 0 57658 +135 156311 124320 2680238 0.06 5275 +2103 4973715 3506199 2094190 2.38 220562 +2114 7320273 4462323 5030476 1.46 226933 +2134 7002052 4909768 1899524 3.69 196013 +2138 2616114 649466 1029048 2.54 38776 +3909 3496070 2140098 5320571 0.66 312260 +6905 8236916 4906081 1331143 6.19 212470 +6910 1350596 667826 0 0 84844 +6925 3254299 1354826 919143 3.54 188406 +7116 5279821 2999940 0 0 257433 +7151 12388608 9961299 769095 16.11 301726 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151106_092804- cantons analyses - en m3 +Canton Demande(m3) Transfert(m3) Prélèvement(m3) Demande/Prélèvement Demande_corrigée(m3) +101 38675 27544 5736286 0.01 9653 +117 394331 278470 84234381 0 149157 +118 35132 34197 2187571 0.02 4798 +119 330485 307797 41914048 0.01 57683 +120 249434 219223 36697090 0.01 47527 +140 336145 126592 4194571 0.08 97472 +518 1010173 1010173 39115762 0.03 86100 +717 976888 783850 3800524 0.26 80729 +722 3110284 938037 3188381 0.98 252597 +724 2920256 1544733 1076143 2.71 218536 +2602 15308788 7069337 137560762 0.11 4173652 +2604 21215305 2434785 12170476 1.74 5707921 +2607 26612278 9860233 64529426 0.41 3604576 +2611 3519411 1116151 18864524 0.19 497734 +2613 4798906 1268970 38455857 0.12 979805 +2615 20877230 3963346 40220667 0.52 5572172 +2616 16551489 4580895 61813667 0.27 2243863 +2619 21005241 4614979 27135351 0.77 2986182 +2621 10871056 2145977 34567976 0.31 1684833 +2623 3642063 585074 15321317 0.24 571388 +2625 8525637 2517849 50560369 0.17 334721 +2626 8120689 2188318 34589649 0.23 1234265 +2628 2191922 943407 68828429 0.03 505458 +2629 1316258 438702 34945421 0.04 670671 +2632 900983 445887 6013900 0.15 397629 +2634 11395395 1464867 70727276 0.16 3096645 +3006 19850646 2247870 1149098 17.27 
2419813 +3023 40997408 3646569 15166960 2.7 1041780 +3026 18827517 4291872 5527811 3.41 406229 +3802 1877964 779972 16890619 0.11 133634 +3807 1546681 474819 19841286 0.08 144523 +3808 5161426 1188763 27901619 0.18 516307 +3815 481722 352534 2387143 0.2 28522 +3819 4100841 1876401 32097905 0.13 279510 +3822 85839 85839 204368571 0 19201 +3824 561896 339953 51884857 0.01 106597 +3825 667061 361257 5585667 0.12 64016 +3830 242767 214485 51674571 0 40550 +3837 816296 300685 7115524 0.11 69631 +3846 626274 201417 18797524 0.03 248087 +3853 919435 421988 3509429 0.26 49273 +4213 1416413 460989 8442667 0.17 79587 +4233 172556 172556 1460476 0.12 8295 +6907 873644 873644 2455406 0.36 26527 +6924 1282803 555927 0 0 132385 +6931 997792 410328 167238 5.97 63978 +6937 1246886 331039 19007143 0.07 247752 +6938 3205651 824573 12220095 0.26 199589 +6944 953483 114139 52351790 0.02 365605 +6945 265883 45054 105533 2.52 151548 +6948 72176 72176 0 0 17209 +6949 215863 215863 1042512 0.21 48096 +7405 51784 51784 0 0 1108 +8405 29733769 3927795 2173006 13.68 1694707 +8406 18450639 3091805 34718190 0.53 1602146 +8409 14146846 2614742 0 0 1572894 +8413 27319482 3287464 1039346191 0.03 4385084 +8415 5107451 1910689 0 0 424942 +8416 17624686 2195132 64785321 0.27 1016895 +8418 9951525 4359924 0 0 1064084 +8423 6872511 3629978 423579905 0.02 1303328 +410 7722085 4188782 220069619 0.04 930261 +413 1663790 1130202 10848952 0.15 195044 +414 3617298 2618371 257086476 0.01 628216 +416 1777315 1594935 78595143 0.02 286180 +419 1566143 1070939 100257476 0.02 272042 +420 7639005 2480695 0 0 277450 +421 95433 75117 242860 0.39 11526 +427 2635913 1985126 23294190 0.11 722467 +429 319179 289775 30074878 0.01 43957 +430 841845 503834 477117709 0 117757 +505 5047848 1036597 16082048 0.31 358189 +509 9629885 2067900 134828333 0.07 1143634 +512 5818434 2260436 78728476 0.07 1044730 +515 663143 413384 2000810 0.33 187617 +516 794443 525478 27357286 0.03 275860 +522 10049227 4736774 20441000 0.49 827500 +523 7959720 2665627 0 0 829970 +524 3373298 2064769 31558048 0.11 426056 +1326 55494123 1452009 342713757 0.16 12696933 +1327 2465128 1060460 851938524 0 330500 +8319 577930 362894 612180480 0 125253 +8408 35491994 8824917 0 0 2965010 +8411 6194799 3906419 1198138870 0.01 1665564 +102 343373 343336 2381571 0.14 19054 +126 873495 522794 0 0 8778 +135 0 0 2680238 0 0 +2103 867809 491807 2094190 0.41 38483 +2114 563585 351379 5030476 0.11 17471 +2134 1324523 739343 1899524 0.7 37078 +2138 736515 90589 1029048 0.72 10917 +3909 1116062 537981 5320571 0.21 99684 +6905 1372970 685983 1331143 1.03 35415 +6910 360458 101792 0 0 22644 +6925 1573610 443698 919143 1.71 91103 +7116 1283008 485557 0 0 62557 +7151 2517285 1817133 769095 3.27 61309 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151106_095821- cantons analyses - en m3 +Canton Demande(m3) Transfert(m3) Prélèvement(m3) Demande/Prélèvement Demande_corrigée(m3) +101 38675 27544 5736286 0.01 9653 +117 394331 278470 84234381 0 149157 +118 35132 34197 2187571 0.02 4798 +119 330485 307797 41914048 0.01 57683 +120 249434 219223 36697090 0.01 47527 +140 336145 126592 4194571 0.08 97472 +518 1010173 1010173 39115762 0.03 86100 +717 976888 783850 3800524 0.26 80729 +722 3110284 938037 3188381 0.98 252597 +724 2920256 1544733 1076143 2.71 218536 +2602 15308788 7069337 137560762 0.11 4173652 +2604 21215305 2434785 12170476 1.74 5707921 +2607 26612278 9860233 64529426 0.41 3604576 +2611 3519411 1116151 18864524 0.19 497734 +2613 4798906 1268970 38455857 0.12 979805 +2615 20877230 3963346 
40220667 0.52 5572172 +2616 16551489 4580895 61813667 0.27 2243863 +2619 21005241 4614979 27135351 0.77 2986182 +2621 10871056 2145977 34567976 0.31 1684833 +2623 3642063 585074 15321317 0.24 571388 +2625 8525637 2517849 50560369 0.17 334721 +2626 8120689 2188318 34589649 0.23 1234265 +2628 2191922 943407 68828429 0.03 505458 +2629 1316258 438702 34945421 0.04 670671 +2632 900983 445887 6013900 0.15 397629 +2634 11395395 1464867 70727276 0.16 3096645 +3006 19850646 2247870 1149098 17.27 2419813 +3023 40997408 3646569 15166960 2.7 1041780 +3026 18827517 4291872 5527811 3.41 406229 +3802 1877964 779972 16890619 0.11 133634 +3807 1546681 474819 19841286 0.08 144523 +3808 5161426 1188763 27901619 0.18 516307 +3815 481722 352534 2387143 0.2 28522 +3819 4100841 1876401 32097905 0.13 279510 +3822 85839 85839 204368571 0 19201 +3824 561896 339953 51884857 0.01 106597 +3825 667061 361257 5585667 0.12 64016 +3830 242767 214485 51674571 0 40550 +3837 816296 300685 7115524 0.11 69631 +3846 626274 201417 18797524 0.03 248087 +3853 919435 421988 3509429 0.26 49273 +4213 1416413 460989 8442667 0.17 79587 +4233 172556 172556 1460476 0.12 8295 +6907 873644 873644 2455406 0.36 26527 +6924 1282803 555927 0 0 132385 +6931 997792 410328 167238 5.97 63978 +6937 1246886 331039 19007143 0.07 247752 +6938 3205651 824573 12220095 0.26 199589 +6944 953483 114139 52351790 0.02 365605 +6945 265883 45054 105533 2.52 151548 +6948 72176 72176 0 0 17209 +6949 215863 215863 1042512 0.21 48096 +7405 51784 51784 0 0 1108 +8405 29733769 3927795 2173006 13.68 1694707 +8406 18450639 3091805 34718190 0.53 1602146 +8409 14146846 2614742 0 0 1572894 +8413 27319482 3287464 1039346191 0.03 4385084 +8415 5107451 1910689 0 0 424942 +8416 17624686 2195132 64785321 0.27 1016895 +8418 9951525 4359924 0 0 1064084 +8423 6872511 3629978 423579905 0.02 1303328 +410 7722085 4188782 220069619 0.04 930261 +413 1663790 1130202 10848952 0.15 195044 +414 3617298 2618371 257086476 0.01 628216 +416 1777315 1594935 78595143 0.02 286180 +419 1566143 1070939 100257476 0.02 272042 +420 7639005 2480695 0 0 277450 +421 95433 75117 242860 0.39 11526 +427 2635913 1985126 23294190 0.11 722467 +429 319179 289775 30074878 0.01 43957 +430 841845 503834 477117709 0 117757 +505 5047848 1036597 16082048 0.31 358189 +509 9629885 2067900 134828333 0.07 1143634 +512 5818434 2260436 78728476 0.07 1044730 +515 663143 413384 2000810 0.33 187617 +516 794443 525478 27357286 0.03 275860 +522 10049227 4736774 20441000 0.49 827500 +523 7959720 2665627 0 0 829970 +524 3373298 2064769 31558048 0.11 426056 +1326 55494123 1452009 342713757 0.16 12696933 +1327 2465128 1060460 851938524 0 330500 +8319 577930 362894 612180480 0 125253 +8408 35491994 8824917 0 0 2965010 +8411 6194799 3906419 1198138870 0.01 1665564 +102 343373 343336 2381571 0.14 19054 +126 873495 522794 0 0 8778 +135 0 0 2680238 0 0 +2103 867809 491807 2094190 0.41 38483 +2114 563585 351379 5030476 0.11 17471 +2134 1324523 739343 1899524 0.7 37078 +2138 736515 90589 1029048 0.72 10917 +3909 1116062 537981 5320571 0.21 99684 +6905 1372970 685983 1331143 1.03 35415 +6910 360458 101792 0 0 22644 +6925 1573610 443698 919143 1.71 91103 +7116 1283008 485557 0 0 62557 +7151 2517285 1817133 769095 3.27 61309 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151105_190616- cantons analyses - en m3 +Canton Demande(m3) Transfert(m3) Prélèvement(m3) Demande/Prélèvement Demande_corrigée(m3) Transfert_corrigé(m3) +101 7426426 1176074 5736286 1.29 1853606 293543 +117 36040367 5990710 84234381 0.43 13632359 2266001 +118 7315780 
835391 2187571 3.34 999171 114096 +119 51274544 6753487 41914048 1.22 8949409 1178747 +120 51793191 2421042 36697090 1.41 9868748 461309 +140 19266094 755264 4194571 4.59 5586590 219004 +518 9916446 8790808 39115762 0.25 845210 749268 +717 50487756 3747753 3800524 13.28 4172269 309711 +722 63055484 6119612 3188381 19.78 5120948 496994 +724 57716281 8936782 1076143 53.63 4319174 668780 +2602 187942394 28795636 137560762 1.37 51238951 7850587 +2604 281954485 6771017 12170476 23.17 75859092 1821724 +2607 224628131 26059560 64529426 3.48 30425399 3529712 +2611 104353746 6791882 18864524 5.53 14758256 960544 +2613 111158480 8497753 38455857 2.89 22695509 1735008 +2615 177654409 10339225 40220667 4.42 47416297 2759559 +2616 157722183 16002424 61813667 2.55 21382183 2169427 +2619 224344442 14254296 27135351 8.27 31893631 2026443 +2621 254841665 11614199 34567976 7.37 39496222 1800008 +2623 126052256 3454972 15321317 8.23 19775804 542036 +2625 160059284 7690556 50560369 3.17 6284005 301935 +2626 207498996 11364828 34589649 6 31537820 1727343 +2628 56668054 8746064 68828429 0.82 13067667 2016845 +2629 14255578 2008109 34945421 0.41 7263628 1023189 +2632 12259262 1503744 6013900 2.04 5410354 663644 +2634 138054597 4195322 70727276 1.95 37515689 1140059 +3006 267490142 10850534 1149098 232.78 32607312 1322691 +3023 440024757 13857230 15166960 29.01 11181414 352124 +3026 310834207 13322649 5527811 56.23 6706669 287454 +3802 113752692 5869679 16890619 6.73 8094498 417679 +3807 104835669 4914488 19841286 5.28 9795921 459213 +3808 187939094 6568791 27901619 6.74 18799900 657088 +3815 36069354 3191115 2387143 15.11 2135574 188938 +3819 133955763 11814054 32097905 4.17 9130321 805237 +3822 2120352 1631404 204368571 0.01 474283 364914 +3824 67637835 4436537 51884857 1.3 12831535 841653 +3825 45163129 3748596 5585667 8.09 4334162 359741 +3830 38520729 3811423 51674571 0.75 6434244 636635 +3837 44429676 2684731 7115524 6.24 3789914 229011 +3846 26743438 2826389 18797524 1.42 10593914 1119621 +3853 51690961 3870613 3509429 14.73 2770124 207427 +4213 53801142 3404795 8442667 6.37 3023040 191313 +4233 13549660 1825697 1460476 9.28 651327 87761 +6907 13713501 8133213 2455406 5.59 416399 246958 +6924 70515978 2719917 0 0 7277218 280694 +6931 52043264 2378762 167238 311.19 3336981 152525 +6937 73639399 2720457 19007143 3.87 14631902 540546 +6938 143624842 4424733 12220095 11.75 8942313 275491 +6944 45052714 674276 52351790 0.86 17275097 258546 +6945 10758933 239403 105533 101.95 6132391 136455 +6948 566376 566376 0 0 135041 135041 +6949 2563662 2563662 1042512 2.46 571208 571208 +7405 2445942 1277390 0 0 52356 27343 +8405 284424424 10840090 2173006 130.89 16211061 617842 +8406 319173224 10998887 34718190 9.19 27715136 955079 +8409 465731711 8208402 0 0 51781632 912638 +8413 395412440 6450821 1039346191 0.38 63468144 1035429 +8415 85650019 4297547 0 0 7126123 357558 +8416 467202420 13141097 64785321 7.21 26956275 758205 +8418 238709030 14269175 0 0 25524363 1525755 +8423 37736640 12686105 423579905 0.09 7156514 2405839 +410 139128347 14070098 220069619 0.63 16760462 1694991 +413 15608098 4876513 10848952 1.44 1829715 571667 +414 68823316 12195963 257086476 0.27 11952538 2118072 +416 11069543 6153800 78595143 0.14 1782398 990874 +419 32178382 4015681 100257476 0.32 5589452 697532 +420 110202884 8475810 0 0 4002593 307843 +421 22481057 2500606 242860 92.57 2715158 302012 +427 34581292 6864435 23294190 1.48 9478247 1881445 +429 11856241 2598172 30074878 0.39 1632810 357813 +430 4938810 1873127 477117709 0.01 690836 
262011 +505 79153962 2824103 16082048 4.92 5616666 200395 +509 125196422 5473465 134828333 0.93 14868178 650022 +512 54008372 9781866 78728476 0.69 9697487 1756385 +515 8421958 965850 2000810 4.21 2382744 273259 +516 10670537 1612129 27357286 0.39 3705212 559792 +522 66706239 11376658 20441000 3.26 5492904 936807 +523 74632355 7294126 0 0 7782007 760567 +524 22536653 5348926 31558048 0.71 2846432 675582 +1326 645482830 6553090 342713757 1.88 147685052 1499332 +1327 22320298 4044902 851938524 0.03 2992482 542300 +8319 11103538 2994342 612180480 0.02 2406436 648955 +8408 253151853 24273166 0 0 21148368 2027786 +8411 81678532 19734934 1198138870 0.07 21960487 5306030 +102 24634604 5832310 2381571 10.34 1367001 323641 +126 62465080 7146419 0 0 627741 71818 +135 8679818 349606 2680238 3.24 292902 11798 +2103 41359289 6813648 2094190 19.75 1834100 302155 +2114 106202622 7355393 5030476 21.11 3292348 228022 +2134 60890619 9640859 1899524 32.06 1704547 269882 +2138 55287075 945755 1029048 53.73 819457 14018 +3909 63483672 5967624 5320571 11.93 5670202 533013 +6905 104498201 9213797 1331143 78.5 2695509 237668 +6910 13345150 1123837 0 0 838338 70599 +6925 44110958 3199322 919143 47.99 2553784 185223 +7116 61603979 5721859 0 0 3003681 278986 +7151 107946733 18473020 769095 140.36 2629052 449912 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151105_190853- cantons analyses - en m3 +Canton Demande(m3) Transfert(m3) Prélèvement(m3) Demande/Prélèvement Demande_corrigée(m3) Transfert_corrigé(m3) +101 3420629 904098 5736286 0.6 853775 225659 +117 16371486 4279784 84234381 0.19 6192555 1618839 +118 3275600 673350 2187571 1.5 447373 91964 +119 24689371 5118239 41914048 0.59 4309259 893333 +120 25390229 2004126 36697090 0.69 4837890 381869 +140 10789429 692571 4194571 2.57 3128611 200825 +518 8330235 7910280 39115762 0.21 710012 674218 +717 38222613 3670475 3800524 10.06 3158687 303325 +722 46816167 6179544 3188381 14.68 3802099 501862 +724 50544848 8744615 1076143 46.97 3782502 654400 +2602 119321675 25437706 137560762 0.87 32530805 6935111 +2604 164484747 6718477 12170476 13.52 44254176 1807588 +2607 184252951 25859687 64529426 2.86 24956667 3502639 +2611 49815829 5925494 18864524 2.64 7045217 838015 +2613 57382995 7553336 38455857 1.49 11716032 1542184 +2615 143176095 9726104 40220667 3.56 38213970 2595916 +2616 92347532 14094083 61813667 1.49 12519430 1910716 +2619 101357999 12481765 27135351 3.74 14409426 1774454 +2621 112785388 10006579 34567976 3.26 17479860 1550853 +2623 49252971 3011733 15321317 3.21 7727090 472498 +2625 119923535 7510043 50560369 2.37 4708256 294848 +2626 93386599 10536242 34589649 2.7 14193850 1601406 +2628 32261462 7433194 68828429 0.47 7439501 1714096 +2629 7904817 1790253 34945421 0.23 4027733 912186 +2632 6830973 1409711 6013900 1.14 3014699 622145 +2634 80773107 4182888 70727276 1.14 21949713 1136680 +3006 101706901 9395906 1149098 88.51 12398171 1145370 +3023 242990267 12440814 15166960 16.02 6174595 316132 +3026 191939888 12956690 5527811 34.72 4141363 279558 +3802 51609314 5218123 16890619 3.06 3672454 371315 +3807 46514743 4327572 19841286 2.34 4346371 404371 +3808 85335558 5806084 27901619 3.06 8536276 580794 +3815 15858686 2763665 2387143 6.64 938952 163630 +3819 63870795 9454426 32097905 1.99 4353384 644406 +3822 1365771 1279050 204368571 0.01 305497 286099 +3824 36440000 3831261 51884857 0.7 6913012 726826 +3825 19808057 3252036 5585667 3.55 1900916 312088 +3830 19929486 3167813 51674571 0.39 3328887 529130 +3837 19023771 2438040 7115524 2.67 1622755 207968 +3846 
11234857 1956756 18797524 0.6 4450479 775132 +3853 22035143 3466321 3509429 6.28 1180865 185760 +4213 42024474 3418986 8442667 4.98 2361319 192110 +4233 11247694 1829025 1460476 7.7 540672 87921 +6907 13371378 8098285 2455406 5.45 406011 245898 +6924 56948351 2720285 0 0 5877045 280732 +6931 41888471 2338518 167238 250.47 2685862 149944 +6937 31757829 2180322 19007143 1.67 6310174 433223 +6938 60326246 3902751 12220095 4.94 3756009 242992 +6944 18097371 610613 52351790 0.35 6939290 234134 +6945 4280343 239403 105533 40.56 2439716 136455 +6948 566373 566373 0 0 135041 135041 +6949 1726000 1726000 1042512 1.66 384569 384569 +7405 1666233 1098927 0 0 35666 23523 +8405 122304588 9211486 2173006 56.28 6970875 525018 +8406 152935501 9884113 34718190 4.41 13280025 858279 +8409 256746333 7904263 0 0 28545929 878823 +8413 190138236 6385197 1039346191 0.18 30519325 1024896 +8415 80599453 4337190 0 0 6705913 360856 +8416 217486679 11586297 64785321 3.36 12548374 668497 +8418 149555765 13793208 0 0 15991501 1474862 +8423 23341883 10752012 423579905 0.06 4426640 2039051 +410 119005697 13759444 220069619 0.54 14336334 1657568 +413 13031174 4871648 10848952 1.2 1527626 571096 +414 59759943 11210264 257086476 0.23 10378503 1946885 +416 11128201 6212458 78595143 0.14 1791843 1000319 +419 28332789 3760971 100257476 0.28 4921465 653288 +420 90085251 8483050 0 0 3271917 308106 +421 10185543 1991723 242860 41.94 1230163 240551 +427 32494973 6851755 23294190 1.39 8906416 1877970 +429 6001692 2222708 30074878 0.2 826537 306105 +430 4694654 1628975 477117709 0.01 656684 227860 +505 60234753 2825464 16082048 3.75 4274183 200491 +509 97560574 5426185 134828333 0.72 11586178 644407 +512 52953580 9038599 78728476 0.67 9508093 1622928 +515 8421958 965850 2000810 4.21 2382744 273259 +516 10442128 1613340 27357286 0.38 3625900 560212 +522 63453427 11379645 20441000 3.1 5225052 937053 +523 58503681 7048867 0 0 6100251 734994 +524 21874278 5346393 31558048 0.69 2762773 675262 +1326 253777371 5659430 342713757 0.74 58063705 1294865 +1327 18384206 4039810 851938524 0.02 2464771 541617 +8319 7841519 2418923 612180480 0.01 1699469 524246 +8408 157440720 21080452 0 0 13152636 1761066 +8411 60957142 16667112 1198138870 0.05 16389233 4481201 +102 13820517 5365809 2381571 5.8 766916 297755 +126 39110349 6623292 0 0 393038 66561 +135 2400743 316019 2680238 0.9 81014 10664 +2103 30036922 6372985 2094190 14.34 1332004 282613 +2114 59950686 6946768 5030476 11.92 1858509 215354 +2134 44106133 9199992 1899524 23.22 1234689 257541 +2138 31745289 925284 1029048 30.85 470524 13714 +3909 28214662 4921179 5320571 5.3 2520063 439547 +6905 60592201 8756573 1331143 45.52 1562963 225874 +6910 10867975 1059435 0 0 682723 66553 +6925 28163210 2815230 919143 30.64 1630496 162987 +7116 39613015 5495679 0 0 1931448 267958 +7151 67099156 17945021 769095 87.24 1634206 437053 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151105_191636- cantons analyses - en m3 +Canton Demande(m3) Transfert(m3) Prélèvement(m3) Demande/Prélèvement Demande_corrigée(m3) Transfert_corrigé(m3) +101 5488846 989374 5736286 0.96 1369994 246944 +117 26992413 4805907 84234381 0.32 10209948 1817846 +118 4704512 763282 2187571 2.15 642531 104247 +119 38632144 5733936 41914048 0.92 6742817 1000796 +120 40420623 2084069 36697090 1.1 7701803 397102 +140 15150080 672868 4194571 3.61 4393069 195112 +518 8231492 7808833 39115762 0.21 701596 665571 +717 37548587 3647082 3800524 9.88 3102986 301392 +722 49407550 5505537 3188381 15.5 4012554 447123 +724 44026531 8154158 1076143 40.91 
3294707 610213 +2602 148214101 25263414 137560762 1.08 40407781 6887594 +2604 222881988 6504485 12170476 18.31 59965796 1750014 +2607 179271289 24349506 64529426 2.78 24281912 3298088 +2611 82227878 6056720 18864524 4.36 11629100 856573 +2613 85816937 7403271 38455857 2.23 17521462 1511545 +2615 143179143 9343905 40220667 3.56 38214783 2493906 +2616 124801550 14343736 61813667 2.02 16919177 1944561 +2619 174465417 12944116 27135351 6.43 24802645 1840183 +2621 195640581 10560016 34567976 5.66 30321038 1636627 +2623 97830535 3143527 15321317 6.39 15348218 493175 +2625 123055826 7297489 50560369 2.43 4831231 286503 +2626 160570997 10585888 34589649 4.64 24405222 1608952 +2628 45266044 7475131 68828429 0.66 10438361 1723767 +2629 11303570 1754410 34945421 0.32 5759495 893923 +2632 9684106 1418869 6013900 1.61 4273866 626187 +2634 109659563 3991245 70727276 1.55 29799472 1084602 +3006 211056318 9797023 1149098 183.67 25727973 1194267 +3023 349980911 12544763 15166960 23.08 8893321 318773 +3026 241303466 12565208 5527811 43.65 5206449 271111 +3802 88099218 5351575 16890619 5.22 6269029 380811 +3807 79795861 4644976 19841286 4.02 7456183 434030 +3808 143728078 6030760 27901619 5.15 14377389 603268 +3815 26768369 2920014 2387143 11.21 1584886 172887 +3819 103076375 9843342 32097905 3.21 7025606 670915 +3822 1560593 1361644 204368571 0.01 349075 304574 +3824 52972249 3861787 51884857 1.02 10049335 732617 +3825 33276073 3371667 5585667 5.96 3193399 323568 +3830 31288015 3336115 51674571 0.61 5226140 557242 +3837 32812545 2599940 7115524 4.61 2798957 221779 +3846 20343828 2219756 18797524 1.08 8058827 879315 +3853 38826078 3608545 3509429 11.06 2080693 193382 +4213 42037063 2907599 8442667 4.98 2362027 163376 +4233 9971210 1719780 1460476 6.83 479312 82669 +6907 10952164 7135921 2455406 4.46 332553 216676 +6924 53061915 2619630 0 0 5475966 270345 +6931 38997780 2256719 167238 233.19 2500513 144699 +6937 56558008 2479100 19007143 2.98 11237887 492589 +6938 111796707 3985421 12220095 9.15 6960642 248139 +6944 34171787 673293 52351790 0.65 13102894 258169 +6945 8091638 239403 105533 76.67 4612083 136455 +6948 487991 487991 0 0 116352 116352 +6949 1969215 1969215 1042512 1.89 438759 438759 +7405 1700147 1096726 0 0 36392 23475 +8405 224137604 9573590 2173006 103.15 12774952 545657 +8406 249515176 10181013 34718190 7.19 21666439 884060 +8409 358060386 7763633 0 0 39810369 863187 +8413 311306811 6091815 1039346191 0.3 49968245 977805 +8415 67525121 4106369 0 0 5618122 341652 +8416 349542897 12315655 64785321 5.4 20167649 710579 +8418 184814799 13215933 0 0 19761632 1413136 +8423 32050831 11007630 423579905 0.08 6078237 2087527 +410 108535642 13172040 220069619 0.49 13075031 1586804 +413 12655886 4558039 10848952 1.17 1483631 534332 +414 53852525 11007420 257086476 0.21 9352562 1911657 +416 9440499 5529298 78595143 0.12 1520093 890318 +419 24956730 3776615 100257476 0.25 4335036 656006 +420 86864443 7988818 0 0 3154936 290156 +421 16522839 2168430 242860 68.03 1995552 261893 +427 27568439 6240524 23294190 1.18 7556122 1710440 +429 8611077 2272992 30074878 0.29 1185895 313030 +430 4021620 1596096 477117709 0.01 562541 223260 +505 62155343 2686735 16082048 3.86 4410465 190647 +509 99287371 5127984 134828333 0.74 11791250 608993 +512 44074307 8657783 78728476 0.56 7913773 1554550 +515 6553053 950162 2000810 3.28 1853993 268820 +516 8419556 1514088 27357286 0.31 2923587 525748 +522 54650639 10629425 20441000 2.67 4500189 875277 +523 59961976 6609884 0 0 6252309 689221 +524 18273572 5032995 31558048 0.58 2307995 
635679 +1326 509265308 5985864 342713757 1.49 116518783 1369552 +1327 17981604 3649884 851938524 0.02 2410794 489340 +8319 8543597 2530891 612180480 0.01 1851628 548512 +8408 206559437 21539050 0 0 17256026 1799378 +8411 64330087 17625106 1198138870 0.05 17296100 4738772 +102 17196717 5084727 2381571 7.22 954265 282157 +126 47076706 6539129 0 0 473096 65715 +135 5963279 349607 2680238 2.22 201232 11798 +2103 30897371 6145673 2094190 14.75 1370161 272533 +2114 77705243 7131507 5030476 15.45 2408911 221081 +2134 45756932 8388077 1899524 24.09 1280900 234812 +2138 41122707 926209 1029048 39.96 609515 13728 +3909 47536705 5144151 5320571 8.93 4245859 459463 +6905 78258053 8148854 1331143 58.79 2018650 210198 +6910 9942445 1100250 0 0 624581 69117 +6925 34062388 2782443 919143 37.06 1972027 161088 +7116 46370117 5116567 0 0 2260910 249473 +7151 82552113 16362161 769095 107.34 2010564 398502 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151105_194342- cantons analyses - en m3 +Canton Demande(m3) Transfert(m3) Prélèvement(m3) Demande/Prélèvement Demande_corrigée(m3) Transfert_corrigé(m3) +101 3420629 904098 5736286 0.6 853775 225659 +117 16371486 4279784 84234381 0.19 6192555 1618839 +118 3264400 673350 2187571 1.49 445844 91964 +119 24705662 5113532 41914048 0.59 4312102 892511 +120 25319899 1997728 36697090 0.69 4824490 380650 +140 11075785 669000 4194571 2.64 3211646 193990 +518 7839638 7604648 39115762 0.2 668197 648168 +717 33824897 3640763 3800524 8.9 2795263 300870 +722 42933329 5506792 3188381 13.47 3486760 447225 +724 42432698 8076818 1076143 39.43 3175433 604425 +2602 114001185 24854541 137560762 0.83 31080274 6776122 +2604 158764617 6471800 12170476 13.05 42715191 1741220 +2607 164356260 24303083 64529426 2.55 22261703 3291801 +2611 49882189 5917307 18864524 2.64 7054602 836857 +2613 55285085 7168600 38455857 1.44 11287696 1463631 +2615 130055460 9220540 40220667 3.23 34712048 2460980 +2616 88832177 13983447 61813667 1.44 12042858 1895717 +2619 99244091 12468696 27135351 3.66 14108905 1772596 +2621 112475252 10008715 34567976 3.25 17431794 1551184 +2623 49299257 3008823 15321317 3.22 7734351 472042 +2625 106753731 7259130 50560369 2.11 4191203 284997 +2626 92865207 10382373 34589649 2.68 14114604 1578019 +2628 32115943 7236502 68828429 0.47 7405944 1668739 +2629 7729921 1747090 34945421 0.22 3938618 890193 +2632 6713616 1417259 6013900 1.12 2962906 625476 +2634 78995932 3986498 70727276 1.12 21466774 1083312 +3006 101250982 9079980 1149098 88.11 12342594 1106858 +3023 233148305 12222436 15166960 15.37 5924503 310583 +3026 177895768 12473824 5527811 32.18 3838342 269140 +3802 51769149 5190604 16890619 3.06 3683827 369357 +3807 46457943 4336214 19841286 2.34 4341064 405179 +3808 82553133 5692205 27901619 2.96 8257945 569402 +3815 15858686 2763682 2387143 6.64 938952 163630 +3819 61044092 9229945 32097905 1.9 4160718 629106 +3822 1323796 1239385 204368571 0.01 296108 277227 +3824 36271882 3766387 51884857 0.7 6881118 714519 +3825 19759345 3243141 5585667 3.54 1896242 311234 +3830 19867058 3150535 51674571 0.38 3318460 526244 +3837 19023771 2438062 7115524 2.67 1622755 207970 +3846 11234857 1956807 18797524 0.6 4450479 775153 +3853 22035143 3466334 3509429 6.28 1180865 185761 +4213 37690253 2909674 8442667 4.46 2117783 163492 +4233 9659779 1716627 1460476 6.61 464342 82518 +6907 10925460 7135725 2455406 4.45 331743 216670 +6924 49244174 2617659 0 0 5081977 270141 +6931 37046050 2256981 167238 221.52 2375370 144716 +6937 31757829 2180412 19007143 1.67 6310174 433241 +6938 59531348 
3842509 12220095 4.87 3706517 239241 +6944 18097371 610613 52351790 0.35 6939290 234134 +6945 4280343 239403 105533 40.56 2439716 136455 +6948 487991 487991 0 0 116352 116352 +6949 1726000 1726000 1042512 1.66 384569 384569 +7405 1600232 1052548 0 0 34253 22530 +8405 120698449 9107050 2173006 55.54 6879331 519066 +8406 143759706 9751902 34718190 4.14 12483252 846798 +8409 242944749 7641259 0 0 27011422 849581 +8413 176036022 6075358 1039346191 0.17 28255761 975163 +8415 66444287 4153873 0 0 5528196 345604 +8416 209611630 11592077 64785321 3.24 12094006 668830 +8418 140282161 13086647 0 0 14999905 1399312 +8423 23173181 10707854 423579905 0.05 4394647 2030677 +410 103914317 13089837 220069619 0.47 12518311 1576902 +413 11735524 4552541 10848952 1.08 1375739 533688 +414 52105824 10680649 257086476 0.2 9049212 1854907 +416 9451342 5540141 78595143 0.12 1521838 892064 +419 24329562 3707815 100257476 0.24 4226096 644055 +420 81091313 7990515 0 0 2945255 290217 +421 10185543 1991723 242860 41.94 1230163 240551 +427 27071383 6237607 23294190 1.16 7419886 1709641 +429 5886358 2102696 30074878 0.2 810654 289578 +430 3935143 1509619 477117709 0.01 550444 211164 +505 54284295 2696148 16082048 3.38 3851946 191315 +509 86345063 5113561 134828333 0.64 10254237 607280 +512 43763770 8349432 78728476 0.56 7858014 1499184 +515 6553053 950162 2000810 3.28 1853993 268820 +516 8429560 1515243 27357286 0.31 2927061 526149 +522 54196483 10628981 20441000 2.65 4462792 875240 +523 50798881 6568210 0 0 5296862 684875 +524 18272055 5030115 31558048 0.58 2307804 635315 +1326 253574505 5659238 342713757 0.74 58017289 1294821 +1327 16728575 3649946 851938524 0.02 2242800 489348 +8319 7412835 2342332 612180480 0.01 1606561 507647 +8408 153202266 20798144 0 0 12798555 1737482 +8411 55697905 16511125 1198138870 0.05 14975209 4439261 +102 13446906 4992198 2381571 5.65 746183 277023 +126 36788393 6405547 0 0 369704 64372 +135 2400743 316019 2680238 0.9 81014 10664 +2103 27097178 6064330 2094190 12.94 1201639 268926 +2114 59950686 6946735 5030476 11.92 1858509 215353 +2134 39954103 8267201 1899524 21.03 1118458 231429 +2138 30337066 925257 1029048 29.48 449651 13714 +3909 27983927 4789000 5320571 5.26 2499454 427741 +6905 59789371 8102951 1331143 44.92 1542254 209014 +6910 9412623 1056635 0 0 591298 66378 +6925 26691397 2689139 919143 29.04 1545286 155687 +7116 37167433 5028880 0 0 1812206 245198 +7151 63569484 16284879 769095 82.65 1548240 396620 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151105_195241- cantons analyses - en m3 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151105_195241- cantons analyses - en m3 +Canton Demande_87_2007(m3) Transfert_87_2007(m3) Prélèvement_87_2007(m3) Demande_corrigée_87_2007(m3) Transfert_corrigé_87_2007(m3) Demande_2008_2012(m3) Transfert_2008_2012(m3) Prélèvement_2008_2012(m3) Demande_corrigée_2008_2012(m3) Transfert_corrigé_2008_2012(m3) +0.000101 2.341729 0.761921 0.573629 0.584486 0.190172 0.734307 0.38094 0.970696 0.18328 0.095081 +0.000117 13.611943 3.403735 8.423438 5.148751 1.287471 7.365543 2.479981 12.412633 2.786035 0.938059 +0.000118 2.405113 0.520843 0.218757 0.328484 0.071135 0.318775 0.18625 0.356935 0.043538 0.025438 +0.000119 20.019295 4.037149 4.191405 3.494148 0.70464 11.846379 2.732665 4.789801 2.067655 0.476957 +0.00012 19.199685 1.61749 3.669709 3.658335 0.308199 11.048378 1.117069 4.539821 2.105174 0.212848 +0.00014 7.470388 0.555417 0.419457 2.166189 0.161054 4.667271 0.398512 0.558762 1.353369 0.115557 +0.000518 6.072986 5.973098 3.911576 0.51762 
0.509106 6.548414 6.473104 5.070826 0.558142 0.551723 +0.000717 17.094634 3.16569 0.380052 1.412687 0.26161 14.206568 3.35066 0.300758 1.17402 0.276896 +0.000722 25.730671 4.154819 0.318838 2.089674 0.337427 33.958915 4.56257 0.675474 2.757918 0.370542 +0.000724 23.330679 6.32955 0.107614 1.745942 0.473669 22.8803 7.173988 0.25053 1.712238 0.536862 +0.002602 85.393149 20.913682 13.756076 23.280832 5.701721 108.792475 24.239727 16.835708 29.660217 6.608505 +0.002604 124.115528 5.697101 1.217048 33.392947 1.53279 151.62736 6.336162 1.957566 40.794931 1.704727 +0.002607 107.406176 20.893738 6.452943 14.547936 2.830012 128.205316 23.376207 5.963191 17.365135 3.166257 +0.002611 42.827739 4.929098 1.886452 6.056924 0.697099 42.481525 5.42991 2.805221 6.007961 0.767926 +0.002613 43.742462 5.418314 3.845586 8.93101 1.106271 59.736669 6.57845 3.615312 12.196588 1.343139 +0.002615 85.445199 8.040812 4.022067 22.805485 2.146108 77.819205 8.172304 9.276599 20.770093 2.181203 +0.002616 74.715755 12.084643 6.181367 10.129114 1.638299 67.592573 11.914158 6.058549 9.163434 1.615186 +0.002619 99.539112 11.248359 2.713535 14.150846 1.599108 100.73788 11.390622 5.48112 14.321267 1.619333 +0.002621 103.393799 8.480434 3.456798 16.02432 1.314326 116.040299 9.694335 5.475886 17.984317 1.502461 +0.002623 48.013348 2.703254 1.532132 7.532611 0.424102 52.108657 2.910085 1.893185 8.175106 0.456551 +0.002625 62.755639 6.372097 5.056037 2.463817 0.250172 65.50829 6.390334 2.619503 2.571887 0.250888 +0.002626 83.903845 8.729449 3.458965 12.752564 1.326791 91.570081 9.907235 7.658636 13.917757 1.505803 +0.002628 23.114078 5.14524 6.882843 5.330112 1.186494 28.145266 6.82737 6.886378 6.490305 1.574393 +0.002629 6.67234 1.463447 3.494542 3.39975 0.745669 8.221574 1.761355 0.777969 4.189129 0.897461 +0.002632 5.374353 1.265407 0.60139 2.371852 0.558459 7.349634 1.468994 0.612703 3.243599 0.648308 +0.002634 62.205719 3.492916 7.072728 16.904112 0.949184 77.255887 4.021496 7.823647 20.993925 1.092823 +0.003006 117.206301 7.37949 0.11491 14.287564 0.899567 110.358919 7.370789 0.136045 13.452861 0.898506 +0.003023 199.980774 9.832495 1.516696 5.081687 0.249852 198.30585 10.33973 0.876913 5.039125 0.262742 +0.003026 129.195446 10.509616 0.552781 2.787567 0.226759 124.924841 12.329809 0.692753 2.695423 0.266032 +0.003802 42.553212 4.340308 1.689062 3.028033 0.308851 29.941626 4.672505 2.204683 2.130608 0.33249 +0.003807 41.321048 3.407804 1.984129 3.861069 0.318428 26.455818 3.542381 2.559996 2.472051 0.331003 +0.003808 71.917576 4.810994 2.790162 7.19405 0.481253 63.295941 4.822399 4.568417 6.331612 0.482394 +0.003815 12.622648 2.388937 0.238714 0.747354 0.141443 7.930489 2.48165 0.295723 0.469544 0.146932 +0.003819 50.326678 7.578489 3.20979 3.430227 0.516544 35.196463 6.564526 3.253579 2.398964 0.447433 +0.003822 0.883148 0.882511 20.436857 0.197544 0.197401 0.548164 0.548164 30.088511 0.122614 0.122614 +0.003824 22.12513 2.878897 5.188486 4.197346 0.546154 18.489135 3.259962 6.103692 3.507563 0.618446 +0.003825 15.532646 2.784122 0.558567 1.490619 0.267183 9.920588 2.624358 1.139189 0.952047 0.251851 +0.00383 12.513456 2.257665 5.167457 2.090164 0.377105 1.960263 1.448749 4.35326 0.327429 0.241989 +0.003837 16.196735 2.115515 0.711552 1.381604 0.180456 11.668737 2.209486 1.105724 0.99536 0.188472 +0.003846 10.396251 1.590815 1.879752 4.118281 0.630171 8.758225 1.463401 2.536342 3.469407 0.579699 +0.003853 18.346389 3.036171 0.350943 0.983185 0.162709 13.991566 3.087664 0.285872 0.749809 0.165468 +0.004213 21.039191 2.13657 
0.844267 1.182174 0.120052 27.350809 2.313562 0.966737 1.536819 0.129997 +0.004233 3.82564 1.438618 0.146048 0.183897 0.069154 5.517545 1.549681 0.15854 0.265226 0.074493 +0.006907 6.581556 5.043707 0.245541 0.199844 0.153148 5.645299 4.509145 0.157585 0.171415 0.136916 +0.006924 25.538485 2.125597 0 2.63556 0.219361 13.144679 1.889444 0 1.356525 0.19499 +0.006931 18.537988 1.836014 0.016724 1.188644 0.117724 9.861268 1.549136 0.02754 0.632298 0.09933 +0.006937 28.46818 1.927969 1.900714 5.656532 0.383081 22.001409 2.219286 1.969266 4.371606 0.440965 +0.006938 53.103503 3.432937 1.22201 3.306309 0.21374 46.587982 3.572943 1.972266 2.900643 0.222457 +0.006944 17.116491 0.564979 5.235179 6.563179 0.216636 14.016942 0.544316 5.133861 5.374683 0.208714 +0.006945 3.646145 0.239403 0.010553 2.078235 0.136455 2.696219 0.250094 0 1.536795 0.142549 +0.006948 0.343819 0.343819 0 0.081977 0.081977 0.302021 0.302021 0 0.072011 0.072011 +0.006949 1.439043 1.439043 0.104251 0.320632 0.320632 1.332336 1.332336 0.156426 0.296857 0.296857 +0.007405 0.80597 0.708877 0 0.017252 0.015174 0.698943 0.698943 0 0.014961 0.014961 +0.008405 131.775687 8.109062 0.217301 7.51069 0.462184 140.293066 8.604902 1.481742 7.996147 0.490445 +0.008406 133.350152 8.207091 3.471819 11.579347 0.712656 143.829093 8.390506 5.790008 12.489277 0.728582 +0.008409 175.437246 6.780109 0 19.505709 0.753836 205.272136 7.1802 1.454691 22.822853 0.798319 +0.008413 168.618336 5.524178 103.934619 27.065139 0.886693 172.955148 5.956469 20.693445 27.761247 0.95608 +0.008415 36.694311 3.658207 0 3.052984 0.304365 45.507841 4.396016 0.007549 3.786274 0.365751 +0.008416 167.587633 9.849951 6.478532 9.669338 0.568315 190.104269 9.926917 5.641193 10.968485 0.572755 +0.008418 97.304073 10.871341 0 10.404401 1.162436 104.191663 11.883667 0.338284 11.140868 1.270681 +0.008423 23.230097 8.896959 42.35799 4.40544 1.687252 24.815411 9.423056 10.204106 4.706085 1.787023 +0.00041 57.68671 10.85703 22.006962 6.949381 1.307921 55.011667 12.820776 4.664238 6.627125 1.544488 +0.000413 7.678803 3.625731 1.084895 0.900175 0.425039 3.842002 2.599793 1.949833 0.450392 0.30477 +0.000414 28.730502 8.65011 25.708648 4.989623 1.502264 26.987136 9.725091 7.82113 4.686853 1.688955 +0.000416 6.433231 4.272668 7.859514 1.035868 0.687978 5.774668 4.374504 5.889341 0.929827 0.704375 +0.000419 12.915511 3.112384 10.025748 2.243451 0.540628 12.780242 3.645846 0.859772 2.219955 0.633291 +0.00042 48.347902 6.455606 0 1.756007 0.234469 46.862727 7.586264 0.098948 1.702065 0.275535 +0.000421 7.482909 1.598709 0.024286 0.903751 0.193085 1.093098 0.691178 0.07234 0.132019 0.083477 +0.000427 15.257691 5.089449 2.329419 4.181919 1.394947 15.233099 5.074144 4.169108 4.175179 1.390752 +0.000429 4.288938 1.715821 3.007488 0.590661 0.236298 1.100434 0.878145 1.904867 0.151549 0.120936 +0.00043 2.565183 1.185642 47.711771 0.358815 0.165847 1.488436 0.864231 9.953381 0.208201 0.120888 +0.000505 33.58511 2.338153 1.608205 2.383157 0.165912 41.627226 2.53801 1.663783 2.953816 0.180094 +0.000509 55.275553 4.487985 13.482833 6.564459 0.532988 73.957198 5.161842 12.239339 8.783069 0.613014 +0.000512 26.709049 6.555539 7.872848 4.79575 1.177081 26.432475 7.037891 10.047226 4.746089 1.26369 +0.000515 3.584295 0.863013 0.200081 1.014071 0.244164 3.815602 0.939074 0.080886 1.079512 0.265683 +0.000516 4.650743 1.268064 2.735729 1.614913 0.44032 4.932511 1.363706 0.809955 1.712754 0.47353 +0.000522 34.405608 8.979685 2.0441 2.833119 0.739429 35.266183 10.031509 2.244727 2.903983 0.826041 +0.000523 
35.268063 5.531416 0 3.677444 0.576767 42.117724 6.343513 0.336162 4.391667 0.661446 +0.000524 11.58258 4.261015 3.155805 1.462907 0.538176 13.236307 4.843236 2.6615 1.671777 0.611712 +0.001326 288.776775 4.67092 34.271376 66.071491 1.068696 281.422795 4.965231 21.891082 64.388917 1.136034 +0.001327 11.101462 3.01205 85.193852 1.488373 0.403826 6.342101 2.380881 33.38636 0.850285 0.319205 +0.008319 4.734333 1.859051 61.218048 1.026058 0.402906 2.081203 1.235921 46.070747 0.451053 0.267857 +0.008408 128.690152 18.582282 0 10.750807 1.552368 117.253162 19.119132 1.45296 9.795358 1.597217 +0.008411 34.040124 13.560218 119.813887 9.152193 3.645866 34.358289 14.866805 28.910291 9.237737 3.997161 +0.000102 6.898556 3.388769 0.238157 0.382808 0.188046 4.079309 2.654383 0.25469 0.226365 0.147295 +0.000126 22.00381 4.507407 0 0.221127 0.045297 14.980587 3.512852 0 0.150547 0.035302 +0.000135 1.545847 0.286545 0.268024 0.052165 0.00967 0 0 0.067882 0 0 +0.002103 14.937501 4.32133 0.209419 0.662412 0.191632 6.079408 2.84364 0.390772 0.269595 0.126103 +0.002114 32.371059 5.454727 0.503048 1.003523 0.1691 6.396093 3.096884 0.513344 0.198283 0.096005 +0.002134 21.219385 5.914196 0.189952 0.594007 0.16556 7.932032 3.583646 0.29915 0.222046 0.100319 +0.002138 17.743929 0.780319 0.102905 0.262998 0.011566 5.990408 0.531527 0.059659 0.088789 0.007878 +0.003909 23.781083 3.908918 0.532057 2.124067 0.349135 5.424794 2.504901 0.877519 0.484529 0.223731 +0.006905 35.379327 5.988591 0.133114 0.912602 0.154474 28.162862 4.917479 0.178476 0.726455 0.126845 +0.00691 4.990309 0.852636 0 0.31349 0.053562 2.98766 0.64814 0 0.187684 0.040716 +0.006925 17.353595 2.179915 0.091914 1.004679 0.126205 12.399606 1.774096 0.035088 0.71787 0.10271 +0.007116 21.791419 3.639785 0 1.062504 0.177468 11.372229 2.623471 0.130689 0.554486 0.127915 +0.007151 36.787632 11.936776 0.07691 0.895966 0.290721 17.680343 8.673372 0.266408 0.430606 0.211241 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151105_195241- cantons analyses - en m3 +Canton Demande_87_2007(m3) Transfert_87_2007(m3) Prélèvement_87_2007(m3) Demande_corrigée_87_2007(m3) Transfert_corrigé_87_2007(m3) Demande_2008_2012(m3) Transfert_2008_2012(m3) Prélèvement_2008_2012(m3) Demande_corrigée_2008_2012(m3) Transfert_corrigé_2008_2012(m3) +101 2.341729 0.761921 0.573629 0.584486 0.190172 0.734307 0.38094 0.970696 0.18328 0.095081 +117 13.611943 3.403735 8.423438 5.148751 1.287471 7.365543 2.479981 12.412633 2.786035 0.938059 +118 2.405113 0.520843 0.218757 0.328484 0.071135 0.318775 0.18625 0.356935 0.043538 0.025438 +119 20.019295 4.037149 4.191405 3.494148 0.70464 11.846379 2.732665 4.789801 2.067655 0.476957 +120 19.199685 1.61749 3.669709 3.658335 0.308199 11.048378 1.117069 4.539821 2.105174 0.212848 +140 7.470388 0.555417 0.419457 2.166189 0.161054 4.667271 0.398512 0.558762 1.353369 0.115557 +518 6.072986 5.973098 3.911576 0.51762 0.509106 6.548414 6.473104 5.070826 0.558142 0.551723 +717 17.094634 3.16569 0.380052 1.412687 0.26161 14.206568 3.35066 0.300758 1.17402 0.276896 +722 25.730671 4.154819 0.318838 2.089674 0.337427 33.958915 4.56257 0.675474 2.757918 0.370542 +724 23.330679 6.32955 0.107614 1.745942 0.473669 22.8803 7.173988 0.25053 1.712238 0.536862 +2602 85.393149 20.913682 13.756076 23.280832 5.701721 108.792475 24.239727 16.835708 29.660217 6.608505 +2604 124.115528 5.697101 1.217048 33.392947 1.53279 151.62736 6.336162 1.957566 40.794931 1.704727 +2607 107.406176 20.893738 6.452943 14.547936 2.830012 128.205316 23.376207 5.963191 17.365135 3.166257 
+2611 42.827739 4.929098 1.886452 6.056924 0.697099 42.481525 5.42991 2.805221 6.007961 0.767926 +2613 43.742462 5.418314 3.845586 8.93101 1.106271 59.736669 6.57845 3.615312 12.196588 1.343139 +2615 85.445199 8.040812 4.022067 22.805485 2.146108 77.819205 8.172304 9.276599 20.770093 2.181203 +2616 74.715755 12.084643 6.181367 10.129114 1.638299 67.592573 11.914158 6.058549 9.163434 1.615186 +2619 99.539112 11.248359 2.713535 14.150846 1.599108 100.73788 11.390622 5.48112 14.321267 1.619333 +2621 103.393799 8.480434 3.456798 16.02432 1.314326 116.040299 9.694335 5.475886 17.984317 1.502461 +2623 48.013348 2.703254 1.532132 7.532611 0.424102 52.108657 2.910085 1.893185 8.175106 0.456551 +2625 62.755639 6.372097 5.056037 2.463817 0.250172 65.50829 6.390334 2.619503 2.571887 0.250888 +2626 83.903845 8.729449 3.458965 12.752564 1.326791 91.570081 9.907235 7.658636 13.917757 1.505803 +2628 23.114078 5.14524 6.882843 5.330112 1.186494 28.145266 6.82737 6.886378 6.490305 1.574393 +2629 6.67234 1.463447 3.494542 3.39975 0.745669 8.221574 1.761355 0.777969 4.189129 0.897461 +2632 5.374353 1.265407 0.60139 2.371852 0.558459 7.349634 1.468994 0.612703 3.243599 0.648308 +2634 62.205719 3.492916 7.072728 16.904112 0.949184 77.255887 4.021496 7.823647 20.993925 1.092823 +3006 117.206301 7.37949 0.11491 14.287564 0.899567 110.358919 7.370789 0.136045 13.452861 0.898506 +3023 199.980774 9.832495 1.516696 5.081687 0.249852 198.30585 10.33973 0.876913 5.039125 0.262742 +3026 129.195446 10.509616 0.552781 2.787567 0.226759 124.924841 12.329809 0.692753 2.695423 0.266032 +3802 42.553212 4.340308 1.689062 3.028033 0.308851 29.941626 4.672505 2.204683 2.130608 0.33249 +3807 41.321048 3.407804 1.984129 3.861069 0.318428 26.455818 3.542381 2.559996 2.472051 0.331003 +3808 71.917576 4.810994 2.790162 7.19405 0.481253 63.295941 4.822399 4.568417 6.331612 0.482394 +3815 12.622648 2.388937 0.238714 0.747354 0.141443 7.930489 2.48165 0.295723 0.469544 0.146932 +3819 50.326678 7.578489 3.20979 3.430227 0.516544 35.196463 6.564526 3.253579 2.398964 0.447433 +3822 0.883148 0.882511 20.436857 0.197544 0.197401 0.548164 0.548164 30.088511 0.122614 0.122614 +3824 22.12513 2.878897 5.188486 4.197346 0.546154 18.489135 3.259962 6.103692 3.507563 0.618446 +3825 15.532646 2.784122 0.558567 1.490619 0.267183 9.920588 2.624358 1.139189 0.952047 0.251851 +3830 12.513456 2.257665 5.167457 2.090164 0.377105 1.960263 1.448749 4.35326 0.327429 0.241989 +3837 16.196735 2.115515 0.711552 1.381604 0.180456 11.668737 2.209486 1.105724 0.99536 0.188472 +3846 10.396251 1.590815 1.879752 4.118281 0.630171 8.758225 1.463401 2.536342 3.469407 0.579699 +3853 18.346389 3.036171 0.350943 0.983185 0.162709 13.991566 3.087664 0.285872 0.749809 0.165468 +4213 21.039191 2.13657 0.844267 1.182174 0.120052 27.350809 2.313562 0.966737 1.536819 0.129997 +4233 3.82564 1.438618 0.146048 0.183897 0.069154 5.517545 1.549681 0.15854 0.265226 0.074493 +6907 6.581556 5.043707 0.245541 0.199844 0.153148 5.645299 4.509145 0.157585 0.171415 0.136916 +6924 25.538485 2.125597 0 2.63556 0.219361 13.144679 1.889444 0 1.356525 0.19499 +6931 18.537988 1.836014 0.016724 1.188644 0.117724 9.861268 1.549136 0.02754 0.632298 0.09933 +6937 28.46818 1.927969 1.900714 5.656532 0.383081 22.001409 2.219286 1.969266 4.371606 0.440965 +6938 53.103503 3.432937 1.22201 3.306309 0.21374 46.587982 3.572943 1.972266 2.900643 0.222457 +6944 17.116491 0.564979 5.235179 6.563179 0.216636 14.016942 0.544316 5.133861 5.374683 0.208714 +6945 3.646145 0.239403 0.010553 2.078235 0.136455 
2.696219 0.250094 0 1.536795 0.142549 +6948 0.343819 0.343819 0 0.081977 0.081977 0.302021 0.302021 0 0.072011 0.072011 +6949 1.439043 1.439043 0.104251 0.320632 0.320632 1.332336 1.332336 0.156426 0.296857 0.296857 +7405 0.80597 0.708877 0 0.017252 0.015174 0.698943 0.698943 0 0.014961 0.014961 +8405 131.775687 8.109062 0.217301 7.51069 0.462184 140.293066 8.604902 1.481742 7.996147 0.490445 +8406 133.350152 8.207091 3.471819 11.579347 0.712656 143.829093 8.390506 5.790008 12.489277 0.728582 +8409 175.437246 6.780109 0 19.505709 0.753836 205.272136 7.1802 1.454691 22.822853 0.798319 +8413 168.618336 5.524178 103.934619 27.065139 0.886693 172.955148 5.956469 20.693445 27.761247 0.95608 +8415 36.694311 3.658207 0 3.052984 0.304365 45.507841 4.396016 0.007549 3.786274 0.365751 +8416 167.587633 9.849951 6.478532 9.669338 0.568315 190.104269 9.926917 5.641193 10.968485 0.572755 +8418 97.304073 10.871341 0 10.404401 1.162436 104.191663 11.883667 0.338284 11.140868 1.270681 +8423 23.230097 8.896959 42.35799 4.40544 1.687252 24.815411 9.423056 10.204106 4.706085 1.787023 +410 57.68671 10.85703 22.006962 6.949381 1.307921 55.011667 12.820776 4.664238 6.627125 1.544488 +413 7.678803 3.625731 1.084895 0.900175 0.425039 3.842002 2.599793 1.949833 0.450392 0.30477 +414 28.730502 8.65011 25.708648 4.989623 1.502264 26.987136 9.725091 7.82113 4.686853 1.688955 +416 6.433231 4.272668 7.859514 1.035868 0.687978 5.774668 4.374504 5.889341 0.929827 0.704375 +419 12.915511 3.112384 10.025748 2.243451 0.540628 12.780242 3.645846 0.859772 2.219955 0.633291 +420 48.347902 6.455606 0 1.756007 0.234469 46.862727 7.586264 0.098948 1.702065 0.275535 +421 7.482909 1.598709 0.024286 0.903751 0.193085 1.093098 0.691178 0.07234 0.132019 0.083477 +427 15.257691 5.089449 2.329419 4.181919 1.394947 15.233099 5.074144 4.169108 4.175179 1.390752 +429 4.288938 1.715821 3.007488 0.590661 0.236298 1.100434 0.878145 1.904867 0.151549 0.120936 +430 2.565183 1.185642 47.711771 0.358815 0.165847 1.488436 0.864231 9.953381 0.208201 0.120888 +505 33.58511 2.338153 1.608205 2.383157 0.165912 41.627226 2.53801 1.663783 2.953816 0.180094 +509 55.275553 4.487985 13.482833 6.564459 0.532988 73.957198 5.161842 12.239339 8.783069 0.613014 +512 26.709049 6.555539 7.872848 4.79575 1.177081 26.432475 7.037891 10.047226 4.746089 1.26369 +515 3.584295 0.863013 0.200081 1.014071 0.244164 3.815602 0.939074 0.080886 1.079512 0.265683 +516 4.650743 1.268064 2.735729 1.614913 0.44032 4.932511 1.363706 0.809955 1.712754 0.47353 +522 34.405608 8.979685 2.0441 2.833119 0.739429 35.266183 10.031509 2.244727 2.903983 0.826041 +523 35.268063 5.531416 0 3.677444 0.576767 42.117724 6.343513 0.336162 4.391667 0.661446 +524 11.58258 4.261015 3.155805 1.462907 0.538176 13.236307 4.843236 2.6615 1.671777 0.611712 +1326 288.776775 4.67092 34.271376 66.071491 1.068696 281.422795 4.965231 21.891082 64.388917 1.136034 +1327 11.101462 3.01205 85.193852 1.488373 0.403826 6.342101 2.380881 33.38636 0.850285 0.319205 +8319 4.734333 1.859051 61.218048 1.026058 0.402906 2.081203 1.235921 46.070747 0.451053 0.267857 +8408 128.690152 18.582282 0 10.750807 1.552368 117.253162 19.119132 1.45296 9.795358 1.597217 +8411 34.040124 13.560218 119.813887 9.152193 3.645866 34.358289 14.866805 28.910291 9.237737 3.997161 +102 6.898556 3.388769 0.238157 0.382808 0.188046 4.079309 2.654383 0.25469 0.226365 0.147295 +126 22.00381 4.507407 0 0.221127 0.045297 14.980587 3.512852 0 0.150547 0.035302 +135 1.545847 0.286545 0.268024 0.052165 0.00967 0 0 0.067882 0 0 +2103 14.937501 4.32133 
0.209419 0.662412 0.191632 6.079408 2.84364 0.390772 0.269595 0.126103 +2114 32.371059 5.454727 0.503048 1.003523 0.1691 6.396093 3.096884 0.513344 0.198283 0.096005 +2134 21.219385 5.914196 0.189952 0.594007 0.16556 7.932032 3.583646 0.29915 0.222046 0.100319 +2138 17.743929 0.780319 0.102905 0.262998 0.011566 5.990408 0.531527 0.059659 0.088789 0.007878 +3909 23.781083 3.908918 0.532057 2.124067 0.349135 5.424794 2.504901 0.877519 0.484529 0.223731 +6905 35.379327 5.988591 0.133114 0.912602 0.154474 28.162862 4.917479 0.178476 0.726455 0.126845 +6910 4.990309 0.852636 0 0.31349 0.053562 2.98766 0.64814 0 0.187684 0.040716 +6925 17.353595 2.179915 0.091914 1.004679 0.126205 12.399606 1.774096 0.035088 0.71787 0.10271 +7116 21.791419 3.639785 0 1.062504 0.177468 11.372229 2.623471 0.130689 0.554486 0.127915 +7151 36.787632 11.936776 0.07691 0.895966 0.290721 17.680343 8.673372 0.266408 0.430606 0.211241 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151105_195555- cantons analyses - en m3 +Canton Demande_87_2007(m3) Transfert_87_2007(m3) Prélèvement_87_2007(m3) Demande_corrigée_87_2007(m3) Transfert_corrigé_87_2007(m3) Demande_2008_2012(m3) Transfert_2008_2012(m3) Prélèvement_2008_2012(m3) Demande_corrigée_2008_2012(m3) Transfert_corrigé_2008_2012(m3) +101 2.265331 0.761921 0.573629 0.565418 0.190172 0.734307 0.38094 0.970696 0.18328 0.095081 +117 12.914545 3.40374 8.423438 4.884959 1.287473 7.017801 2.479987 12.412633 2.654501 0.938061 +118 2.375989 0.520836 0.218757 0.324507 0.071135 0.318775 0.18625 0.356935 0.043538 0.025438 +119 19.394242 4.037171 4.191405 3.385052 0.704644 11.744991 2.732687 4.789801 2.049959 0.47696 +120 18.530018 1.617512 3.669709 3.530736 0.308203 10.929231 1.117067 4.539821 2.082471 0.212848 +140 7.421114 0.55539 0.419457 2.151901 0.161046 4.617482 0.398479 0.558762 1.338932 0.115547 +518 6.072986 5.973098 3.911576 0.51762 0.509106 6.548414 6.473104 5.070826 0.558142 0.551723 +717 17.093153 3.165688 0.380052 1.412565 0.26161 14.206568 3.35066 0.300758 1.17402 0.276896 +722 25.55086 4.154767 0.318838 2.075071 0.337423 33.403755 4.56244 0.675474 2.712831 0.370531 +724 23.330679 6.32955 0.107614 1.745942 0.473669 22.8803 7.173988 0.25053 1.712238 0.536862 +2602 81.004048 20.915785 13.756076 22.084227 5.702294 105.166156 24.240815 16.835708 28.67157 6.608801 +2604 114.896429 5.693677 1.217048 30.912574 1.531868 141.852639 6.331658 1.957566 38.165069 1.703515 +2607 107.030368 20.893281 6.452943 14.497034 2.82995 127.761043 23.373539 5.963191 17.30496 3.165896 +2611 39.024318 4.932032 1.886452 5.519025 0.697514 38.188502 5.436396 2.805221 5.400819 0.768844 +2613 39.039836 5.419086 3.845586 7.970862 1.106428 53.199334 6.584044 3.615312 10.861843 1.344281 +2615 85.420838 8.042025 4.022067 22.798983 2.146432 77.818748 8.172655 9.276599 20.769971 2.181297 +2616 69.52707 12.083086 6.181367 9.425691 1.638088 66.069071 11.913803 6.058549 8.956895 1.615138 +2619 82.691108 11.24471 2.713535 11.755672 1.598589 84.150558 11.386644 5.48112 11.963153 1.618767 +2621 90.829227 8.478752 3.456798 14.07702 1.314066 103.33608 9.692203 5.475886 16.015375 1.50213 +2623 40.575465 2.701275 1.532132 6.365713 0.423792 42.237583 2.898371 1.893185 6.626475 0.454713 +2625 61.85688 6.374261 5.056037 2.428531 0.250257 64.844346 6.396391 2.619503 2.54582 0.251125 +2626 74.39837 8.736583 3.458965 11.307825 1.327875 79.20992 9.905975 7.658636 12.039134 1.505611 +2628 22.214204 5.149199 6.882843 5.122601 1.187406 27.034472 6.834087 6.886378 6.234156 1.575942 +2629 6.194281 1.462599 3.494542 
3.156165 0.745237 7.699611 1.760039 0.777969 3.923174 0.896791 +2632 5.008002 1.265099 0.60139 2.210171 0.558323 7.101674 1.46885 0.612703 3.134167 0.648245 +2634 58.526731 3.496064 7.072728 15.904365 0.950039 73.245389 4.026542 7.823647 19.904091 1.094194 +3006 87.730592 7.37955 0.11491 10.694446 0.899574 83.189312 7.367658 0.136045 10.140859 0.898125 +3023 177.428863 9.832498 1.516696 4.508623 0.249852 176.320067 10.339775 0.876913 4.480447 0.262743 +3026 120.2386 10.506864 0.552781 2.594311 0.2267 118.912032 12.340924 0.692753 2.565688 0.266272 +3802 39.269403 4.340135 1.689062 2.794361 0.308839 29.214104 4.672406 2.204683 2.078839 0.332483 +3807 37.516334 3.407179 1.984129 3.505553 0.318369 25.837638 3.542299 2.559996 2.414288 0.330995 +3808 64.390656 4.810671 2.790162 6.441118 0.48122 59.927058 4.823396 4.568417 5.994616 0.482493 +3815 11.843882 2.387708 0.238714 0.701246 0.14137 7.881702 2.481669 0.295723 0.466655 0.146933 +3819 45.755104 7.578287 3.20979 3.118632 0.51653 34.036606 6.564438 3.253579 2.319909 0.447427 +3822 0.883148 0.882511 20.436857 0.197544 0.197401 0.548164 0.548164 30.088511 0.122614 0.122614 +3824 21.638004 2.879859 5.188486 4.104933 0.546336 18.39018 3.260223 6.103692 3.488791 0.618495 +3825 14.547252 2.783827 0.558567 1.396054 0.267155 9.780957 2.624343 1.139189 0.938647 0.25185 +3830 12.42063 2.257664 5.167457 2.074659 0.377105 1.960263 1.448749 4.35326 0.327429 0.241989 +3837 14.728244 2.117538 0.711552 1.25634 0.180629 11.288042 2.209483 1.105724 0.962886 0.188472 +3846 9.228173 1.587573 1.879752 3.655569 0.628887 8.168215 1.463034 2.536342 3.235686 0.579554 +3853 16.680195 3.036067 0.350943 0.893893 0.162703 13.537035 3.087575 0.285872 0.725451 0.165463 +4213 20.868601 2.134407 0.844267 1.172589 0.119931 27.074412 2.313691 0.966737 1.521288 0.130004 +4233 3.82564 1.438618 0.146048 0.183897 0.069154 5.517545 1.549681 0.15854 0.265226 0.074493 +6907 6.581556 5.043707 0.245541 0.199844 0.153148 5.645299 4.509145 0.157585 0.171415 0.136916 +6924 25.530832 2.125595 0 2.634771 0.21936 13.144679 1.889444 0 1.356525 0.19499 +6931 18.537983 1.836015 0.016724 1.188644 0.117724 9.861268 1.549136 0.02754 0.632298 0.09933 +6937 25.303987 1.929126 1.900714 5.027818 0.383311 20.744869 2.220603 1.969266 4.121936 0.441226 +6938 46.705076 3.435104 1.22201 2.907933 0.213875 42.59433 3.572879 1.972266 2.651991 0.222453 +6944 14.820403 0.564475 5.235179 5.682763 0.216443 12.574607 0.543457 5.133861 4.821631 0.208384 +6945 3.269613 0.239403 0.010553 1.863618 0.136455 2.560751 0.250094 0 1.45958 0.142549 +6948 0.343819 0.343819 0 0.081977 0.081977 0.302021 0.302021 0 0.072011 0.072011 +6949 1.439043 1.439043 0.104251 0.320632 0.320632 1.332336 1.332336 0.156426 0.296857 0.296857 +7405 0.80597 0.708877 0 0.017252 0.015174 0.698943 0.698943 0 0.014961 0.014961 +8405 102.709397 8.086512 0.217301 5.854027 0.460899 107.746821 8.581448 1.481742 6.14114 0.489108 +8406 109.622052 8.208861 3.471819 9.518938 0.71281 116.504559 8.404729 5.790008 10.116575 0.729818 +8409 161.084224 6.789379 0 17.909891 0.754866 185.14449 7.188273 1.454691 20.584993 0.799217 +8413 134.874503 5.525338 103.934619 21.648875 0.886879 134.247747 5.954294 20.693445 21.548273 0.955731 +8415 36.68563 3.658901 0 3.052262 0.304422 45.499156 4.396912 0.007549 3.785552 0.365825 +8416 148.052384 9.855129 6.478532 8.542209 0.568613 163.001377 9.93637 5.641193 9.404724 0.573301 +8418 91.67678 10.86995 0 9.802693 1.162288 97.962644 11.883415 0.338284 10.47482 1.270654 +8423 21.04243 8.896959 42.35799 3.990563 1.687252 
22.282969 9.423056 10.204106 4.225824 1.787023 +410 57.68671 10.85703 22.006962 6.949381 1.307921 55.011667 12.820776 4.664238 6.627125 1.544488 +413 7.652014 3.625731 1.084895 0.897035 0.425039 3.842002 2.599793 1.949833 0.450392 0.30477 +414 28.730502 8.65011 25.708648 4.989623 1.502264 26.987136 9.725091 7.82113 4.686853 1.688955 +416 6.433231 4.272668 7.859514 1.035868 0.687978 5.774668 4.374504 5.889341 0.929827 0.704375 +419 12.915511 3.112384 10.025748 2.243451 0.540628 12.780242 3.645846 0.859772 2.219955 0.633291 +420 48.347902 6.455606 0 1.756007 0.234469 46.862727 7.586264 0.098948 1.702065 0.275535 +421 7.272366 1.598709 0.024286 0.878323 0.193085 1.093098 0.691178 0.07234 0.132019 0.083477 +427 15.25769 5.089449 2.329419 4.181919 1.394947 15.233083 5.074156 4.169108 4.175174 1.390755 +429 4.146324 1.715821 3.007488 0.571021 0.236298 1.100434 0.878145 1.904867 0.151549 0.120936 +430 2.565183 1.185642 47.711771 0.358815 0.165847 1.488436 0.864231 9.953381 0.208201 0.120888 +505 33.371884 2.339158 1.608205 2.368027 0.165984 41.192524 2.540742 1.663783 2.92297 0.180288 +509 54.477933 4.486962 13.482833 6.469735 0.532866 72.192233 5.150558 12.239339 8.573464 0.611674 +512 26.709049 6.555539 7.872848 4.79575 1.177081 26.432475 7.037891 10.047226 4.74609 1.26369 +515 3.584295 0.863013 0.200081 1.014071 0.244164 3.815602 0.939074 0.080886 1.079512 0.265683 +516 4.650743 1.268064 2.735729 1.614913 0.44032 4.932508 1.363707 0.809955 1.712753 0.47353 +522 34.405608 8.979684 2.0441 2.833119 0.739429 35.266172 10.031516 2.244727 2.903982 0.826042 +523 33.957698 5.531425 0 3.540811 0.576769 39.74972 6.343569 0.336162 4.144752 0.661452 +524 11.582581 4.261014 3.155805 1.462907 0.538176 13.236271 4.843254 2.6615 1.671772 0.611714 +1326 221.3387 4.669405 34.271376 50.641808 1.06835 213.928504 4.963572 21.891082 48.946371 1.135654 +1327 11.101462 3.01205 85.193852 1.488373 0.403826 6.342101 2.380881 33.38636 0.850285 0.319205 +8319 4.713478 1.859051 61.218048 1.021538 0.402907 2.081203 1.23592 46.070747 0.451053 0.267857 +8408 118.902232 18.582211 0 9.933122 1.552362 108.508295 19.114895 1.45296 9.06481 1.596863 +8411 33.837989 13.563395 119.813887 9.097846 3.64672 34.336516 14.871599 28.910291 9.231883 3.99845 +102 6.898506 3.38877 0.238157 0.382806 0.188047 4.079309 2.654383 0.25469 0.226365 0.147295 +126 21.866704 4.507407 0 0.219749 0.045297 14.886106 3.512852 0 0.149597 0.035302 +135 1.536609 0.286545 0.268024 0.051853 0.00967 0 0 0.067882 0 0 +2103 14.890091 4.321331 0.209419 0.660309 0.191632 6.079408 2.84364 0.390772 0.269595 0.126103 +2114 32.244904 5.454725 0.503048 0.999612 0.1691 6.396093 3.096884 0.513344 0.198283 0.096005 +2134 21.13001 5.915152 0.189952 0.591505 0.165586 7.932032 3.583646 0.29915 0.222046 0.100319 +2138 17.361943 0.780319 0.102905 0.257336 0.011566 5.986469 0.531527 0.059659 0.088731 0.007878 +3909 21.832366 3.909273 0.532057 1.950012 0.349166 5.424794 2.504901 0.877519 0.484529 0.223731 +6905 34.88468 5.988591 0.133114 0.899843 0.154474 27.8766 4.917504 0.178476 0.719071 0.126846 +6910 4.990309 0.852636 0 0.31349 0.053562 2.98766 0.64814 0 0.187684 0.040716 +6925 16.92219 2.179934 0.091914 0.979703 0.126206 12.176718 1.774128 0.035088 0.704966 0.102712 +7116 21.525194 3.639852 0 1.049523 0.177472 11.372229 2.623471 0.130689 0.554486 0.127915 +7151 36.146475 11.936786 0.07691 0.880351 0.290721 17.680343 8.673372 0.266408 0.430606 0.211241 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151105_222908- cantons analyses - en m3 +Canton Demande_87_2007(m3) 
Transfert_87_2007(m3) Prélèvement_87_2007(m3) Demande_corrigée_87_2007(m3) Transfert_corrigé_87_2007(m3) Demande_2008_2012(m3) Transfert_2008_2012(m3) Prélèvement_2008_2012(m3) Demande_corrigée_2008_2012(m3) Transfert_corrigé_2008_2012(m3) +101 0.172491 0.089854 0.573629 0.043053 0.022427 0.031567 0.031567 0.970696 0.007879 0.007879 +117 1.206778 0.627923 8.423438 0.456467 0.237513 0.734694 0.419442 12.412633 0.2779 0.158655 +118 0.105066 0.089831 0.218757 0.01435 0.012269 0 0 0.356935 0 0 +119 1.265181 0.799671 4.191405 0.220823 0.139574 0.735393 0.609998 4.789801 0.128355 0.106468 +120 1.026833 0.437738 3.669709 0.195654 0.083407 0.439077 0.303711 4.539821 0.083662 0.057869 +140 0.937245 0.207009 0.419457 0.271773 0.060026 0.606458 0.105489 0.558762 0.175855 0.030589 +518 1.639446 1.639446 3.911576 0.139735 0.139735 2.003632 2.003632 5.070826 0.170776 0.170776 +717 2.012119 1.196537 0.380052 0.16628 0.098881 1.567755 1.150214 0.300758 0.129558 0.095053 +722 5.455554 1.427745 0.318838 0.443064 0.115952 7.804581 1.604838 0.675474 0.633836 0.130334 +724 5.030893 2.312414 0.107614 0.376485 0.173049 5.216116 2.565639 0.25053 0.390346 0.191999 +2602 23.224079 9.441881 13.756076 6.331607 2.574151 25.664982 11.64496 16.835708 6.997074 3.174779 +2604 33.446098 3.077182 1.217048 8.998582 0.827907 38.754672 3.722991 1.957566 10.42684 1.001661 +2607 37.280872 12.033809 6.452943 5.049614 1.629954 42.161554 13.809719 5.963191 5.710692 1.870497 +2611 7.004935 1.690281 1.886452 0.990675 0.239048 6.139951 1.881791 2.805221 0.868344 0.266133 +2613 8.606761 1.868993 3.845586 1.757264 0.381597 11.119073 2.501064 3.615312 2.270209 0.510649 +2615 29.735724 4.781365 4.022067 7.936521 1.276155 23.369337 4.552717 9.276599 6.23732 1.215129 +2616 24.288543 5.919056 6.181367 3.292765 0.802438 17.017465 5.448814 6.058549 2.307035 0.738688 +2619 31.200623 5.820158 2.713535 4.435595 0.827415 31.319027 5.87018 5.48112 4.452428 0.834526 +2621 19.910134 3.105952 3.456798 3.08574 0.481371 22.758026 3.810958 5.475886 3.527116 0.590635 +2623 7.50724 0.909852 1.532132 1.177779 0.142743 9.715833 1.049124 1.893185 1.524276 0.164593 +2625 14.042471 3.313047 5.056037 0.551314 0.130072 13.377825 3.569346 2.619503 0.52522 0.140134 +2626 15.837521 3.227111 3.458965 2.407148 0.490489 16.821589 3.938105 7.658636 2.556717 0.598554 +2628 4.128019 1.479588 6.882843 0.951922 0.341193 5.570735 2.216769 6.886378 1.284613 0.511187 +2629 2.014108 0.602483 3.494542 1.026246 0.306982 2.520144 0.789891 0.777969 1.284086 0.402472 +2632 1.379373 0.591389 0.60139 0.608756 0.260997 1.570092 0.769816 0.612703 0.692925 0.339741 +2634 17.785367 1.859967 7.072728 4.83309 0.505437 22.051403 2.318933 7.823647 5.992365 0.630159 +3006 31.669877 3.09769 0.11491 3.860589 0.377611 29.459569 3.096713 0.136045 3.59115 0.377492 +3023 61.139295 4.736471 1.516696 1.553603 0.120358 59.996651 4.970878 0.876913 1.524567 0.126314 +3026 30.925117 5.537946 0.552781 0.667251 0.119489 25.971622 6.637059 0.692753 0.560373 0.143204 +3802 4.810027 1.203546 1.689062 0.342275 0.085643 1.845132 0.892322 2.204683 0.131297 0.063496 +3807 4.104051 0.762899 1.984129 0.383486 0.071286 1.05559 0.521225 2.559996 0.098635 0.048704 +3808 10.42628 1.678271 2.790162 1.04296 0.167881 5.927122 1.487135 4.568417 0.592901 0.148761 +3815 1.194661 0.63311 0.238714 0.070733 0.037485 0.517443 0.422541 0.295723 0.030636 0.025018 +3819 7.759241 2.622833 3.20979 0.528864 0.17877 3.43889 1.91422 3.253579 0.234392 0.130472 +3822 0.165356 0.165356 20.436857 0.036987 0.036987 0.052141 0.052141 30.088511 
0.011663 0.011663 +3824 1.651478 0.634216 5.188486 0.313301 0.120317 0.86526 0.623543 6.103692 0.164148 0.118292 +3825 1.711743 0.644163 0.558567 0.164271 0.061818 0.623435 0.379449 1.139189 0.059829 0.036415 +3830 0.630581 0.446249 5.167457 0.105328 0.074538 0.052348 0.052348 4.35326 0.008744 0.008744 +3837 1.919544 0.538473 0.711552 0.16374 0.045933 0.8755 0.348854 1.105724 0.074681 0.029758 +3846 1.397585 0.378345 1.879752 0.553627 0.149874 0.787639 0.274145 2.536342 0.312008 0.108598 +3853 2.230212 0.734501 0.350943 0.119517 0.039362 0.88063 0.477216 0.285872 0.047193 0.025574 +4213 3.035305 0.717826 0.844267 0.170551 0.040334 3.9838 0.767302 0.966737 0.223846 0.043114 +4233 0.336188 0.332422 0.146048 0.01616 0.015979 0.39259 0.389122 0.15854 0.018872 0.018705 +6907 1.463324 1.463324 0.245541 0.044433 0.044433 1.171937 1.171937 0.157585 0.035585 0.035585 +6924 3.065424 0.84358 0 0.31635 0.087057 0.732748 0.642838 0 0.075619 0.066341 +6931 2.357736 0.630818 0.016724 0.151177 0.040448 0.62719 0.431148 0.02754 0.040215 0.027645 +6937 3.091504 0.555157 1.900714 0.614271 0.110308 1.536838 0.456254 1.969266 0.305365 0.090656 +6938 6.967651 1.272659 1.22201 0.433817 0.079238 5.136098 1.155356 1.972266 0.319782 0.071934 +6944 2.257632 0.190656 5.235179 0.865671 0.073106 1.419167 0.169516 5.133861 0.544168 0.065 +6945 0.568841 0.074515 0.010553 0.324229 0.042472 0.38475 0.064832 0 0.2193 0.036953 +6948 0.113087 0.113087 0 0.026963 0.026963 0.087604 0.087604 0 0.020887 0.020887 +6949 0.389401 0.389401 0.104251 0.086762 0.086762 0.279972 0.279972 0.156426 0.06238 0.06238 +7405 0.10004 0.10004 0 0.002141 0.002141 0.046031 0.046031 0 0.000985 0.000985 +8405 43.255179 4.666925 0.217301 2.465373 0.265996 47.073015 5.17113 1.481742 2.682975 0.294734 +8406 31.721893 4.015049 3.471819 2.754544 0.348643 35.429292 4.157191 5.790008 3.076473 0.360986 +8409 29.028334 3.413447 0 3.227469 0.379519 38.875745 4.188472 1.454691 4.322337 0.465689 +8413 43.928508 3.903215 103.934619 7.05102 0.62651 49.6211 4.300695 20.693445 7.964745 0.69031 +8415 8.693156 2.35287 0 0.723275 0.19576 11.939666 3.056595 0.007549 0.993386 0.25431 +8416 30.711959 3.419496 6.478532 1.771994 0.197295 36.643607 4.073703 5.641193 2.114234 0.235041 +8418 18.570114 5.683167 0 1.985641 0.607682 20.788988 6.345343 0.338284 2.222897 0.678486 +8423 9.201931 4.451886 42.35799 1.745088 0.844272 9.963004 4.770264 10.204106 1.88942 0.90465 +410 12.810077 5.531629 22.006962 1.5432 0.666382 10.98093 6.812904 4.664238 1.322847 0.820734 +413 2.393948 1.479867 1.084895 0.280639 0.173483 1.041289 0.810284 1.949833 0.122069 0.094988 +414 6.093291 3.683479 25.708648 1.058221 0.639709 5.350819 4.146345 7.82113 0.929276 0.720095 +416 2.504977 2.120416 7.859514 0.403347 0.341426 2.25184 2.127831 5.889341 0.362587 0.34262 +419 2.667897 1.450613 10.025748 0.463419 0.251974 2.277812 1.807533 0.859772 0.395661 0.313972 +420 12.339022 3.239641 0 0.448156 0.117664 12.298706 3.942073 0.098948 0.446692 0.143177 +421 0.363107 0.20549 0.024286 0.043854 0.024818 0 0 0.07234 0 0 +427 4.007592 2.639512 2.329419 1.098425 0.723453 3.450146 2.821273 4.169108 0.945637 0.773271 +429 0.569 0.447665 3.007488 0.078361 0.061651 0.179526 0.184784 1.904867 0.024724 0.025448 +430 1.090054 0.606352 47.711771 0.152476 0.084816 0.696531 0.469081 9.953381 0.09743 0.065615 +505 8.135167 1.28097 1.608205 0.577261 0.090896 11.743731 1.45356 1.663783 0.833321 0.103143 +509 15.010774 2.514222 13.482833 1.782662 0.298586 22.839465 3.015914 12.239339 2.712388 0.358166 +512 8.729441 3.00379 
7.872848 1.567417 0.539346 8.275396 3.221476 10.047226 1.485891 0.578433 +515 1.008309 0.521134 0.200081 0.285271 0.147439 1.147746 0.558353 0.080886 0.324721 0.157969 +516 1.221978 0.688514 2.735729 0.424317 0.239078 1.23819 0.792939 0.809955 0.429946 0.275338 +522 13.464493 5.540168 2.0441 1.108729 0.456203 13.515894 6.080608 2.244727 1.112962 0.500706 +523 11.510862 3.200164 0 1.200252 0.333685 14.986735 3.612314 0.336162 1.562685 0.37666 +524 4.475632 2.472077 3.155805 0.565283 0.312229 4.991376 2.869415 2.6615 0.630423 0.362414 +1326 85.766923 1.957587 34.271376 19.623283 0.447891 85.308643 2.183085 21.891082 19.51843 0.499485 +1327 3.545637 1.358932 85.193852 0.475364 0.182192 1.814058 0.971062 33.38636 0.243211 0.13019 +8319 0.971269 0.552233 61.218048 0.2105 0.119684 0.348919 0.299588 46.070747 0.07562 0.064929 +8408 48.765502 10.670544 0 4.073882 0.89142 44.297591 10.577727 1.45296 3.700632 0.883666 +8411 9.317835 5.511339 119.813887 2.505238 1.481805 9.675257 6.116222 28.910291 2.601337 1.644437 +102 0.728775 0.693738 0.238157 0.040441 0.038496 0.534998 0.499119 0.25469 0.029688 0.027697 +126 2.151944 0.997868 0 0.021626 0.010028 1.58914 0.699271 0 0.01597 0.007027 +135 0 0 0.268024 0 0 0 0 0.067882 0 0 +2103 1.786211 0.914414 0.209419 0.079211 0.04055 0.433377 0.323009 0.390772 0.019218 0.014324 +2114 1.849585 0.842072 0.503048 0.057338 0.026105 0.085471 0.085023 0.513344 0.00265 0.002636 +2134 2.786521 1.340334 0.189952 0.078005 0.037521 0.447506 0.391208 0.29915 0.012527 0.010951 +2138 1.757151 0.165977 0.102905 0.026044 0.00246 0.183716 0.043615 0.059659 0.002723 0.000646 +3909 2.941383 0.925461 0.532057 0.262717 0.08266 0.082767 0.082767 0.877519 0.007393 0.007393 +6905 3.226787 1.30178 0.133114 0.083234 0.033579 2.912163 1.102357 0.178476 0.075119 0.028435 +6910 0.768294 0.175849 0 0.048264 0.011047 0.269453 0.116869 0 0.016927 0.007342 +6925 3.079319 0.682412 0.091914 0.178276 0.039508 1.917106 0.541757 0.035088 0.11099 0.031365 +7116 2.749838 0.869642 0 0.134076 0.042402 1.059585 0.466661 0.130689 0.051663 0.022753 +7151 5.136608 3.120501 0.07691 0.125103 0.076 1.729687 1.630881 0.266408 0.042127 0.03972 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151105_232408- cantons analyses - en m3 +Canton Demande_87_2007(m3) Transfert_87_2007(m3) Prélèvement_87_2007(m3) Demande_corrigée_87_2007(m3) Transfert_corrigé_87_2007(m3) Demande_2008_2012(m3) Transfert_2008_2012(m3) Prélèvement_2008_2012(m3) Demande_corrigée_2008_2012(m3) Transfert_corrigé_2008_2012(m3) +101 0.172491 0.089854 0.573629 0.043053 0.022427 0.031567 0.031567 0.970696 0.007879 0.007879 +117 1.206778 0.627923 8.423438 0.456467 0.237513 0.734694 0.419442 12.412633 0.2779 0.158655 +118 0.105066 0.089831 0.218757 0.01435 0.012269 0 0 0.356935 0 0 +119 1.265181 0.799671 4.191405 0.220823 0.139574 0.735393 0.609998 4.789801 0.128355 0.106468 +120 1.026833 0.437738 3.669709 0.195654 0.083407 0.439077 0.303711 4.539821 0.083662 0.057869 +140 0.937245 0.207009 0.419457 0.271773 0.060026 0.606458 0.105489 0.558762 0.175855 0.030589 +518 1.639446 1.639446 3.911576 0.139735 0.139735 2.003632 2.003632 5.070826 0.170776 0.170776 +717 2.012119 1.196537 0.380052 0.16628 0.098881 1.567755 1.150214 0.300758 0.129558 0.095053 +722 5.455554 1.427745 0.318838 0.443064 0.115952 7.804581 1.604838 0.675474 0.633836 0.130334 +724 5.030893 2.312414 0.107614 0.376485 0.173049 5.216116 2.565639 0.25053 0.390346 0.191999 +2602 23.224079 9.441881 13.756076 6.331607 2.574151 25.664982 11.64496 16.835708 6.997074 3.174779 +2604 33.446098 
3.077182 1.217048 8.998582 0.827907 38.754672 3.722991 1.957566 10.42684 1.001661 +2607 37.280872 12.033809 6.452943 5.049614 1.629954 42.161554 13.809719 5.963191 5.710692 1.870497 +2611 7.004935 1.690281 1.886452 0.990675 0.239048 6.139951 1.881791 2.805221 0.868344 0.266133 +2613 8.606761 1.868993 3.845586 1.757264 0.381597 11.119073 2.501064 3.615312 2.270209 0.510649 +2615 29.735724 4.781365 4.022067 7.936521 1.276155 23.369337 4.552717 9.276599 6.23732 1.215129 +2616 24.288543 5.919056 6.181367 3.292765 0.802438 17.017465 5.448814 6.058549 2.307035 0.738688 +2619 31.200623 5.820158 2.713535 4.435595 0.827415 31.319027 5.87018 5.48112 4.452428 0.834526 +2621 19.910134 3.105952 3.456798 3.08574 0.481371 22.758026 3.810958 5.475886 3.527116 0.590635 +2623 7.50724 0.909852 1.532132 1.177779 0.142743 9.715833 1.049124 1.893185 1.524276 0.164593 +2625 14.042471 3.313047 5.056037 0.551314 0.130072 13.377825 3.569346 2.619503 0.52522 0.140134 +2626 15.837521 3.227111 3.458965 2.407148 0.490489 16.821589 3.938105 7.658636 2.556717 0.598554 +2628 4.128019 1.479588 6.882843 0.951922 0.341193 5.570735 2.216769 6.886378 1.284613 0.511187 +2629 2.014108 0.602483 3.494542 1.026246 0.306982 2.520144 0.789891 0.777969 1.284086 0.402472 +2632 1.379373 0.591389 0.60139 0.608756 0.260997 1.570092 0.769816 0.612703 0.692925 0.339741 +2634 17.785367 1.859967 7.072728 4.83309 0.505437 22.051403 2.318933 7.823647 5.992365 0.630159 +3006 31.669877 3.09769 0.11491 3.860589 0.377611 29.459569 3.096713 0.136045 3.59115 0.377492 +3023 61.139295 4.736471 1.516696 1.553603 0.120358 59.996651 4.970878 0.876913 1.524567 0.126314 +3026 30.925117 5.537946 0.552781 0.667251 0.119489 25.971622 6.637059 0.692753 0.560373 0.143204 +3802 4.810027 1.203546 1.689062 0.342275 0.085643 1.845132 0.892322 2.204683 0.131297 0.063496 +3807 4.104051 0.762899 1.984129 0.383486 0.071286 1.05559 0.521225 2.559996 0.098635 0.048704 +3808 10.42628 1.678271 2.790162 1.04296 0.167881 5.927122 1.487135 4.568417 0.592901 0.148761 +3815 1.194661 0.63311 0.238714 0.070733 0.037485 0.517443 0.422541 0.295723 0.030636 0.025018 +3819 7.759241 2.622833 3.20979 0.528864 0.17877 3.43889 1.91422 3.253579 0.234392 0.130472 +3822 0.165356 0.165356 20.436857 0.036987 0.036987 0.052141 0.052141 30.088511 0.011663 0.011663 +3824 1.651478 0.634216 5.188486 0.313301 0.120317 0.86526 0.623543 6.103692 0.164148 0.118292 +3825 1.711743 0.644163 0.558567 0.164271 0.061818 0.623435 0.379449 1.139189 0.059829 0.036415 +3830 0.630581 0.446249 5.167457 0.105328 0.074538 0.052348 0.052348 4.35326 0.008744 0.008744 +3837 1.919544 0.538473 0.711552 0.16374 0.045933 0.8755 0.348854 1.105724 0.074681 0.029758 +3846 1.397585 0.378345 1.879752 0.553627 0.149874 0.787639 0.274145 2.536342 0.312008 0.108598 +3853 2.230212 0.734501 0.350943 0.119517 0.039362 0.88063 0.477216 0.285872 0.047193 0.025574 +4213 3.035305 0.717826 0.844267 0.170551 0.040334 3.9838 0.767302 0.966737 0.223846 0.043114 +4233 0.336188 0.332422 0.146048 0.01616 0.015979 0.39259 0.389122 0.15854 0.018872 0.018705 +6907 1.463324 1.463324 0.245541 0.044433 0.044433 1.171937 1.171937 0.157585 0.035585 0.035585 +6924 3.065424 0.84358 0 0.31635 0.087057 0.732748 0.642838 0 0.075619 0.066341 +6931 2.357736 0.630818 0.016724 0.151177 0.040448 0.62719 0.431148 0.02754 0.040215 0.027645 +6937 3.091504 0.555157 1.900714 0.614271 0.110308 1.536838 0.456254 1.969266 0.305365 0.090656 +6938 6.967651 1.272659 1.22201 0.433817 0.079238 5.136098 1.155356 1.972266 0.319782 0.071934 +6944 2.257632 0.190656 5.235179 
0.865671 0.073106 1.419167 0.169516 5.133861 0.544168 0.065 +6945 0.568841 0.074515 0.010553 0.324229 0.042472 0.38475 0.064832 0 0.2193 0.036953 +6948 0.113087 0.113087 0 0.026963 0.026963 0.087604 0.087604 0 0.020887 0.020887 +6949 0.389401 0.389401 0.104251 0.086762 0.086762 0.279972 0.279972 0.156426 0.06238 0.06238 +7405 0.10004 0.10004 0 0.002141 0.002141 0.046031 0.046031 0 0.000985 0.000985 +8405 43.246058 4.666919 0.217301 2.464853 0.265996 47.073015 5.17113 1.481742 2.682975 0.294734 +8406 31.721893 4.015049 3.471819 2.754544 0.348643 35.429292 4.157191 5.790008 3.076473 0.360986 +8409 29.028318 3.413448 0 3.227467 0.379519 38.875745 4.188472 1.454691 4.322337 0.465689 +8413 43.928424 3.90322 103.934619 7.051006 0.626511 49.6211 4.300695 20.693445 7.964745 0.69031 +8415 8.693156 2.35287 0 0.723275 0.19576 11.939666 3.056595 0.007549 0.993386 0.25431 +8416 30.711958 3.419496 6.478532 1.771994 0.197295 36.643607 4.073703 5.641193 2.114234 0.235041 +8418 18.570114 5.683167 0 1.985641 0.607682 20.788988 6.345343 0.338284 2.222897 0.678486 +8423 9.201931 4.451886 42.35799 1.745088 0.844272 9.963004 4.770264 10.204106 1.88942 0.90465 +410 12.810077 5.531629 22.006962 1.5432 0.666382 10.98093 6.812904 4.664238 1.322847 0.820734 +413 2.393948 1.479867 1.084895 0.280639 0.173483 1.041289 0.810284 1.949833 0.122069 0.094988 +414 6.093291 3.683479 25.708648 1.058221 0.639709 5.350819 4.146345 7.82113 0.929276 0.720095 +416 2.504977 2.120416 7.859514 0.403347 0.341426 2.25184 2.127831 5.889341 0.362587 0.34262 +419 2.667897 1.450613 10.025748 0.463419 0.251974 2.277812 1.807533 0.859772 0.395661 0.313972 +420 12.339022 3.239641 0 0.448156 0.117664 12.298706 3.942073 0.098948 0.446692 0.143177 +421 0.363107 0.20549 0.024286 0.043854 0.024818 0 0 0.07234 0 0 +427 4.007592 2.639512 2.329419 1.098425 0.723453 3.450146 2.821273 4.169108 0.945637 0.773271 +429 0.569 0.447665 3.007488 0.078361 0.061651 0.179526 0.184784 1.904867 0.024724 0.025448 +430 1.090054 0.606352 47.711771 0.152476 0.084816 0.696531 0.469081 9.953381 0.09743 0.065615 +505 8.135167 1.28097 1.608205 0.577261 0.090896 11.743731 1.45356 1.663783 0.833321 0.103143 +509 15.010774 2.514222 13.482833 1.782662 0.298586 22.839465 3.015914 12.239339 2.712388 0.358166 +512 8.729441 3.00379 7.872848 1.567417 0.539346 8.275396 3.221476 10.047226 1.485891 0.578433 +515 1.008309 0.521134 0.200081 0.285271 0.147439 1.147746 0.558353 0.080886 0.324721 0.157969 +516 1.221978 0.688514 2.735729 0.424317 0.239078 1.23819 0.792939 0.809955 0.429946 0.275338 +522 13.464493 5.540168 2.0441 1.108729 0.456203 13.515894 6.080608 2.244727 1.112962 0.500706 +523 11.510862 3.200164 0 1.200252 0.333685 14.986735 3.612314 0.336162 1.562685 0.37666 +524 4.475632 2.472077 3.155805 0.565283 0.312229 4.991376 2.869415 2.6615 0.630423 0.362414 +1326 85.765427 1.957587 34.271376 19.622941 0.447891 85.308643 2.183085 21.891082 19.51843 0.499485 +1327 3.545637 1.358932 85.193852 0.475364 0.182192 1.814058 0.971062 33.38636 0.243211 0.13019 +8319 0.971269 0.552233 61.218048 0.2105 0.119684 0.348919 0.299588 46.070747 0.07562 0.064929 +8408 48.765502 10.670544 0 4.073882 0.89142 44.297591 10.577727 1.45296 3.700632 0.883666 +8411 9.317834 5.511339 119.813887 2.505238 1.481805 9.675257 6.116222 28.910291 2.601337 1.644437 +102 0.728775 0.693738 0.238157 0.040441 0.038496 0.534998 0.499119 0.25469 0.029688 0.027697 +126 2.151944 0.997868 0 0.021626 0.010028 1.58914 0.699271 0 0.01597 0.007027 +135 0 0 0.268024 0 0 0 0 0.067882 0 0 +2103 1.786211 0.914414 0.209419 
0.079211 0.04055 0.433377 0.323009 0.390772 0.019218 0.014324 +2114 1.849585 0.842072 0.503048 0.057338 0.026105 0.085471 0.085023 0.513344 0.00265 0.002636 +2134 2.786521 1.340334 0.189952 0.078005 0.037521 0.447506 0.391208 0.29915 0.012527 0.010951 +2138 1.757151 0.165977 0.102905 0.026044 0.00246 0.183716 0.043615 0.059659 0.002723 0.000646 +3909 2.941383 0.925461 0.532057 0.262717 0.08266 0.082767 0.082767 0.877519 0.007393 0.007393 +6905 3.226787 1.30178 0.133114 0.083234 0.033579 2.912163 1.102357 0.178476 0.075119 0.028435 +6910 0.768294 0.175849 0 0.048264 0.011047 0.269453 0.116869 0 0.016927 0.007342 +6925 3.079319 0.682412 0.091914 0.178276 0.039508 1.917106 0.541757 0.035088 0.11099 0.031365 +7116 2.749838 0.869642 0 0.134076 0.042402 1.059585 0.466661 0.130689 0.051663 0.022753 +7151 5.136608 3.120501 0.07691 0.125103 0.076 1.729687 1.630881 0.266408 0.042127 0.03972 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151106_001546- cantons analyses - en m3 +Canton Demande_87_2007(m3) Transfert_87_2007(m3) Prélèvement_87_2007(m3) Demande_corrigée_87_2007(m3) Transfert_corrigé_87_2007(m3) Demande_2008_2012(m3) Transfert_2008_2012(m3) Prélèvement_2008_2012(m3) Demande_corrigée_2008_2012(m3) Transfert_corrigé_2008_2012(m3) +101 0.522172 0.411582 0.573629 0.130332 0.102729 0.201304 0.187345 0.970696 0.050245 0.046761 +117 2.616097 1.823662 8.423438 0.989545 0.689805 1.592056 1.214142 12.412633 0.602199 0.459252 +118 0.387061 0.288226 0.218757 0.052864 0.039365 0.076002 0.076002 0.356935 0.01038 0.01038 +119 3.441443 2.41891 4.191405 0.600666 0.422194 1.912026 1.481573 4.789801 0.333723 0.258592 +120 2.341645 1.116235 3.669709 0.44618 0.212689 1.265057 0.714138 4.539821 0.241046 0.136073 +140 1.033115 0.39982 0.419457 0.299572 0.115936 0.679033 0.341933 0.558762 0.196899 0.09915 +518 3.414007 3.414007 3.911576 0.290986 0.290986 3.754186 3.754186 5.070826 0.319981 0.319981 +717 4.664204 2.750617 0.380052 0.385446 0.227309 4.394603 2.903329 0.300758 0.363167 0.239929 +722 6.644114 3.546696 0.318838 0.539591 0.288039 8.414471 3.914393 0.675474 0.683368 0.317901 +724 8.124416 5.435225 0.107614 0.607987 0.406743 8.78135 6.150088 0.25053 0.657149 0.460239 +2602 19.880342 13.242173 13.756076 5.42 3.610229 24.602943 15.856921 16.835708 6.707529 4.323091 +2604 18.166319 4.690361 1.217048 4.887599 1.261929 22.869148 5.344779 1.957566 6.152882 1.437998 +2607 29.839216 15.583051 6.452943 4.041658 2.110691 36.183178 17.928773 5.963191 4.900934 2.428414 +2611 5.571213 3.339141 1.886452 0.78791 0.472239 5.690994 3.85449 2.805221 0.80485 0.545122 +2613 8.323637 4.738156 3.845586 1.699458 0.967401 11.014104 5.899473 3.615312 2.248778 1.20451 +2615 20.781869 6.140112 4.022067 5.54672 1.638808 20.99999 6.281137 9.276599 5.604937 1.676447 +2616 15.15785 8.048977 6.181367 2.054929 1.091189 15.08648 8.079491 6.058549 2.045254 1.095326 +2619 16.042418 7.231026 2.713535 2.280649 1.027989 16.568885 7.383793 5.48112 2.355494 1.049707 +2621 12.744168 5.657168 3.456798 1.975134 0.876767 14.51381 6.892659 5.475886 2.249399 1.068248 +2623 4.618007 2.003123 1.532132 0.724499 0.314261 4.923437 2.430061 1.893185 0.772417 0.381242 +2625 16.450157 5.91985 5.056037 0.645841 0.232416 17.5417 5.951822 2.619503 0.688696 0.233671 +2626 10.678452 5.821529 3.458965 1.62302 0.884816 11.593153 6.931847 7.658636 1.762046 1.053573 +2628 6.035983 4.305397 6.882843 1.391899 0.992826 7.862572 5.781805 6.886378 1.813111 1.333286 +2629 1.387471 0.858079 3.494542 0.706957 0.437216 1.761902 1.076932 0.777969 0.89774 0.548728 +2632 
1.178132 0.851178 0.60139 0.519943 0.375649 1.468405 1.015211 0.612703 0.648048 0.448041 +2634 9.429067 2.776136 7.072728 2.562305 0.754402 12.175178 3.257297 7.823647 3.308548 0.885155 +3006 15.700809 6.431738 0.11491 1.913944 0.784035 15.680476 6.420806 0.136045 1.911465 0.782703 +3023 36.118512 8.782802 1.516696 0.917803 0.223179 37.612903 9.141082 0.876913 0.955777 0.232283 +3026 27.874565 9.64181 0.552781 0.601432 0.208035 28.84915 11.443392 0.692753 0.622459 0.246907 +3802 5.222414 2.846408 1.689062 0.37162 0.202547 4.56378 2.90475 2.204683 0.324753 0.206698 +3807 4.504771 2.336185 1.984129 0.420929 0.218295 3.634057 2.285734 2.559996 0.339569 0.213581 +3808 9.432523 3.265741 2.790162 0.943553 0.326678 8.652291 3.243602 4.568417 0.865505 0.324464 +3815 1.929213 1.558635 0.238714 0.114224 0.092283 1.674065 1.530553 0.295723 0.099117 0.09062 +3819 8.640186 4.719311 3.20979 0.588908 0.321665 6.890982 4.171109 3.253579 0.469684 0.2843 +3822 0.670702 0.670702 20.436857 0.150023 0.150023 0.396015 0.396015 30.088511 0.088581 0.088581 +3824 3.913123 2.470766 5.188486 0.742356 0.468728 3.840031 2.642614 6.103692 0.72849 0.501329 +3825 2.256066 1.733493 0.558567 0.216507 0.166358 1.927388 1.617274 1.139189 0.184965 0.155205 +3830 2.531575 1.820215 5.167457 0.422857 0.304036 1.053102 1.024625 4.35326 0.175903 0.171146 +3837 2.052666 1.402074 0.711552 0.175095 0.119599 1.880039 1.381936 1.105724 0.16037 0.117881 +3846 1.4637 0.906108 1.879752 0.579817 0.358938 1.248501 0.802138 2.536342 0.494571 0.317752 +3853 2.612962 1.911256 0.350943 0.140029 0.102424 2.312376 1.894283 0.285872 0.12392 0.101515 +4213 4.583378 1.818706 0.844267 0.257536 0.102192 6.112205 1.909391 0.966737 0.34344 0.107287 +4233 1.504762 1.212941 0.146048 0.072333 0.058306 1.875474 1.337972 0.15854 0.090153 0.064316 +6907 4.488847 4.265743 0.245541 0.1363 0.129526 3.918849 3.749938 0.157585 0.118993 0.113864 +6924 5.475217 1.839526 0 0.56504 0.189838 3.569857 1.653162 0 0.368408 0.170606 +6931 4.180827 1.577408 0.016724 0.268072 0.101142 2.724205 1.323916 0.02754 0.174674 0.084889 +6937 3.033172 1.308332 1.900714 0.602681 0.259961 2.726991 1.464743 1.969266 0.541844 0.29104 +6938 6.566571 2.415655 1.22201 0.408845 0.150403 6.30377 2.576481 1.972266 0.392483 0.160416 +6944 1.721548 0.401855 5.235179 0.660113 0.154088 1.451497 0.433696 5.133861 0.556565 0.166297 +6945 0.417357 0.152257 0.010553 0.237886 0.086784 0.363063 0.16534 0 0.206939 0.094241 +6948 0.290914 0.290914 0 0.069363 0.069363 0.252393 0.252393 0 0.060178 0.060178 +6949 0.769675 0.769675 0.104251 0.171491 0.171491 0.703884 0.703884 0.156426 0.156832 0.156832 +7405 0.571377 0.55926 0 0.01223 0.011971 0.478807 0.478807 0 0.010249 0.010249 +8405 18.071745 5.524737 0.217301 1.030018 0.314888 20.001247 5.913794 1.481742 1.139992 0.337063 +8406 23.83484 7.417851 3.471819 2.069678 0.644123 25.827535 7.579547 5.790008 2.242712 0.658164 +8409 23.314953 6.075204 0 2.592236 0.675462 28.794183 6.596291 1.454691 3.201435 0.733398 +8413 25.268711 5.301439 103.934619 4.055912 0.850941 26.845659 5.836944 20.693445 4.30903 0.936895 +8415 10.625891 3.446759 0 0.884079 0.286772 14.320716 4.188266 0.007549 1.19149 0.348466 +8416 25.605725 8.581471 6.478532 1.477379 0.495127 28.668487 9.002355 5.641193 1.654092 0.519411 +8418 19.945043 9.805934 0 2.132657 1.048516 22.151765 10.777322 0.338284 2.368615 1.152383 +8423 6.474087 5.1594 42.35799 1.227769 0.978447 6.934897 5.516014 10.204106 1.315159 1.046077 +410 17.250323 9.813118 22.006962 2.078105 1.182163 18.284804 11.713173 4.664238 
2.202727 1.411058 +413 2.736102 2.146039 1.084895 0.320749 0.251577 1.598405 1.388573 1.949833 0.187379 0.16278 +414 10.494961 7.626911 25.708648 1.822659 1.324565 10.934246 8.674644 7.82113 1.898949 1.506524 +416 4.408833 3.859476 7.859514 0.709903 0.621446 4.384178 4.007678 5.889341 0.705933 0.645309 +419 4.293424 2.772124 10.025748 0.745777 0.481524 4.686013 3.279044 0.859772 0.81397 0.569577 +420 12.463656 5.815313 0 0.452683 0.211213 13.33114 6.890685 0.098948 0.48419 0.250271 +421 1.357014 0.907012 0.024286 0.163894 0.109545 0.320151 0.26952 0.07234 0.038666 0.032551 +427 6.3445 4.80464 2.329419 1.738939 1.316884 6.621702 4.991039 4.169108 1.814916 1.367974 +429 1.116104 0.938926 3.007488 0.153707 0.129306 0.429404 0.402841 1.904867 0.059136 0.055478 +430 1.281034 0.798282 47.711771 0.17919 0.111663 0.886311 0.652055 9.953381 0.123976 0.091209 +505 6.40637 1.872683 1.608205 0.454588 0.132883 7.768369 2.049089 1.663783 0.551234 0.145401 +509 11.500028 3.492374 13.482833 1.36573 0.41475 14.863508 3.941883 12.239339 1.765173 0.468133 +512 11.154665 5.817607 7.872848 2.002879 1.044582 11.91191 6.313521 10.047226 2.138846 1.133626 +515 1.550146 0.804417 0.200081 0.438568 0.227586 1.771954 0.854771 0.080886 0.501322 0.241832 +516 1.896921 1.204148 2.735729 0.658682 0.418126 2.118177 1.321585 0.809955 0.735511 0.458904 +522 11.503044 6.456056 2.0441 0.947215 0.531622 12.941376 7.375259 2.244727 1.065653 0.607313 +523 9.505007 3.904823 0 0.991099 0.407161 11.32911 4.367832 0.336162 1.1813 0.455439 +524 4.222087 2.932684 3.155805 0.533259 0.370405 4.980051 3.436155 2.6615 0.628992 0.433995 +1326 28.301015 3.838777 34.271376 6.47521 0.878304 29.581315 4.153209 21.891082 6.76814 0.950245 +1327 3.102727 1.894825 85.193852 0.415983 0.254039 2.109945 1.441933 33.38636 0.28288 0.19332 +8319 1.28811 1.065901 61.218048 0.279168 0.231009 0.744935 0.640484 46.070747 0.161447 0.13881 +8408 24.299776 11.805196 0 2.030009 0.986209 24.083254 12.345471 1.45296 2.011921 1.031344 +8411 15.840096 11.986835 119.813887 4.258845 3.222839 17.12065 13.180381 28.910291 4.603141 3.543741 +102 3.098368 2.777448 0.238157 0.171932 0.154124 2.185097 2.081111 0.25469 0.121253 0.115483 +126 5.73744 3.686015 0 0.057658 0.037042 4.112688 2.75678 0 0.04133 0.027704 +135 0.156311 0.12432 0.268024 0.005275 0.004195 0 0 0.067882 0 0 +2103 4.973715 3.506199 0.209419 0.220562 0.155484 2.723294 2.138767 0.390772 0.120766 0.094845 +2114 7.320273 4.462323 0.503048 0.226933 0.138335 2.817054 2.361396 0.513344 0.08733 0.073205 +2134 7.002052 4.909768 0.189952 0.196013 0.137442 3.625175 2.79766 0.29915 0.101482 0.078317 +2138 2.616114 0.649466 0.102905 0.038776 0.009626 1.26512 0.424131 0.059659 0.018751 0.006286 +3909 3.49607 2.140098 0.532057 0.31226 0.191148 1.598229 1.340505 0.877519 0.14275 0.119731 +6905 8.236916 4.906081 0.133114 0.21247 0.126551 6.871773 4.016779 0.178476 0.177256 0.103612 +6910 1.350596 0.667826 0 0.084844 0.041953 0.914552 0.498272 0 0.057452 0.031301 +6925 3.254299 1.354826 0.091914 0.188406 0.078437 2.486744 1.125633 0.035088 0.143969 0.065168 +7116 5.279821 2.99994 0 0.257433 0.146271 3.4792 2.088018 0.130689 0.169638 0.101807 +7151 12.388608 9.961299 0.07691 0.301726 0.242608 8.23871 6.98149 0.266408 0.200654 0.170035 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151106_064031- cantons analyses - en m3 +Canton Demande_87_2007(m3) Transfert_87_2007(m3) Prélèvement_87_2007(m3) Demande_corrigée_87_2007(m3) Transfert_corrigé_87_2007(m3) Demande_2008_2012(m3) Transfert_2008_2012(m3) 
Prélèvement_2008_2012(m3) Demande_corrigée_2008_2012(m3) Transfert_corrigé_2008_2012(m3) +101 0.522172 0.411582 0.573629 0.130332 0.102729 0.201304 0.187345 0.970696 0.050245 0.046761 +117 2.616097 1.823662 8.423438 0.989545 0.689805 1.592056 1.214142 12.412633 0.602199 0.459252 +118 0.387061 0.288226 0.218757 0.052864 0.039365 0.076002 0.076002 0.356935 0.01038 0.01038 +119 3.441443 2.41891 4.191405 0.600666 0.422194 1.912026 1.481573 4.789801 0.333723 0.258592 +120 2.341645 1.116235 3.669709 0.44618 0.212689 1.265057 0.714138 4.539821 0.241046 0.136073 +140 1.033115 0.39982 0.419457 0.299572 0.115936 0.679033 0.341933 0.558762 0.196899 0.09915 +518 3.414007 3.414007 3.911576 0.290986 0.290986 3.754186 3.754186 5.070826 0.319981 0.319981 +717 4.664204 2.750617 0.380052 0.385446 0.227309 4.394603 2.903329 0.300758 0.363167 0.239929 +722 6.644114 3.546696 0.318838 0.539591 0.288039 8.414471 3.914393 0.675474 0.683368 0.317901 +724 8.124416 5.435225 0.107614 0.607987 0.406743 8.78135 6.150088 0.25053 0.657149 0.460239 +2602 19.880342 13.242173 13.756076 5.42 3.610229 24.602943 15.856921 16.835708 6.707529 4.323091 +2604 18.166319 4.690361 1.217048 4.887599 1.261929 22.869148 5.344779 1.957566 6.152882 1.437998 +2607 29.839216 15.583051 6.452943 4.041658 2.110691 36.183178 17.928773 5.963191 4.900934 2.428414 +2611 5.571213 3.339141 1.886452 0.78791 0.472239 5.690994 3.85449 2.805221 0.80485 0.545122 +2613 8.323637 4.738156 3.845586 1.699458 0.967401 11.014104 5.899473 3.615312 2.248778 1.20451 +2615 20.781869 6.140112 4.022067 5.54672 1.638808 20.99999 6.281137 9.276599 5.604937 1.676447 +2616 15.15785 8.048977 6.181367 2.054929 1.091189 15.08648 8.079491 6.058549 2.045254 1.095326 +2619 16.042418 7.231026 2.713535 2.280649 1.027989 16.568885 7.383793 5.48112 2.355494 1.049707 +2621 12.744168 5.657168 3.456798 1.975134 0.876767 14.51381 6.892659 5.475886 2.249399 1.068248 +2623 4.618007 2.003123 1.532132 0.724499 0.314261 4.923437 2.430061 1.893185 0.772417 0.381242 +2625 16.450157 5.91985 5.056037 0.645841 0.232416 17.5417 5.951822 2.619503 0.688696 0.233671 +2626 10.678452 5.821529 3.458965 1.62302 0.884816 11.593153 6.931847 7.658636 1.762046 1.053573 +2628 6.035983 4.305397 6.882843 1.391899 0.992826 7.862572 5.781805 6.886378 1.813111 1.333286 +2629 1.387471 0.858079 3.494542 0.706957 0.437216 1.761902 1.076932 0.777969 0.89774 0.548728 +2632 1.178132 0.851178 0.60139 0.519943 0.375649 1.468405 1.015211 0.612703 0.648048 0.448041 +2634 9.429067 2.776136 7.072728 2.562305 0.754402 12.175178 3.257297 7.823647 3.308548 0.885155 +3006 15.700809 6.431738 0.11491 1.913944 0.784035 15.680476 6.420806 0.136045 1.911465 0.782703 +3023 36.118512 8.782802 1.516696 0.917803 0.223179 37.612903 9.141082 0.876913 0.955777 0.232283 +3026 27.874565 9.64181 0.552781 0.601432 0.208035 28.84915 11.443392 0.692753 0.622459 0.246907 +3802 5.222414 2.846408 1.689062 0.37162 0.202547 4.56378 2.90475 2.204683 0.324753 0.206698 +3807 4.504771 2.336185 1.984129 0.420929 0.218295 3.634057 2.285734 2.559996 0.339569 0.213581 +3808 9.432523 3.265741 2.790162 0.943553 0.326678 8.652291 3.243602 4.568417 0.865505 0.324464 +3815 1.929213 1.558635 0.238714 0.114224 0.092283 1.674065 1.530553 0.295723 0.099117 0.09062 +3819 8.640186 4.719311 3.20979 0.588908 0.321665 6.890982 4.171109 3.253579 0.469684 0.2843 +3822 0.670702 0.670702 20.436857 0.150023 0.150023 0.396015 0.396015 30.088511 0.088581 0.088581 +3824 3.913123 2.470766 5.188486 0.742356 0.468728 3.840031 2.642614 6.103692 0.72849 0.501329 +3825 2.256066 
1.733493 0.558567 0.216507 0.166358 1.927388 1.617274 1.139189 0.184965 0.155205 +3830 2.531575 1.820215 5.167457 0.422857 0.304036 1.053102 1.024625 4.35326 0.175903 0.171146 +3837 2.052666 1.402074 0.711552 0.175095 0.119599 1.880039 1.381936 1.105724 0.16037 0.117881 +3846 1.4637 0.906108 1.879752 0.579817 0.358938 1.248501 0.802138 2.536342 0.494571 0.317752 +3853 2.612962 1.911256 0.350943 0.140029 0.102424 2.312376 1.894283 0.285872 0.12392 0.101515 +4213 4.583378 1.818706 0.844267 0.257536 0.102192 6.112205 1.909391 0.966737 0.34344 0.107287 +4233 1.504762 1.212941 0.146048 0.072333 0.058306 1.875474 1.337972 0.15854 0.090153 0.064316 +6907 4.488847 4.265743 0.245541 0.1363 0.129526 3.918849 3.749938 0.157585 0.118993 0.113864 +6924 5.475217 1.839526 0 0.56504 0.189838 3.569857 1.653162 0 0.368408 0.170606 +6931 4.180827 1.577408 0.016724 0.268072 0.101142 2.724205 1.323916 0.02754 0.174674 0.084889 +6937 3.033172 1.308332 1.900714 0.602681 0.259961 2.726991 1.464743 1.969266 0.541844 0.29104 +6938 6.566571 2.415655 1.22201 0.408845 0.150403 6.30377 2.576481 1.972266 0.392483 0.160416 +6944 1.721548 0.401855 5.235179 0.660113 0.154088 1.451497 0.433696 5.133861 0.556565 0.166297 +6945 0.417357 0.152257 0.010553 0.237886 0.086784 0.363063 0.16534 0 0.206939 0.094241 +6948 0.290914 0.290914 0 0.069363 0.069363 0.252393 0.252393 0 0.060178 0.060178 +6949 0.769675 0.769675 0.104251 0.171491 0.171491 0.703884 0.703884 0.156426 0.156832 0.156832 +7405 0.571377 0.55926 0 0.01223 0.011971 0.478807 0.478807 0 0.010249 0.010249 +8405 18.071745 5.524737 0.217301 1.030018 0.314888 20.001247 5.913794 1.481742 1.139992 0.337063 +8406 23.83484 7.417851 3.471819 2.069678 0.644123 25.827535 7.579547 5.790008 2.242712 0.658164 +8409 23.314953 6.075204 0 2.592236 0.675462 28.794183 6.596291 1.454691 3.201435 0.733398 +8413 25.268711 5.301439 103.934619 4.055912 0.850941 26.845659 5.836944 20.693445 4.30903 0.936895 +8415 10.625891 3.446759 0 0.884079 0.286772 14.320716 4.188266 0.007549 1.19149 0.348466 +8416 25.605725 8.581471 6.478532 1.477379 0.495127 28.668487 9.002355 5.641193 1.654092 0.519411 +8418 19.945043 9.805934 0 2.132657 1.048516 22.151765 10.777322 0.338284 2.368615 1.152383 +8423 6.474087 5.1594 42.35799 1.227769 0.978447 6.934897 5.516014 10.204106 1.315159 1.046077 +410 17.250323 9.813118 22.006962 2.078105 1.182163 18.284804 11.713173 4.664238 2.202727 1.411058 +413 2.736102 2.146039 1.084895 0.320749 0.251577 1.598405 1.388573 1.949833 0.187379 0.16278 +414 10.494961 7.626911 25.708648 1.822659 1.324565 10.934246 8.674644 7.82113 1.898949 1.506524 +416 4.408833 3.859476 7.859514 0.709903 0.621446 4.384178 4.007678 5.889341 0.705933 0.645309 +419 4.293424 2.772124 10.025748 0.745777 0.481524 4.686013 3.279044 0.859772 0.81397 0.569577 +420 12.463656 5.815313 0 0.452683 0.211213 13.33114 6.890685 0.098948 0.48419 0.250271 +421 1.357014 0.907012 0.024286 0.163894 0.109545 0.320151 0.26952 0.07234 0.038666 0.032551 +427 6.3445 4.80464 2.329419 1.738939 1.316884 6.621702 4.991039 4.169108 1.814916 1.367974 +429 1.116104 0.938926 3.007488 0.153707 0.129306 0.429404 0.402841 1.904867 0.059136 0.055478 +430 1.281034 0.798282 47.711771 0.17919 0.111663 0.886311 0.652055 9.953381 0.123976 0.091209 +505 6.40637 1.872683 1.608205 0.454588 0.132883 7.768369 2.049089 1.663783 0.551234 0.145401 +509 11.500028 3.492374 13.482833 1.36573 0.41475 14.863508 3.941883 12.239339 1.765173 0.468133 +512 11.154665 5.817607 7.872848 2.002879 1.044582 11.91191 6.313521 10.047226 2.138846 1.133626 +515 
1.550146 0.804417 0.200081 0.438568 0.227586 1.771954 0.854771 0.080886 0.501322 0.241832 +516 1.896921 1.204148 2.735729 0.658682 0.418126 2.118177 1.321585 0.809955 0.735511 0.458904 +522 11.503044 6.456056 2.0441 0.947215 0.531622 12.941376 7.375259 2.244727 1.065653 0.607313 +523 9.505007 3.904823 0 0.991099 0.407161 11.32911 4.367832 0.336162 1.1813 0.455439 +524 4.222087 2.932684 3.155805 0.533259 0.370405 4.980051 3.436155 2.6615 0.628992 0.433995 +1326 28.301015 3.838777 34.271376 6.47521 0.878304 29.581315 4.153209 21.891082 6.76814 0.950245 +1327 3.102727 1.894825 85.193852 0.415983 0.254039 2.109945 1.441933 33.38636 0.28288 0.19332 +8319 1.28811 1.065901 61.218048 0.279168 0.231009 0.744935 0.640484 46.070747 0.161447 0.13881 +8408 24.299776 11.805196 0 2.030009 0.986209 24.083254 12.345471 1.45296 2.011921 1.031344 +8411 15.840096 11.986835 119.813887 4.258845 3.222839 17.12065 13.180381 28.910291 4.603141 3.543741 +102 3.098368 2.777448 0.238157 0.171932 0.154124 2.185097 2.081111 0.25469 0.121253 0.115483 +126 5.73744 3.686015 0 0.057658 0.037042 4.112688 2.75678 0 0.04133 0.027704 +135 0.156311 0.12432 0.268024 0.005275 0.004195 0 0 0.067882 0 0 +2103 4.973715 3.506199 0.209419 0.220562 0.155484 2.723294 2.138767 0.390772 0.120766 0.094845 +2114 7.320273 4.462323 0.503048 0.226933 0.138335 2.817054 2.361396 0.513344 0.08733 0.073205 +2134 7.002052 4.909768 0.189952 0.196013 0.137442 3.625175 2.79766 0.29915 0.101482 0.078317 +2138 2.616114 0.649466 0.102905 0.038776 0.009626 1.26512 0.424131 0.059659 0.018751 0.006286 +3909 3.49607 2.140098 0.532057 0.31226 0.191148 1.598229 1.340505 0.877519 0.14275 0.119731 +6905 8.236916 4.906081 0.133114 0.21247 0.126551 6.871773 4.016779 0.178476 0.177256 0.103612 +6910 1.350596 0.667826 0 0.084844 0.041953 0.914552 0.498272 0 0.057452 0.031301 +6925 3.254299 1.354826 0.091914 0.188406 0.078437 2.486744 1.125633 0.035088 0.143969 0.065168 +7116 5.279821 2.99994 0 0.257433 0.146271 3.4792 2.088018 0.130689 0.169638 0.101807 +7151 12.388608 9.961299 0.07691 0.301726 0.242608 8.23871 6.98149 0.266408 0.200654 0.170035 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151106_092804- cantons analyses - en m3 +Canton Demande_87_2007(m3) Transfert_87_2007(m3) Prélèvement_87_2007(m3) Demande_corrigée_87_2007(m3) Transfert_corrigé_87_2007(m3) Demande_2008_2012(m3) Transfert_2008_2012(m3) Prélèvement_2008_2012(m3) Demande_corrigée_2008_2012(m3) Transfert_corrigé_2008_2012(m3) +101 0.038675 0.027544 0.573629 0.009653 0.006875 0.010898 0.010898 0.970696 0.00272 0.00272 +117 0.394331 0.27847 8.423438 0.149157 0.105332 0.241043 0.196621 12.412633 0.091175 0.074372 +118 0.035132 0.034197 0.218757 0.004798 0.004671 0 0 0.356935 0 0 +119 0.330485 0.307797 4.191405 0.057683 0.053723 0.253579 0.253579 4.789801 0.04426 0.04426 +120 0.249434 0.219223 3.669709 0.047527 0.041771 0.157417 0.155998 4.539821 0.029994 0.029724 +140 0.336145 0.126592 0.419457 0.097472 0.036708 0.235409 0.071526 0.558762 0.068262 0.02074 +518 1.010173 1.010173 3.911576 0.0861 0.0861 1.319322 1.319322 5.070826 0.11245 0.11245 +717 0.976888 0.78385 0.380052 0.080729 0.064777 0.736042 0.695013 0.300758 0.060826 0.057435 +722 3.110284 0.938037 0.318838 0.252597 0.076181 4.630096 1.038656 0.675474 0.376026 0.084353 +724 2.920256 1.544733 0.107614 0.218536 0.115599 3.004389 1.664069 0.25053 0.224832 0.12453 +2602 15.308788 7.069337 13.756076 4.173652 1.927321 15.308142 8.866723 16.835708 4.173476 2.417345 +2604 21.215305 2.434785 1.217048 5.707921 0.655072 23.681177 3.021655 1.957566 
6.371357 0.812968 +2607 26.612278 9.860233 6.452943 3.604576 1.335547 29.66517 11.636738 5.963191 4.018084 1.576171 +2611 3.519411 1.116151 1.886452 0.497734 0.157852 3.204691 1.210114 2.805221 0.453224 0.171141 +2613 4.798906 1.26897 3.845586 0.979805 0.259089 6.072636 1.659159 3.615312 1.239866 0.338755 +2615 20.87723 3.963346 4.022067 5.572172 1.057824 15.387307 3.801594 9.276599 4.106901 1.014653 +2616 16.551489 4.580895 6.181367 2.243863 0.621026 10.175312 4.016601 6.058549 1.379453 0.544525 +2619 21.005241 4.614979 2.713535 2.986182 0.656082 20.973611 4.705196 5.48112 2.981686 0.668908 +2621 10.871056 2.145977 3.456798 1.684833 0.332591 12.144726 2.74839 5.475886 1.882231 0.425955 +2623 3.642063 0.585074 1.532132 0.571388 0.09179 4.882377 0.705542 1.893185 0.765975 0.110689 +2625 8.525637 2.517849 5.056037 0.334721 0.098852 8.207748 2.707307 2.619503 0.32224 0.10629 +2626 8.120689 2.188318 3.458965 1.234265 0.332603 9.851371 2.692578 7.658636 1.497312 0.409246 +2628 2.191922 0.943407 6.882843 0.505458 0.21755 2.944593 1.439133 6.886378 0.679024 0.331864 +2629 1.316258 0.438702 3.494542 0.670671 0.223531 1.661873 0.589866 0.777969 0.846772 0.300554 +2632 0.900983 0.445887 0.60139 0.397629 0.196782 0.894628 0.581997 0.612703 0.394824 0.256852 +2634 11.395395 1.464867 7.072728 3.096645 0.398071 14.125726 1.9197 7.823647 3.8386 0.52167 +3006 19.850646 2.24787 0.11491 2.419813 0.274018 19.172996 2.238892 0.136045 2.337207 0.272923 +3023 40.997408 3.646569 1.516696 1.04178 0.092663 40.191972 3.839307 0.876913 1.021313 0.09756 +3026 18.827517 4.291872 0.552781 0.406229 0.092603 15.371109 5.067849 0.692753 0.331652 0.109346 +3802 1.877964 0.779972 1.689062 0.133634 0.055502 0.62677 0.471609 2.204683 0.0446 0.033559 +3807 1.546681 0.474819 1.984129 0.144523 0.044367 0.25359 0.253263 2.559996 0.023696 0.023665 +3808 5.161426 1.188763 2.790162 0.516307 0.118914 2.723435 1.072718 4.568417 0.27243 0.107306 +3815 0.481722 0.352534 0.238714 0.028522 0.020873 0.213369 0.193297 0.295723 0.012633 0.011445 +3819 4.100841 1.876401 3.20979 0.27951 0.127894 1.892632 1.415694 3.253579 0.129 0.096493 +3822 0.085839 0.085839 20.436857 0.019201 0.019201 0.032444 0.032444 30.088511 0.007257 0.007257 +3824 0.561896 0.339953 5.188486 0.106597 0.064492 0.324189 0.306191 6.103692 0.061502 0.058087 +3825 0.667061 0.361257 0.558567 0.064016 0.034669 0.168042 0.153776 1.139189 0.016126 0.014757 +3830 0.242767 0.214485 5.167457 0.04055 0.035826 0.008025 0.008025 4.35326 0.00134 0.00134 +3837 0.816296 0.300685 0.711552 0.069631 0.025649 0.298395 0.166085 1.105724 0.025454 0.014167 +3846 0.626274 0.201417 1.879752 0.248087 0.079788 0.31956 0.146078 2.536342 0.126588 0.057866 +3853 0.919435 0.421988 0.350943 0.049273 0.022614 0.317018 0.243629 0.285872 0.016989 0.013056 +4213 1.416413 0.460989 0.844267 0.079587 0.025903 1.875577 0.458608 0.966737 0.105387 0.025769 +4233 0.172556 0.172556 0.146048 0.008295 0.008295 0.191447 0.191447 0.15854 0.009203 0.009203 +6907 0.873644 0.873644 0.245541 0.026527 0.026527 0.638854 0.638854 0.157585 0.019398 0.019398 +6924 1.282803 0.555927 0 0.132385 0.057371 0.32139 0.32139 0 0.033167 0.033167 +6931 0.997792 0.410328 0.016724 0.063978 0.02631 0.236281 0.219006 0.02754 0.01515 0.014043 +6937 1.246886 0.331039 1.900714 0.247752 0.065776 0.544007 0.243893 1.969266 0.108092 0.048461 +6938 3.205651 0.824573 1.22201 0.199589 0.051339 2.154332 0.699542 1.972266 0.134132 0.043555 +6944 0.953483 0.114139 5.235179 0.365605 0.043766 0.552359 0.096372 5.133861 0.211798 0.036953 +6945 0.265883 
0.045054 0.010553 0.151548 0.02568 0.179103 0.035963 0 0.102086 0.020498 +6948 0.072176 0.072176 0 0.017209 0.017209 0.050352 0.050352 0 0.012006 0.012006 +6949 0.215863 0.215863 0.104251 0.048096 0.048096 0.149703 0.149703 0.156426 0.033355 0.033355 +7405 0.051784 0.051784 0 0.001108 0.001108 0.021369 0.021369 0 0.000457 0.000457 +8405 29.733769 3.927795 0.217301 1.694707 0.223869 32.673113 4.472755 1.481742 1.862237 0.254929 +8406 18.450639 3.091805 3.471819 1.602146 0.268474 20.486082 3.272034 5.790008 1.778892 0.284124 +8409 14.146846 2.614742 0 1.572894 0.290716 19.93567 3.32771 1.454691 2.216515 0.369986 +8413 27.319482 3.287464 103.934619 4.385084 0.527675 31.702627 3.575917 20.693445 5.088628 0.573975 +8415 5.107451 1.910689 0 0.424942 0.15897 7.236265 2.560422 0.007549 0.602061 0.213028 +8416 17.624686 2.195132 6.478532 1.016895 0.126653 22.239359 2.723995 5.641193 1.283149 0.157167 +8418 9.951525 4.359924 0 1.064084 0.466192 11.281004 4.871401 0.338284 1.20624 0.520883 +8423 6.872511 3.629978 42.35799 1.303328 0.688402 7.384199 3.849529 10.204106 1.400366 0.730039 +410 7.722085 4.188782 22.006962 0.930261 0.504613 6.650156 5.148766 4.664238 0.801129 0.62026 +413 1.66379 1.130202 1.084895 0.195044 0.132492 0.687305 0.55744 1.949833 0.080572 0.065348 +414 3.617298 2.618371 25.708648 0.628216 0.454732 3.199771 2.871573 7.82113 0.555704 0.498706 +416 1.777315 1.594935 7.859514 0.28618 0.256814 1.604435 1.570716 5.889341 0.258343 0.252914 +419 1.566143 1.070939 10.025748 0.272042 0.186024 1.382249 1.325161 0.859772 0.240099 0.230183 +420 7.639005 2.480695 0 0.27745 0.090099 7.556421 3.059882 0.098948 0.274451 0.111136 +421 0.095433 0.075117 0.024286 0.011526 0.009072 0 0 0.07234 0 0 +427 2.635913 1.985126 2.329419 0.722467 0.544095 2.211268 2.123202 4.169108 0.606078 0.58194 +429 0.319179 0.289775 3.007488 0.043957 0.039907 0.127915 0.121638 1.904867 0.017616 0.016752 +430 0.841845 0.503834 47.711771 0.117757 0.070476 0.548435 0.383938 9.953381 0.076715 0.053705 +505 5.047848 1.036597 1.608205 0.358189 0.073556 7.81116 1.174313 1.663783 0.55427 0.083328 +509 9.629885 2.0679 13.482833 1.143634 0.245581 15.335355 2.506002 12.239339 1.821209 0.29761 +512 5.818434 2.260436 7.872848 1.04473 0.405873 5.493492 2.415979 10.047226 0.986385 0.433802 +515 0.663143 0.413384 0.200081 0.187617 0.116955 0.744445 0.455372 0.080886 0.210619 0.128834 +516 0.794443 0.525478 2.735729 0.27586 0.182466 0.790071 0.610381 0.809955 0.274343 0.211947 +522 10.049227 4.736774 2.0441 0.8275 0.390048 10.05843 5.053283 2.244727 0.828258 0.416111 +523 7.95972 2.665627 0 0.82997 0.277948 10.590734 2.99104 0.336162 1.104309 0.311879 +524 3.373298 2.064769 3.155805 0.426056 0.260785 3.82297 2.394615 2.6615 0.48285 0.302445 +1326 55.494123 1.452009 34.271376 12.696933 0.332217 56.13918 1.597838 21.891082 12.844521 0.365582 +1327 2.465128 1.06046 85.193852 0.3305 0.142176 1.255579 0.721022 33.38636 0.168336 0.096667 +8319 0.57793 0.362894 61.218048 0.125253 0.078649 0.192989 0.18527 46.070747 0.041826 0.040153 +8408 35.491994 8.824917 0 2.96501 0.737236 32.30718 8.557315 1.45296 2.69895 0.71488 +8411 6.194799 3.906419 119.813887 1.665564 1.050299 6.762951 4.323442 28.910291 1.81832 1.162421 +102 0.343373 0.343336 0.238157 0.019054 0.019052 0.260759 0.260759 0.25469 0.01447 0.01447 +126 0.873495 0.522794 0 0.008778 0.005254 0.680272 0.380439 0 0.006836 0.003823 +135 0 0 0.268024 0 0 0 0 0.067882 0 0 +2103 0.867809 0.491807 0.209419 0.038483 0.021809 0.143598 0.116571 0.390772 0.006368 0.005169 +2114 0.563585 0.351379 
0.503048 0.017471 0.010893 0 0 0.513344 0 0 +2134 1.324523 0.739343 0.189952 0.037078 0.020697 0.137124 0.13179 0.29915 0.003839 0.003689 +2138 0.736515 0.090589 0.102905 0.010917 0.001343 0.058612 0.014033 0.059659 0.000869 0.000208 +3909 1.116062 0.537981 0.532057 0.099684 0.048051 0.035765 0.035765 0.877519 0.003194 0.003194 +6905 1.37297 0.685983 0.133114 0.035415 0.017695 1.237468 0.64323 0.178476 0.03192 0.016592 +6910 0.360458 0.101792 0 0.022644 0.006395 0.090738 0.059801 0 0.0057 0.003757 +6925 1.57361 0.443698 0.091914 0.091103 0.025688 0.933493 0.344293 0.035088 0.054044 0.019933 +7116 1.283008 0.485557 0 0.062557 0.023675 0.438551 0.229484 0.130689 0.021383 0.011189 +7151 2.517285 1.817133 0.07691 0.061309 0.044256 0.825616 0.82284 0.266408 0.020108 0.02004 +C:/jamsmodeldata/J2K_Rhone_Irrigation/output/ - 20151106_095821- cantons analyses - en m3 +Canton Demande_87_2007(m3) Transfert_87_2007(m3) Prélèvement_87_2007(m3) Demande_corrigée_87_2007(m3) Transfert_corrigé_87_2007(m3) Demande_2008_2012(m3) Transfert_2008_2012(m3) Prélèvement_2008_2012(m3) Demande_corrigée_2008_2012(m3) Transfert_corrigé_2008_2012(m3) +101 0.038675 0.027544 0.573629 0.009653 0.006875 0.010898 0.010898 0.970696 0.00272 0.00272 +117 0.394331 0.27847 8.423438 0.149157 0.105332 0.241043 0.196621 12.412633 0.091175 0.074372 +118 0.035132 0.034197 0.218757 0.004798 0.004671 0 0 0.356935 0 0 +119 0.330485 0.307797 4.191405 0.057683 0.053723 0.253579 0.253579 4.789801 0.04426 0.04426 +120 0.249434 0.219223 3.669709 0.047527 0.041771 0.157417 0.155998 4.539821 0.029994 0.029724 +140 0.336145 0.126592 0.419457 0.097472 0.036708 0.235409 0.071526 0.558762 0.068262 0.02074 +518 1.010173 1.010173 3.911576 0.0861 0.0861 1.319322 1.319322 5.070826 0.11245 0.11245 +717 0.976888 0.78385 0.380052 0.080729 0.064777 0.736042 0.695013 0.300758 0.060826 0.057435 +722 3.110284 0.938037 0.318838 0.252597 0.076181 4.630096 1.038656 0.675474 0.376026 0.084353 +724 2.920256 1.544733 0.107614 0.218536 0.115599 3.004389 1.664069 0.25053 0.224832 0.12453 +2602 15.308788 7.069337 13.756076 4.173652 1.927321 15.308142 8.866723 16.835708 4.173476 2.417345 +2604 21.215305 2.434785 1.217048 5.707921 0.655072 23.681177 3.021655 1.957566 6.371357 0.812968 +2607 26.612278 9.860233 6.452943 3.604576 1.335547 29.66517 11.636738 5.963191 4.018084 1.576171 +2611 3.519411 1.116151 1.886452 0.497734 0.157852 3.204691 1.210114 2.805221 0.453224 0.171141 +2613 4.798906 1.26897 3.845586 0.979805 0.259089 6.072636 1.659159 3.615312 1.239866 0.338755 +2615 20.87723 3.963346 4.022067 5.572172 1.057824 15.387307 3.801594 9.276599 4.106901 1.014653 +2616 16.551489 4.580895 6.181367 2.243863 0.621026 10.175312 4.016601 6.058549 1.379453 0.544525 +2619 21.005241 4.614979 2.713535 2.986182 0.656082 20.973611 4.705196 5.48112 2.981686 0.668908 +2621 10.871056 2.145977 3.456798 1.684833 0.332591 12.144726 2.74839 5.475886 1.882231 0.425955 +2623 3.642063 0.585074 1.532132 0.571388 0.09179 4.882377 0.705542 1.893185 0.765975 0.110689 +2625 8.525637 2.517849 5.056037 0.334721 0.098852 8.207748 2.707307 2.619503 0.32224 0.10629 +2626 8.120689 2.188318 3.458965 1.234265 0.332603 9.851371 2.692578 7.658636 1.497312 0.409246 +2628 2.191922 0.943407 6.882843 0.505458 0.21755 2.944593 1.439133 6.886378 0.679024 0.331864 +2629 1.316258 0.438702 3.494542 0.670671 0.223531 1.661873 0.589866 0.777969 0.846772 0.300554 +2632 0.900983 0.445887 0.60139 0.397629 0.196782 0.894628 0.581997 0.612703 0.394824 0.256852 +2634 11.395395 1.464867 7.072728 3.096645 0.398071 
14.125726 1.9197 7.823647 3.8386 0.52167 +3006 19.850646 2.24787 0.11491 2.419813 0.274018 19.172996 2.238892 0.136045 2.337207 0.272923 +3023 40.997408 3.646569 1.516696 1.04178 0.092663 40.191972 3.839307 0.876913 1.021313 0.09756 +3026 18.827517 4.291872 0.552781 0.406229 0.092603 15.371109 5.067849 0.692753 0.331652 0.109346 +3802 1.877964 0.779972 1.689062 0.133634 0.055502 0.62677 0.471609 2.204683 0.0446 0.033559 +3807 1.546681 0.474819 1.984129 0.144523 0.044367 0.25359 0.253263 2.559996 0.023696 0.023665 +3808 5.161426 1.188763 2.790162 0.516307 0.118914 2.723435 1.072718 4.568417 0.27243 0.107306 +3815 0.481722 0.352534 0.238714 0.028522 0.020873 0.213369 0.193297 0.295723 0.012633 0.011445 +3819 4.100841 1.876401 3.20979 0.27951 0.127894 1.892632 1.415694 3.253579 0.129 0.096493 +3822 0.085839 0.085839 20.436857 0.019201 0.019201 0.032444 0.032444 30.088511 0.007257 0.007257 +3824 0.561896 0.339953 5.188486 0.106597 0.064492 0.324189 0.306191 6.103692 0.061502 0.058087 +3825 0.667061 0.361257 0.558567 0.064016 0.034669 0.168042 0.153776 1.139189 0.016126 0.014757 +3830 0.242767 0.214485 5.167457 0.04055 0.035826 0.008025 0.008025 4.35326 0.00134 0.00134 +3837 0.816296 0.300685 0.711552 0.069631 0.025649 0.298395 0.166085 1.105724 0.025454 0.014167 +3846 0.626274 0.201417 1.879752 0.248087 0.079788 0.31956 0.146078 2.536342 0.126588 0.057866 +3853 0.919435 0.421988 0.350943 0.049273 0.022614 0.317018 0.243629 0.285872 0.016989 0.013056 +4213 1.416413 0.460989 0.844267 0.079587 0.025903 1.875577 0.458608 0.966737 0.105387 0.025769 +4233 0.172556 0.172556 0.146048 0.008295 0.008295 0.191447 0.191447 0.15854 0.009203 0.009203 +6907 0.873644 0.873644 0.245541 0.026527 0.026527 0.638854 0.638854 0.157585 0.019398 0.019398 +6924 1.282803 0.555927 0 0.132385 0.057371 0.32139 0.32139 0 0.033167 0.033167 +6931 0.997792 0.410328 0.016724 0.063978 0.02631 0.236281 0.219006 0.02754 0.01515 0.014043 +6937 1.246886 0.331039 1.900714 0.247752 0.065776 0.544007 0.243893 1.969266 0.108092 0.048461 +6938 3.205651 0.824573 1.22201 0.199589 0.051339 2.154332 0.699542 1.972266 0.134132 0.043555 +6944 0.953483 0.114139 5.235179 0.365605 0.043766 0.552359 0.096372 5.133861 0.211798 0.036953 +6945 0.265883 0.045054 0.010553 0.151548 0.02568 0.179103 0.035963 0 0.102086 0.020498 +6948 0.072176 0.072176 0 0.017209 0.017209 0.050352 0.050352 0 0.012006 0.012006 +6949 0.215863 0.215863 0.104251 0.048096 0.048096 0.149703 0.149703 0.156426 0.033355 0.033355 +7405 0.051784 0.051784 0 0.001108 0.001108 0.021369 0.021369 0 0.000457 0.000457 +8405 29.733769 3.927795 0.217301 1.694707 0.223869 32.673113 4.472755 1.481742 1.862237 0.254929 +8406 18.450639 3.091805 3.471819 1.602146 0.268474 20.486082 3.272034 5.790008 1.778892 0.284124 +8409 14.146846 2.614742 0 1.572894 0.290716 19.93567 3.32771 1.454691 2.216515 0.369986 +8413 27.319482 3.287464 103.934619 4.385084 0.527675 31.702627 3.575917 20.693445 5.088628 0.573975 +8415 5.107451 1.910689 0 0.424942 0.15897 7.236265 2.560422 0.007549 0.602061 0.213028 +8416 17.624686 2.195132 6.478532 1.016895 0.126653 22.239359 2.723995 5.641193 1.283149 0.157167 +8418 9.951525 4.359924 0 1.064084 0.466192 11.281004 4.871401 0.338284 1.20624 0.520883 +8423 6.872511 3.629978 42.35799 1.303328 0.688402 7.384199 3.849529 10.204106 1.400366 0.730039 +410 7.722085 4.188782 22.006962 0.930261 0.504613 6.650156 5.148766 4.664238 0.801129 0.62026 +413 1.66379 1.130202 1.084895 0.195044 0.132492 0.687305 0.55744 1.949833 0.080572 0.065348 +414 3.617298 2.618371 25.708648 0.628216 
0.454732 3.199771 2.871573 7.82113 0.555704 0.498706 +416 1.777315 1.594935 7.859514 0.28618 0.256814 1.604435 1.570716 5.889341 0.258343 0.252914 +419 1.566143 1.070939 10.025748 0.272042 0.186024 1.382249 1.325161 0.859772 0.240099 0.230183 +420 7.639005 2.480695 0 0.27745 0.090099 7.556421 3.059882 0.098948 0.274451 0.111136 +421 0.095433 0.075117 0.024286 0.011526 0.009072 0 0 0.07234 0 0 +427 2.635913 1.985126 2.329419 0.722467 0.544095 2.211268 2.123202 4.169108 0.606078 0.58194 +429 0.319179 0.289775 3.007488 0.043957 0.039907 0.127915 0.121638 1.904867 0.017616 0.016752 +430 0.841845 0.503834 47.711771 0.117757 0.070476 0.548435 0.383938 9.953381 0.076715 0.053705 +505 5.047848 1.036597 1.608205 0.358189 0.073556 7.81116 1.174313 1.663783 0.55427 0.083328 +509 9.629885 2.0679 13.482833 1.143634 0.245581 15.335355 2.506002 12.239339 1.821209 0.29761 +512 5.818434 2.260436 7.872848 1.04473 0.405873 5.493492 2.415979 10.047226 0.986385 0.433802 +515 0.663143 0.413384 0.200081 0.187617 0.116955 0.744445 0.455372 0.080886 0.210619 0.128834 +516 0.794443 0.525478 2.735729 0.27586 0.182466 0.790071 0.610381 0.809955 0.274343 0.211947 +522 10.049227 4.736774 2.0441 0.8275 0.390048 10.05843 5.053283 2.244727 0.828258 0.416111 +523 7.95972 2.665627 0 0.82997 0.277948 10.590734 2.99104 0.336162 1.104309 0.311879 +524 3.373298 2.064769 3.155805 0.426056 0.260785 3.82297 2.394615 2.6615 0.48285 0.302445 +1326 55.494123 1.452009 34.271376 12.696933 0.332217 56.13918 1.597838 21.891082 12.844521 0.365582 +1327 2.465128 1.06046 85.193852 0.3305 0.142176 1.255579 0.721022 33.38636 0.168336 0.096667 +8319 0.57793 0.362894 61.218048 0.125253 0.078649 0.192989 0.18527 46.070747 0.041826 0.040153 +8408 35.491994 8.824917 0 2.96501 0.737236 32.30718 8.557315 1.45296 2.69895 0.71488 +8411 6.194799 3.906419 119.813887 1.665564 1.050299 6.762951 4.323442 28.910291 1.81832 1.162421 +102 0.343373 0.343336 0.238157 0.019054 0.019052 0.260759 0.260759 0.25469 0.01447 0.01447 +126 0.873495 0.522794 0 0.008778 0.005254 0.680272 0.380439 0 0.006836 0.003823 +135 0 0 0.268024 0 0 0 0 0.067882 0 0 +2103 0.867809 0.491807 0.209419 0.038483 0.021809 0.143598 0.116571 0.390772 0.006368 0.005169 +2114 0.563585 0.351379 0.503048 0.017471 0.010893 0 0 0.513344 0 0 +2134 1.324523 0.739343 0.189952 0.037078 0.020697 0.137124 0.13179 0.29915 0.003839 0.003689 +2138 0.736515 0.090589 0.102905 0.010917 0.001343 0.058612 0.014033 0.059659 0.000869 0.000208 +3909 1.116062 0.537981 0.532057 0.099684 0.048051 0.035765 0.035765 0.877519 0.003194 0.003194 +6905 1.37297 0.685983 0.133114 0.035415 0.017695 1.237468 0.64323 0.178476 0.03192 0.016592 +6910 0.360458 0.101792 0 0.022644 0.006395 0.090738 0.059801 0 0.0057 0.003757 +6925 1.57361 0.443698 0.091914 0.091103 0.025688 0.933493 0.344293 0.035088 0.054044 0.019933 +7116 1.283008 0.485557 0 0.062557 0.023675 0.438551 0.229484 0.130689 0.021383 0.011189 +7151 2.517285 1.817133 0.07691 0.061309 0.044256 0.825616 0.82284 0.266408 0.020108 0.02004 diff --git a/irrigation-R-codes/Irrigation/HRUs_culture.csv b/irrigation-R-codes/Irrigation/HRUs_culture.csv new file mode 100644 index 0000000000000000000000000000000000000000..b34af923077da681d25904dbd449543aa9751f99 --- /dev/null +++ b/irrigation-R-codes/Irrigation/HRUs_culture.csv @@ -0,0 +1,1390 @@ +HRU;Culture +1581;Maraichage +1617;Maraichage +1618;Maraichage +1623;Maraichage +1646;Maraichage +1658;Maraichage +1661;Maraichage +1667;Maraichage +1672;Maraichage +1679;Maraichage +1688;Maraichage +1691;Maraichage +1692;Maraichage 
+1693;Maraichage +1713;Maraichage +1743;Maraichage +1760;Maraichage +1765;Maraichage +1766;Maraichage +1773;Maraichage +1774;Maraichage +1784;Maraichage +1785;Maraichage +1799;Maraichage +1814;Maraichage +1819;Maraichage +1823;Maraichage +1835;Maraichage +1839;Maraichage +1843;Maraichage +1849;Maraichage +1867;Maraichage +1878;Maraichage +1879;Maraichage +1880;Maraichage +1881;Maraichage +1887;Maraichage +1912;Maraichage +1917;Maraichage +1930;Maraichage +1944;Maraichage +1948;Maraichage +1950;Maraichage +1951;Maraichage +1957;Maraichage +2013;Maraichage +2025;Maraichage +2026;Maraichage +2128;Maraichage +2153;Mais +2155;Maraichage +2165;Maraichage +2166;Mais +2195;Maraichage +2220;Maraichage +2232;Mais +2242;Maraichage +2243;Maraichage +2248;Maraichage +2265;Maraichage +2267;Maraichage +2268;Mais +2269;Mais +2296;Mais +2304;Mais +2306;Maraichage +2311;Maraichage +2317;Maraichage +2324;Maraichage +2325;Mais +2332;Maraichage +2337;Maraichage +2342;Maraichage +2347;Maraichage +2360;Mais +2368;Mais +2378;Maraichage +2384;Maraichage +2388;Mais +2391;Mais +2397;Maraichage +2399;Mais +2407;Mais +2410;Maraichage +2419;Maraichage +2420;Mais +2429;Maraichage +2430;Maraichage +2435;Maraichage +2440;Maraichage +2449;Maraichage +2455;Maraichage +2459;Maraichage +2462;Mais +2473;Maraichage +2475;Maraichage +2483;Maraichage +2486;Maraichage +2500;Maraichage +2510;Maraichage +2519;Maraichage +2533;Maraichage +2540;Maraichage +2541;Maraichage +2552;Maraichage +2559;Maraichage +2564;Maraichage +2572;Maraichage +2589;Maraichage +2607;Maraichage +2630;Maraichage +3143;Maraichage +3148;Maraichage +3167;Maraichage +3168;Maraichage +3186;Maraichage +3222;Maraichage +3223;Maraichage +3234;Maraichage +3263;Maraichage +3283;Maraichage +3302;Maraichage +3356;Maraichage +3412;Maraichage +3483;Maraichage +3593;Maraichage +3617;Maraichage +3641;Maraichage +3682;Maraichage +3714;Maraichage +3728;Maraichage +3739;Maraichage +3754;Maraichage +3780;Maraichage +3805;Maraichage +3824;Maraichage +3886;Maraichage +3892;Maraichage +3902;Maraichage +3903;Maraichage +3909;Maraichage +3934;Maraichage +3994;Maraichage +3995;Maraichage +4031;Maraichage +4048;Maraichage +4083;Maraichage +4106;Maraichage +4114;Maraichage +4133;Maraichage +4142;Maraichage +4148;Maraichage +4210;Maraichage +4228;Maraichage +4288;Maraichage +4289;Maraichage +4296;Maraichage +4304;Maraichage +4333;Maraichage +4528;Maraichage +4569;Maraichage +4585;Maraichage +4662;Maraichage +4833;Maraichage +4840;Maraichage +4857;Maraichage +4886;Maraichage +4930;Maraichage +4986;Maraichage +4996;Maraichage +5008;Maraichage +5050;Maraichage +5067;Mais +5090;Mais +5096;Maraichage +5113;Mais +5117;Mais +5122;Maraichage +5123;Maraichage +5132;Maraichage +5133;Mais +5141;Maraichage +5163;Mais +5180;Maraichage +5190;Mais +5221;Maraichage +5226;Mais +5230;Maraichage +5231;Maraichage +5255;Maraichage +5268;Maraichage +5282;Mais +5304;Mais +5329;Maraichage +5331;Mais +5379;Mais +5414;Mais +5416;Mais +5442;Mais +5467;Mais +5517;Mais +5518;Mais +5526;Mais +5533;Mais +5540;Mais +5545;Mais +5551;Mais +5564;Mais +5565;Mais +5567;Mais +5568;Mais +5583;Mais +5598;Mais +5599;Mais +5606;Mais +5613;Mais +5619;Mais +5646;Mais +5653;Mais +5654;Mais +5659;Mais +5666;Mais +5679;Mais +5684;Mais +5688;Mais +5707;Mais +5715;Mais +5717;Mais +5718;Mais +5729;Mais +5738;Mais +5743;Mais +5748;Mais +5750;Mais +5757;Mais +5765;Mais +5769;Mais +5777;Mais +5778;Mais +5785;Mais +5788;Mais +5789;Mais +5797;Mais +5798;Mais +5802;Mais +5805;Mais +5806;Mais +5821;Vergers +5824;Mais +5846;Mais +5847;Mais 
+5852;Mais +5857;Mais +5859;Mais +5860;Mais +5862;Mais +5874;Mais +5878;Vergers +5879;Vergers +5880;Mais +5884;Mais +5885;Mais +5892;Mais +5893;Mais +5898;Mais +5908;Mais +5909;Mais +5919;Mais +5934;Mais +5940;Vergers +5943;Mais +5944;Mais +5962;Mais +5965;Mais +5967;Mais +5968;Mais +5975;Mais +5987;Mais +5995;Mais +5996;Mais +6004;Mais +6007;Mais +6008;Mais +6012;Mais +6019;Mais +6030;Mais +6038;Mais +6042;Mais +6043;Mais +6044;Mais +6050;Mais +6051;Mais +6052;Mais +6060;Mais +6062;Mais +6063;Mais +6073;Mais +6086;Mais +6087;Mais +6093;Mais +6106;Mais +6109;Mais +6115;Mais +6116;Mais +6122;Mais +6133;Mais +6137;Mais +6138;Mais +6145;Vergers +6146;Vergers +6147;Mais +6149;Mais +6150;Mais +6153;Mais +6154;Mais +6163;Mais +6168;Mais +6171;Mais +6175;Mais +6178;Mais +6179;Mais +6183;Vergers +6185;Mais +6188;Mais +6191;Vergers +6193;Mais +6206;Mais +6211;Vergers +6214;Mais +6217;Mais +6218;Mais +6219;Mais +6225;Vergers +6227;Mais +6231;Mais +6234;Mais +6235;Mais +6239;Vergers +6240;Mais +6243;Mais +6247;Mais +6254;Mais +6255;Mais +6256;Mais +6263;Mais +6268;Mais +6278;Mais +6281;Vergers +6283;Mais +6288;Vergers +6289;Mais +6297;Vergers +6305;Mais +6308;Mais +6309;Mais +6316;Mais +6321;Mais +6327;Mais +6337;Vergers +6338;Vergers +6339;Mais +6340;Mais +6350;Vergers +6352;Mais +6353;Mais +6359;Mais +6360;Mais +6361;Mais +6366;Mais +6371;Mais +6376;Vergers +6377;Mais +6378;Mais +6379;Mais +6382;Vergers +6386;Mais +6392;Mais +6393;Mais +6394;Mais +6396;Mais +6401;Vergers +6402;Mais +6403;Mais +6408;Vergers +6409;Mais +6411;Mais +6412;Mais +6419;Mais +6420;Mais +6426;Vergers +6427;Mais +6434;Mais +6441;Mais +6443;Mais +6444;Mais +6447;Vergers +6449;Mais +6450;Mais +6452;Vergers +6454;Mais +6459;Mais +6461;Mais +6467;Vergers +6468;Mais +6470;Mais +6480;Vergers +6481;Mais +6483;Mais +6484;Mais +6490;Mais +6491;Mais +6494;Mais +6496;Mais +6497;Mais +6508;Mais +6509;Mais +6512;Mais +6513;Mais +6514;Mais +6518;Mais +6527;Mais +6530;Vergers +6538;Mais +6545;Mais +6551;Mais +6555;Mais +6557;Mais +6558;Mais +6564;Mais +6565;Mais +6572;Vergers +6573;Vergers +6574;Mais +6584;Vergers +6587;Mais +6588;Mais +6604;Vergers +6606;Mais +6608;Mais +6611;Mais +6626;Vergers +6632;Mais +6638;Vergers +6641;Mais +6648;Mais +6653;Mais +6674;Mais +6684;Vergers +6686;Mais +6698;Mais +6711;Maraichage +6712;Mais +6727;Mais +6748;Mais +6753;Mais +6772;Mais +6780;Vergers +6809;Maraichage +6824;Maraichage +6837;Maraichage +6844;Vergers +6857;Vergers +6868;Maraichage +6881;Maraichage +6911;Vergers +6934;Vergers +6935;Maraichage +6943;Mais +6957;Maraichage +6958;Maraichage +6967;Vergers +6983;Vergers +6988;Vergers +7042;Mais +7047;Mais +7053;Vergers +7060;Mais +7061;Mais +7066;Mais +7070;Vergers +7071;Vergers +7076;Mais +7084;Mais +7091;Mais +7095;Vergers +7103;Vergers +7104;Mais +7111;Mais +7115;Mais +7119;Vergers +7120;Vergers +7121;Mais +7122;Mais +7134;Vergers +7135;Vergers +7136;Mais +7144;Vergers +7146;Mais +7147;Mais +7155;Mais +7156;Mais +7161;Mais +7167;Vergers +7175;Vergers +7176;Mais +7177;Mais +7178;Mais +7182;Mais +7192;Mais +7197;Vergers +7198;Mais +7204;Mais +7209;Mais +7219;Vergers +7220;Mais +7221;Mais +7222;Mais +7227;Mais +7236;Mais +7237;Mais +7238;Mais +7239;Mais +7249;Vergers +7250;Vergers +7258;Vergers +7259;Mais +7265;Vergers +7268;Vergers +7270;Mais +7282;Mais +7291;Vergers +7292;Mais +7301;Vergers +7302;Mais +7303;Mais +7304;Mais +7305;Mais +7307;Mais +7309;Vergers +7315;Vergers +7332;Mais +7339;Mais +7343;Vergers +7344;Mais +7347;Mais +7353;Vergers +7363;Mais +7365;Vergers +7366;Mais +7374;Vergers 
+7379;Vergers +7380;Vergers +7384;Vergers +7390;Mais +7392;Mais +7396;Vergers +7397;Mais +7398;Mais +7409;Mais +7412;Vergers +7422;Vergers +7423;Mais +7436;Mais +7440;Vergers +7450;Vergers +7462;Mais +7469;Vergers +7470;Vergers +7471;Mais +7474;Mais +7480;Mais +7483;Vergers +7484;Mais +7485;Mais +7489;Vergers +7494;Vergers +7495;Mais +7497;Mais +7509;Mais +7518;Vergers +7524;Mais +7539;Mais +7540;Mais +7545;Mais +7546;Mais +7556;Vergers +7567;Mais +7568;Mais +7572;Vergers +7573;Vergers +7576;Mais +7577;Mais +7586;Vergers +7587;Mais +7588;Mais +7589;Vergers +7590;Mais +7591;Mais +7599;Mais +7616;Mais +7624;Mais +7625;Mais +7641;Mais +7648;Mais +7649;Mais +7657;Mais +7663;Mais +7668;Mais +7671;Mais +7678;Mais +7679;Mais +7684;Mais +7685;Mais +7689;Mais +7693;Mais +7694;Vergers +7709;Vergers +7715;Mais +7716;Mais +7724;Mais +7729;Vergers +7743;Mais +7750;Mais +7751;Mais +7759;Mais +7765;Mais +7770;Mais +7775;Vergers +7779;Mais +7780;Mais +7787;Vergers +7788;Vergers +7792;Vergers +7802;Vergers +7808;Vergers +7809;Vergers +7816;Mais +7819;Vergers +7820;Mais +7821;Mais +7822;Vergers +7826;Mais +7842;Mais +7843;Mais +7848;Mais +7849;Vergers +7857;Mais +7858;Vergers +7863;Mais +7867;Mais +7871;Vergers +7876;Mais +7877;Vergers +7886;Vergers +7892;Mais +7893;Vergers +7894;Mais +7896;Mais +7906;Mais +7912;Mais +7913;Vergers +7914;Vergers +7915;Vergers +7921;Vergers +7925;Vergers +7929;Vergers +7930;Mais +7933;Mais +7938;Mais +7939;Vergers +7950;Mais +7951;Mais +7955;Vergers +7956;Mais +7957;Mais +7961;Vergers +7962;Mais +7963;Mais +7967;Vergers +7968;Vergers +7969;Vergers +7975;Mais +7976;Vergers +7980;Mais +7984;Mais +7985;Mais +7992;Mais +7994;Vergers +7999;Vergers +8000;Mais +8003;Mais +8010;Mais +8012;Vergers +8013;Vergers +8014;Vergers +8020;Mais +8021;Vergers +8024;Vergers +8034;Vergers +8039;Vergers +8040;Vergers +8041;Vergers +8048;Vergers +8049;Vergers +8050;Mais +8052;Mais +8053;Mais +8054;Vergers +8055;Vergers +8056;Mais +8058;Vergers +8061;Mais +8078;Vergers +8090;Vergers +8094;Vergers +8095;Mais +8104;Mais +8115;Mais +8118;Mais +8121;Vergers +8124;Vergers +8125;Vergers +8126;Vergers +8128;Vergers +8129;Mais +8133;Vergers +8134;Mais +8135;Mais +8154;Vergers +8160;Vergers +8167;Vergers +8168;Mais +8178;Mais +8179;Mais +8181;Vergers +8182;Mais +8185;Mais +8186;Mais +8203;Vergers +8204;Mais +8206;Mais +8225;Vergers +8226;Vergers +8227;Mais +8230;Vergers +8236;Vergers +8237;Vergers +8238;Mais +8246;Mais +8253;Mais +8258;Mais +8263;Vergers +8280;Mais +8286;Mais +8289;Mais +8294;Mais +8298;Mais +8310;Mais +8311;Mais +8316;Mais +8322;Mais +8337;Mais +8343;Mais +8360;Mais +8377;Mais +8391;Mais +8413;Mais +8431;Mais +8436;Mais +8441;Mais +8442;Mais +8449;Mais +8450;Mais +8453;Mais +8459;Mais +8480;Mais +8496;Mais +8543;Mais +8550;Mais +8558;Mais +8559;Mais +8562;Mais +8567;Mais +8575;Mais +8594;Mais +8609;Mais +8614;Mais +8640;Mais +8644;Mais +8651;Mais +8657;Mais +8678;Mais +8686;Mais +8700;Mais +8712;Mais +8727;Mais +8741;Mais +8765;Mais +8779;Mais +8783;Mais +8784;Mais +8803;Vergers +8804;Mais +8823;Mais +8849;Mais +8854;Mais +8870;Mais +8871;Mais +8897;Mais +8905;Vergers +8921;Mais +8926;Mais +8942;Vergers +8943;Mais +8957;Vergers +8962;Vergers +8982;Mais +8984;Vergers +8985;Mais +8986;Mais +9010;Mais +9027;Prairies +9033;Vergers +9034;Mais +9035;Mais +9043;Mais +9044;Prairies +9061;Vergers +9063;Mais +9064;Mais +9079;Mais +9088;Prairies +9096;Mais +9099;Prairies +9107;Vergers +9108;Mais +9117;Mais +9144;Mais +9145;Mais +9148;Prairies +9159;Prairies +9168;Mais +9171;Mais +9172;Mais +9173;Mais 
+9186;Prairies +9188;Mais +9193;Vergers +9194;Vergers +9195;Mais +9201;Mais +9225;Prairies +9236;Prairies +9247;Prairies +9248;Prairies +9249;Prairies +9263;Mais +9270;Mais +9273;Prairies +9300;Prairies +9301;Prairies +9304;Vergers +9311;Vergers +9326;Prairies +9327;Prairies +9336;Prairies +9350;Prairies +9356;Mais +9362;Mais +9403;Prairies +9410;Mais +9419;Mais +9421;Prairies +9443;Mais +9444;Mais +9449;Mais +9451;Mais +9458;Prairies +9463;Mais +9466;Prairies +9472;Prairies +9482;Mais +9492;Mais +9493;Mais +9509;Mais +9514;Prairies +9515;Mais +9522;Mais +9525;Prairies +9527;Prairies +9535;Mais +9536;Mais +9542;Prairies +9562;Prairies +9573;Prairies +9578;Prairies +9595;Mais +9598;Prairies +9614;Mais +9619;Mais +9624;Mais +9627;Prairies +9630;Mais +9634;Mais +9638;Prairies +9654;Prairies +9655;Prairies +9656;Prairies +9666;Prairies +9670;Mais +9673;Prairies +9679;Prairies +9680;Prairies +9689;Prairies +9697;Mais +9700;Prairies +9718;Mais +9721;Prairies +9729;Prairies +9730;Prairies +9731;Prairies +9733;Mais +9734;Prairies +9751;Mais +9753;Prairies +9770;Prairies +9775;Mais +9808;Prairies +9810;Prairies +9825;Mais +9830;Mais +9842;Prairies +9847;Mais +9850;Prairies +9852;Mais +9860;Prairies +9871;Mais +9880;Mais +9882;Prairies +9890;Prairies +9891;Mais +9926;Prairies +9938;Mais +9940;Prairies +9949;Prairies +9951;Prairies +9990;Prairies +10006;Prairies +10007;Prairies +10016;Prairies +10018;Prairies +10022;Mais +10025;Prairies +10029;Prairies +10063;Prairies +10087;Prairies +10094;Prairies +10120;Mais +10130;Mais +10134;Vergers +10156;Prairies +10157;Prairies +10158;Vergers +10186;Prairies +10195;Vergers +10201;Mais +10224;Vergers +10228;Mais +10234;Mais +10236;Vergers +10246;Prairies +10267;Mais +10276;Mais +10284;Vergers +10288;Vergers +10293;Mais +10302;Mais +10317;Vigne +10327;Vergers +10332;Vergers +10358;Vigne +10364;Mais +10378;Vigne +10379;Vergers +10380;Mais +10395;Vergers +10398;Vergers +10405;Mais +10410;Vergers +10411;Vergers +10415;Mais +10426;Vigne +10429;Vigne +10438;Vigne +10442;Vergers +10452;Mais +10453;Vigne +10456;Vergers +10457;Vergers +10463;Vergers +10468;Vigne +10490;Vergers +10500;Vigne +10512;Vigne +10520;Vigne +10533;Vergers +10537;Vergers +10545;Vergers +10552;Vergers +10557;Vergers +10558;Vigne +10566;Vigne +10588;Vigne +10604;Vergers +10609;Vigne +10615;Vergers +10620;Vergers +10634;Vigne +10636;Vergers +10640;Vergers +10645;Vergers +10649;Vigne +10657;Vergers +10674;Vergers +10679;Vigne +10683;Vergers +10697;Vergers +10703;Vigne +10711;Vergers +10712;Vergers +10722;Vergers +10732;Vergers +10733;Vergers +10734;Vergers +10745;Vergers +10749;Vergers +10757;Vergers +10758;Vergers +10785;Vergers +10792;Vergers +10827;Vergers +10834;Vergers +10835;Vergers +10846;Vergers +10879;Vergers +10893;Vigne +10894;Vigne +10908;Vergers +10931;Vigne +10938;Vigne +10947;Vigne +10970;Vigne +10971;Vigne +10974;Ble dur +10979;Ble dur +11004;Vigne +11026;Vigne +11027;Vigne +11032;Vigne +11057;Vigne +11063;Vigne +11069;Vigne +11075;Vigne +11082;Ble dur +11116;Ble dur +11120;Vigne +11136;Vigne +11143;Vigne +11166;Vigne +11167;Vigne +11175;Vigne +11179;Vigne +11180;Vigne +11188;Vigne +11189;Vigne +11190;Vigne +11191;Vigne +11198;Mais +11208;Vigne +11216;Vigne +11223;Vigne +11224;Vigne +11227;Ble dur +11232;Vigne +11238;Vigne +11245;Vigne +11246;Mais +11261;Vigne +11262;Vigne +11266;Vigne +11267;Vigne +11274;Maraichage +11275;Vigne +11276;Vigne +11281;Vigne +11283;Ble dur +11305;Vigne +11313;Maraichage +11314;Maraichage +11315;Mais +11316;Vigne +11319;Vigne +11320;Vigne +11321;Vigne 
+11333;Mais +11349;Vigne +11350;Vigne +11354;Vigne +11355;Mais +11363;Maraichage +11366;Maraichage +11367;Maraichage +11368;Vigne +11374;Mais +11375;Vigne +11390;Vigne +11398;Vigne +11401;Vergers +11406;Maraichage +11407;Vigne +11408;Vigne +11416;Maraichage +11417;Vigne +11419;Vigne +11425;Maraichage +11426;Vigne +11434;Vigne +11450;Maraichage +11453;Maraichage +11454;Vigne +11455;Vigne +11460;Vigne +11468;Maraichage +11469;Vigne +11470;Vigne +11473;Vigne +11483;Maraichage +11485;Maraichage +11486;Mais +11491;Mais +11493;Vergers +11499;Maraichage +11500;Maraichage +11510;Maraichage +11512;Mais +11521;Vigne +11523;Vergers +11526;Mais +11534;Vergers +11536;Maraichage +11540;Vigne +11543;Maraichage +11546;Mais +11547;Vigne +11562;Vigne +11571;Maraichage +11572;Vigne +11574;Vergers +11578;Vigne +11583;Maraichage +11584;Vigne +11586;Maraichage +11588;Vergers +11598;Maraichage +11606;Prairies +11607;Maraichage +11608;Mais +11613;Maraichage +11617;Maraichage +11622;Maraichage +11625;Maraichage +11626;Vergers +11630;Maraichage +11631;Maraichage +11635;Maraichage +11639;Maraichage +11644;Vergers +11649;Mais +11651;Maraichage +11652;Maraichage +11655;Maraichage +11656;Maraichage +11670;Vergers +11673;Maraichage +11675;Maraichage +11680;Maraichage +11690;Vergers +11694;Vergers +11700;Maraichage +11706;Maraichage +11711;Maraichage +11712;Vergers +11716;Vergers +11721;Maraichage +11726;Maraichage +11734;Prairies +11739;Maraichage +11742;Maraichage +11746;Maraichage +11750;Maraichage +11751;Maraichage +11753;Vergers +11758;Maraichage +11763;Maraichage +11782;Vergers +11785;Maraichage +11786;Vergers +11790;Maraichage +11797;Vergers +11799;Vergers +11804;Maraichage +11807;Vergers +11811;Maraichage +11814;Maraichage +11818;Vergers +11819;Vergers +11823;Maraichage +11827;Vergers +11828;Vergers +11829;Vergers +11832;Ble dur +11834;Maraichage +11841;Vergers +11851;Maraichage +11852;Ble dur +11855;Vergers +11857;Maraichage +11858;Ble dur +11862;Prairies +11864;Vergers +11865;Maraichage +11872;Vergers +11876;Maraichage +11877;Vergers +11878;Ble dur +11881;Vergers +11885;Ble dur +11887;Vergers +11889;Vergers +11897;Vergers +11902;Vergers +11903;Ble dur +11904;Ble dur +11909;Vergers +11910;Vergers +11919;Vergers +11922;Vergers +11923;Vergers +11927;Vergers +11928;Maraichage +11931;Vergers +11932;Ble dur +11935;Prairies +11937;Maraichage +11938;Vergers +11940;Ble dur +11942;Vergers +11944;Maraichage +11948;Ble dur +11949;Ble dur +11950;Vergers +11951;Vergers +11952;Vergers +11957;Ble dur +11961;Maraichage +11963;Ble dur +11968;Ble dur +11971;Vergers +11978;Maraichage +11980;Vergers +11983;Ble dur +11984;Ble dur +11985;Vergers +11987;Vergers +11992;Ble dur +11993;Ble dur +12000;Maraichage +12003;Vergers +12005;Ble dur +12006;Maraichage +12009;Ble dur +12017;Ble dur +12022;Vergers +12023;Maraichage +12028;Ble dur +12033;Vergers +12036;Ble dur +12039;Vergers +12040;Vergers +12046;Vergers +12058;Vergers +12059;Vergers +12063;Vergers +12066;Ble dur +12073;Ble dur +12074;Ble dur +12078;Vergers +12079;Vergers +12084;Vergers +12087;Ble dur +12091;Ble dur +12093;Vergers +12106;Vergers +12107;Ble dur +12109;Vergers +12114;Vergers +12115;Vergers +12120;Ble dur +12125;Ble dur +12126;Ble dur +12129;Vergers +12130;Vergers +12132;Ble dur +12142;Vergers +12144;Ble dur +12146;Vergers +12150;Ble dur +12151;Vergers +12153;Ble dur +12155;Vergers +12167;Vergers +12173;Vergers +12174;Vergers +12180;Prairies +12184;Vergers +12186;Ble dur +12196;Ble dur +12202;Vergers +12204;Vergers +12210;Vergers +12212;Ble dur +12214;Vergers 
+12215;Prairies +12219;Vergers +12241;Vergers +12242;Ble dur +12246;Prairies +12256;Vergers +12260;Vergers +12262;Prairies +12267;Prairies +12270;Prairies +12272;Prairies +12273;Prairies +12274;Prairies +12275;Prairies +12276;Prairies +12281;Prairies +12286;Prairies +12289;Vergers +12290;Prairies +12296;Prairies +12298;Ble dur +12301;Vergers +12303;Ble dur +12307;Prairies +12308;Vergers +12313;Prairies +12314;Ble dur +12317;Prairies +12319;Vergers +12320;Prairies +12329;Prairies +12330;Prairies +12337;Prairies +12342;Prairies +12345;Prairies +12353;Prairies +12356;Ble dur +12364;Prairies +12375;Prairies +12378;Prairies +12393;Prairies +12394;Ble dur +12397;Ble dur +12408;Ble dur +12409;Prairies +12410;Ble dur +12412;Ble dur +12415;Ble dur +12417;Ble dur +12422;Ble dur +12423;Ble dur +12427;Ble dur +12430;Ble dur +12435;Ble dur +12437;Ble dur +12445;Ble dur +12446;Ble dur diff --git a/irrigation-R-codes/Irrigation/HRUs_culture_test.csv b/irrigation-R-codes/Irrigation/HRUs_culture_test.csv new file mode 100644 index 0000000000000000000000000000000000000000..576371d7247668b398be2c084220ec9ef96551be --- /dev/null +++ b/irrigation-R-codes/Irrigation/HRUs_culture_test.csv @@ -0,0 +1,1384 @@ +HRU;Culture;Canton +1581;Maraichage;2114 +1617;Maraichage;2114 +1618;Maraichage;2114 +1623;Maraichage;2114 +1646;Maraichage;2114 +1658;Maraichage;2138 +1661;Maraichage;2138 +1667;Maraichage;2138 +1672;Maraichage;2138 +1679;Maraichage;2114 +1688;Maraichage;2114 +1691;Maraichage;2138 +1692;Maraichage;2114 +1693;Maraichage;2114 +1713;Maraichage;2114 +1743;Maraichage;2103 +1760;Maraichage;2103 +1765;Maraichage;2103 +1766;Maraichage;2103 +1773;Maraichage;2114 +1774;Maraichage;2114 +1784;Maraichage;2114 +1785;Maraichage;2114 +1799;Maraichage;2114 +1814;Maraichage;2114 +1819;Maraichage;2114 +1823;Maraichage;2103 +1835;Maraichage;2114 +1839;Maraichage;2114 +1843;Maraichage;2114 +1849;Maraichage;2103 +1867;Maraichage;2103 +1878;Maraichage;2114 +1879;Maraichage;2114 +1880;Maraichage;2103 +1881;Maraichage;2103 +1887;Maraichage;2114 +1912;Maraichage;2103 +1917;Maraichage;2103 +1930;Maraichage;2114 +1944;Maraichage;2103 +1948;Maraichage;2114 +1950;Maraichage;2103 +1951;Maraichage;2103 +1957;Maraichage;2103 +2013;Maraichage;2103 +2025;Maraichage;2103 +2026;Maraichage;2103 +2128;Maraichage;2134 +2153;Mais;3909 +2155;Maraichage;2134 +2165;Maraichage;2134 +2166;Mais;3909 +2195;Maraichage;2134 +2220;Maraichage;2134 +2232;Mais;3909 +2242;Maraichage;2134 +2243;Maraichage;2134 +2248;Maraichage;2134 +2265;Maraichage;2134 +2267;Maraichage;2134 +2268;Mais;3909 +2269;Mais;3909 +2296;Mais;3909 +2304;Mais;3909 +2306;Maraichage;2134 +2311;Maraichage;2134 +2317;Maraichage;2134 +2324;Maraichage;2134 +2325;Mais;3909 +2332;Maraichage;2134 +2337;Maraichage;2134 +2342;Maraichage;2134 +2347;Maraichage;2134 +2360;Mais;3909 +2368;Mais;3909 +2378;Maraichage;2134 +2384;Maraichage;2134 +2388;Mais;3909 +2391;Mais;3909 +2397;Maraichage;7151 +2399;Mais;3909 +2407;Mais;3909 +2410;Maraichage;7151 +2419;Maraichage;7151 +2420;Mais;3909 +2429;Maraichage;7151 +2430;Maraichage;7151 +2435;Maraichage;7151 +2440;Maraichage;7151 +2449;Maraichage;7151 +2455;Maraichage;7151 +2459;Maraichage;7151 +2462;Mais;3909 +2473;Maraichage;7151 +2475;Maraichage;7151 +2483;Maraichage;7151 +2486;Maraichage;7151 +2500;Maraichage;7151 +2510;Maraichage;7151 +2519;Maraichage;7151 +2533;Maraichage;7151 +2540;Maraichage;7151 +2541;Maraichage;7151 +2552;Maraichage;7151 +2559;Maraichage;7151 +2564;Maraichage;7151 +2572;Maraichage;7151 +2589;Maraichage;7151 +2607;Maraichage;7151 
+2630;Maraichage;7151 +3143;Maraichage;7116 +3148;Maraichage;7116 +3167;Maraichage;7116 +3168;Maraichage;7116 +3186;Maraichage;7116 +3222;Maraichage;7116 +3223;Maraichage;7116 +3234;Maraichage;7116 +3263;Maraichage;7116 +3283;Maraichage;7116 +3302;Maraichage;7116 +3356;Maraichage;7116 +3412;Maraichage;126 +3483;Maraichage;126 +3593;Maraichage;126 +3617;Maraichage;126 +3641;Maraichage;126 +3682;Maraichage;126 +3714;Maraichage;126 +3728;Maraichage;126 +3739;Maraichage;126 +3754;Maraichage;126 +3780;Maraichage;126 +3805;Maraichage;126 +3824;Maraichage;126 +3886;Maraichage;126 +3892;Maraichage;102 +3902;Maraichage;126 +3903;Maraichage;126 +3909;Maraichage;126 +3934;Maraichage;126 +3994;Maraichage;126 +3995;Maraichage;126 +4031;Maraichage;102 +4048;Maraichage;126 +4083;Maraichage;102 +4106;Maraichage;102 +4114;Maraichage;102 +4133;Maraichage;102 +4142;Maraichage;102 +4148;Maraichage;102 +4210;Maraichage;102 +4228;Maraichage;102 +4288;Maraichage;102 +4289;Maraichage;102 +4296;Maraichage;102 +4304;Maraichage;102 +4333;Maraichage;7405 +4528;Maraichage;7405 +4569;Maraichage;7405 +4585;Maraichage;7405 +4662;Maraichage;7405 +4833;Maraichage;6905 +4840;Maraichage;6905 +4857;Maraichage;6905 +4886;Maraichage;6905 +4930;Maraichage;6905 +4986;Maraichage;6905 +4996;Maraichage;6905 +5008;Maraichage;6905 +5050;Maraichage;6905 +5067;Mais;135 +5090;Mais;135 +5096;Maraichage;6905 +5113;Mais;135 +5117;Mais;135 +5122;Maraichage;6905 +5123;Maraichage;6905 +5132;Maraichage;6905 +5133;Mais;135 +5141;Maraichage;6905 +5163;Mais;135 +5180;Maraichage;6905 +5190;Mais;135 +5221;Maraichage;6905 +5226;Mais;135 +5230;Maraichage;6905 +5231;Maraichage;6905 +5255;Maraichage;6905 +5268;Maraichage;6905 +5282;Mais;135 +5304;Mais;135 +5329;Maraichage;6905 +5331;Mais;135 +5379;Mais;135 +5414;Mais;135 +5416;Mais;101 +5442;Mais;135 +5467;Mais;135 +5517;Mais;135 +5518;Mais;101 +5526;Mais;135 +5533;Mais;135 +5540;Mais;119 +5545;Mais;101 +5551;Mais;119 +5564;Mais;135 +5565;Mais;135 +5567;Mais;101 +5568;Mais;101 +5583;Mais;119 +5598;Mais;135 +5599;Mais;117 +5606;Mais;119 +5613;Mais;135 +5619;Mais;119 +5646;Mais;135 +5653;Mais;119 +5654;Mais;119 +5659;Mais;135 +5666;Mais;119 +5679;Mais;117 +5684;Mais;101 +5688;Mais;120 +5707;Mais;119 +5715;Mais;119 +5717;Mais;120 +5718;Mais;119 +5729;Mais;117 +5738;Mais;119 +5743;Mais;6925 +5748;Mais;120 +5750;Mais;117 +5757;Mais;6925 +5765;Mais;6925 +5769;Mais;117 +5777;Mais;6925 +5778;Mais;6925 +5785;Mais;117 +5788;Mais;119 +5789;Mais;119 +5797;Mais;120 +5798;Mais;119 +5802;Mais;120 +5805;Mais;120 +5806;Mais;117 +5821;Vergers;6910 +5824;Mais;6925 +5846;Mais;120 +5847;Mais;119 +5852;Mais;117 +5857;Mais;6925 +5859;Mais;120 +5860;Mais;120 +5862;Mais;117 +5874;Mais;6925 +5878;Vergers;6910 +5879;Vergers;6910 +5880;Mais;120 +5884;Mais;140 +5885;Mais;140 +5892;Mais;119 +5893;Mais;119 +5898;Mais;117 +5908;Mais;3808 +5909;Mais;3808 +5919;Mais;120 +5934;Mais;3819 +5940;Vergers;6910 +5943;Mais;120 +5944;Mais;3808 +5962;Mais;140 +5965;Mais;119 +5967;Mais;3808 +5968;Mais;118 +5975;Mais;140 +5987;Mais;120 +5995;Mais;3819 +5996;Mais;3819 +6004;Mais;117 +6007;Mais;117 +6008;Mais;3808 +6012;Mais;119 +6019;Mais;117 +6030;Mais;3819 +6038;Mais;118 +6042;Mais;3846 +6043;Mais;3808 +6044;Mais;118 +6050;Mais;117 +6051;Mais;3808 +6052;Mais;3819 +6060;Mais;118 +6062;Mais;3846 +6063;Mais;3808 +6073;Mais;6937 +6086;Mais;6937 +6087;Mais;3819 +6093;Mais;3819 +6106;Mais;6937 +6109;Mais;3846 +6115;Mais;117 +6116;Mais;3808 +6122;Mais;3819 +6133;Mais;3846 +6137;Mais;6937 +6138;Mais;117 +6145;Vergers;6931 +6146;Vergers;6931 
+6147;Mais;6944 +6149;Mais;3808 +6150;Mais;3808 +6153;Mais;3808 +6154;Mais;3808 +6163;Mais;3846 +6168;Mais;3808 +6171;Mais;6944 +6175;Mais;3808 +6178;Mais;6937 +6179;Mais;6937 +6183;Vergers;6931 +6185;Mais;3819 +6188;Mais;3819 +6191;Vergers;6931 +6193;Mais;3808 +6206;Mais;6944 +6211;Vergers;6931 +6214;Mais;3819 +6217;Mais;6944 +6218;Mais;3846 +6219;Mais;3819 +6225;Vergers;6931 +6227;Mais;3819 +6231;Mais;3808 +6234;Mais;3819 +6235;Mais;3808 +6239;Vergers;6931 +6240;Mais;118 +6243;Mais;3808 +6247;Mais;3819 +6254;Mais;6937 +6255;Mais;6937 +6256;Mais;3819 +6263;Mais;3808 +6268;Mais;3819 +6278;Mais;3819 +6281;Vergers;6931 +6283;Mais;3819 +6288;Vergers;6931 +6289;Mais;6937 +6297;Vergers;6931 +6305;Mais;6945 +6308;Mais;3808 +6309;Mais;3808 +6316;Mais;3808 +6321;Mais;3819 +6327;Mais;3837 +6337;Vergers;6931 +6338;Vergers;6931 +6339;Mais;3808 +6340;Mais;3819 +6350;Vergers;6931 +6352;Mais;6937 +6353;Mais;3808 +6359;Mais;3819 +6360;Mais;6949 +6361;Mais;3819 +6366;Mais;3808 +6371;Mais;3808 +6376;Vergers;6924 +6377;Mais;3837 +6378;Mais;3808 +6379;Mais;3819 +6382;Vergers;6924 +6386;Mais;6938 +6392;Mais;3837 +6393;Mais;3808 +6394;Mais;3819 +6396;Mais;3819 +6401;Vergers;6931 +6402;Mais;3819 +6403;Mais;3819 +6408;Vergers;6924 +6409;Mais;6938 +6411;Mais;3808 +6412;Mais;3853 +6419;Mais;6938 +6420;Mais;3853 +6426;Vergers;6948 +6427;Mais;3819 +6434;Mais;3853 +6441;Mais;6938 +6443;Mais;3853 +6444;Mais;3819 +6447;Vergers;6924 +6449;Mais;6938 +6450;Mais;3815 +6452;Vergers;6924 +6454;Mais;3853 +6459;Mais;3837 +6461;Mais;3808 +6467;Vergers;6924 +6468;Mais;3853 +6470;Mais;6938 +6480;Vergers;6924 +6481;Mais;6949 +6483;Mais;6938 +6484;Mais;3853 +6490;Mais;6938 +6491;Mais;3819 +6494;Mais;6938 +6496;Mais;3853 +6497;Mais;3819 +6508;Mais;6938 +6509;Mais;3815 +6512;Mais;3819 +6513;Mais;3819 +6514;Mais;3819 +6518;Mais;3853 +6527;Mais;3853 +6530;Vergers;6924 +6538;Mais;6938 +6545;Mais;6938 +6551;Mais;3837 +6555;Mais;6938 +6557;Mais;6938 +6558;Mais;3815 +6564;Mais;3837 +6565;Mais;3853 +6572;Vergers;6924 +6573;Vergers;6924 +6574;Mais;3853 +6584;Vergers;6924 +6587;Mais;6938 +6588;Mais;3815 +6604;Vergers;6924 +6606;Mais;3837 +6608;Mais;3853 +6611;Mais;3837 +6626;Vergers;6924 +6632;Mais;3815 +6638;Vergers;6924 +6641;Mais;3815 +6648;Mais;3815 +6653;Mais;3815 +6674;Mais;3837 +6684;Vergers;6924 +6686;Mais;3837 +6698;Mais;3815 +6711;Maraichage;6907 +6712;Mais;3815 +6727;Mais;3815 +6748;Mais;3815 +6753;Mais;3837 +6772;Mais;3815 +6780;Vergers;4233 +6809;Maraichage;6907 +6824;Maraichage;6907 +6837;Maraichage;6907 +6844;Vergers;4233 +6857;Vergers;4233 +6868;Maraichage;6907 +6881;Maraichage;6907 +6911;Vergers;4233 +6934;Vergers;4213 +6935;Maraichage;6907 +6943;Mais;3807 +6957;Maraichage;6907 +6958;Maraichage;6907 +6967;Vergers;4233 +6983;Vergers;4233 +6988;Vergers;4233 +7042;Mais;3807 +7047;Mais;3802 +7053;Vergers;4213 +7060;Mais;3802 +7061;Mais;3802 +7066;Mais;3807 +7070;Vergers;4213 +7071;Vergers;3824 +7076;Mais;3802 +7084;Mais;3807 +7091;Mais;3802 +7095;Vergers;3824 +7103;Vergers;4213 +7104;Mais;3802 +7111;Mais;3807 +7115;Mais;3807 +7119;Vergers;3824 +7120;Vergers;3824 +7121;Mais;3807 +7122;Mais;3807 +7134;Vergers;4213 +7135;Vergers;3824 +7136;Mais;3807 +7144;Vergers;4213 +7146;Mais;3802 +7147;Mais;3802 +7155;Mais;3807 +7156;Mais;3807 +7161;Mais;3807 +7167;Vergers;4233 +7175;Vergers;3824 +7176;Mais;3802 +7177;Mais;3802 +7178;Mais;3802 +7182;Mais;3807 +7192;Mais;3807 +7197;Vergers;4213 +7198;Mais;3807 +7204;Mais;3807 +7209;Mais;3807 +7219;Vergers;3824 +7220;Mais;3802 +7221;Mais;3802 +7222;Mais;3807 +7227;Mais;3807 +7236;Mais;3802 
+7237;Mais;3802 +7238;Mais;3802 +7239;Mais;3807 +7249;Vergers;4213 +7250;Vergers;4213 +7258;Vergers;3824 +7259;Mais;3807 +7265;Vergers;4213 +7268;Vergers;3824 +7270;Mais;3807 +7282;Mais;3807 +7291;Vergers;3824 +7292;Mais;3802 +7301;Vergers;3824 +7302;Mais;3802 +7303;Mais;3802 +7304;Mais;3802 +7305;Mais;3807 +7307;Mais;3802 +7309;Vergers;3824 +7315;Vergers;3824 +7332;Mais;3807 +7339;Mais;3825 +7343;Vergers;722 +7344;Mais;3825 +7347;Mais;3802 +7353;Vergers;3824 +7363;Mais;3802 +7365;Vergers;4213 +7366;Mais;3802 +7374;Vergers;3824 +7379;Vergers;722 +7380;Vergers;3824 +7384;Vergers;3824 +7390;Mais;3825 +7392;Mais;3825 +7396;Vergers;4213 +7397;Mais;2611 +7398;Mais;3825 +7409;Mais;2611 +7412;Vergers;3824 +7422;Vergers;3824 +7423;Mais;3825 +7436;Mais;3825 +7440;Vergers;722 +7450;Vergers;3824 +7462;Mais;2611 +7469;Vergers;722 +7470;Vergers;3824 +7471;Mais;3825 +7474;Mais;3825 +7480;Mais;2611 +7483;Vergers;722 +7484;Mais;2626 +7485;Mais;3825 +7489;Vergers;722 +7494;Vergers;722 +7495;Mais;2626 +7497;Mais;3825 +7509;Mais;2611 +7518;Vergers;722 +7524;Mais;3825 +7539;Mais;2611 +7540;Mais;3825 +7545;Mais;2626 +7546;Mais;2611 +7556;Vergers;722 +7567;Mais;2626 +7568;Mais;3825 +7572;Vergers;722 +7573;Vergers;722 +7576;Mais;2611 +7577;Mais;2611 +7586;Vergers;722 +7587;Mais;2626 +7588;Mais;2611 +7589;Vergers;722 +7590;Mais;2611 +7591;Mais;3825 +7599;Mais;2626 +7616;Mais;2626 +7624;Mais;2626 +7625;Mais;2611 +7641;Mais;3825 +7648;Mais;2626 +7649;Mais;2611 +7657;Mais;2611 +7663;Mais;2626 +7668;Mais;2626 +7671;Mais;2611 +7678;Mais;2626 +7679;Mais;2611 +7684;Mais;2626 +7685;Mais;2626 +7689;Mais;2611 +7693;Mais;2611 +7694;Vergers;3830 +7709;Vergers;3830 +7715;Mais;2626 +7716;Mais;2621 +7724;Mais;2626 +7729;Vergers;3830 +7743;Mais;2626 +7750;Mais;2626 +7751;Mais;2621 +7759;Mais;2626 +7765;Mais;2623 +7770;Mais;2626 +7775;Vergers;3830 +7779;Mais;2626 +7780;Mais;2611 +7787;Vergers;3830 +7788;Vergers;3830 +7792;Vergers;3830 +7802;Vergers;724 +7808;Vergers;3830 +7809;Vergers;3830 +7816;Mais;2623 +7819;Vergers;724 +7820;Mais;2623 +7821;Mais;2621 +7822;Vergers;3830 +7826;Mais;2626 +7842;Mais;2626 +7843;Mais;2621 +7848;Mais;2626 +7849;Vergers;3830 +7857;Mais;2626 +7858;Vergers;3830 +7863;Mais;2626 +7867;Mais;2621 +7871;Vergers;3830 +7876;Mais;2621 +7877;Vergers;3830 +7886;Vergers;724 +7892;Mais;2623 +7893;Vergers;724 +7894;Mais;2623 +7896;Mais;2621 +7906;Mais;2621 +7912;Mais;2621 +7913;Vergers;3830 +7914;Vergers;3830 +7915;Vergers;3822 +7921;Vergers;724 +7925;Vergers;2628 +7929;Vergers;2628 +7930;Mais;2623 +7933;Mais;2621 +7938;Mais;2623 +7939;Vergers;3830 +7950;Mais;2623 +7951;Mais;2623 +7955;Vergers;2628 +7956;Mais;2623 +7957;Mais;2621 +7961;Vergers;724 +7962;Mais;2623 +7963;Mais;2621 +7967;Vergers;3830 +7968;Vergers;3830 +7969;Vergers;3822 +7975;Mais;2623 +7976;Vergers;3830 +7980;Mais;2621 +7984;Mais;2623 +7985;Mais;2621 +7992;Mais;2621 +7994;Vergers;2628 +7999;Vergers;717 +8000;Mais;2621 +8003;Mais;2621 +8010;Mais;2621 +8012;Vergers;724 +8013;Vergers;3830 +8014;Vergers;3830 +8020;Mais;2602 +8021;Vergers;3822 +8024;Vergers;3822 +8034;Vergers;2628 +8039;Vergers;717 +8040;Vergers;2628 +8041;Vergers;3822 +8048;Vergers;717 +8049;Vergers;2628 +8050;Mais;2621 +8052;Mais;2621 +8053;Mais;2621 +8054;Vergers;717 +8055;Vergers;2628 +8056;Mais;2621 +8058;Vergers;3830 +8061;Mais;2621 +8078;Vergers;3822 +8090;Vergers;3822 +8094;Vergers;717 +8095;Mais;2621 +8104;Mais;2602 +8115;Mais;2621 +8118;Mais;2602 +8121;Vergers;724 +8124;Vergers;717 +8125;Vergers;724 +8126;Vergers;2628 +8128;Vergers;2628 +8129;Mais;2602 +8133;Vergers;2628 
+8134;Mais;2621 +8135;Mais;2602 +8154;Vergers;724 +8160;Vergers;717 +8167;Vergers;724 +8168;Mais;2602 +8178;Mais;2602 +8179;Mais;2602 +8181;Vergers;2628 +8182;Mais;2602 +8185;Mais;2602 +8186;Mais;2602 +8203;Vergers;717 +8204;Mais;2602 +8206;Mais;2602 +8225;Vergers;2628 +8226;Vergers;724 +8227;Mais;2602 +8230;Vergers;724 +8236;Vergers;724 +8237;Vergers;2628 +8238;Mais;2602 +8246;Mais;2602 +8253;Mais;2602 +8258;Mais;2602 +8263;Vergers;2628 +8280;Mais;2602 +8286;Mais;2602 +8289;Mais;2602 +8294;Mais;2602 +8298;Mais;2602 +8310;Mais;2632 +8311;Mais;2602 +8316;Mais;2632 +8322;Mais;2602 +8337;Mais;2602 +8343;Mais;2602 +8360;Mais;2602 +8377;Mais;2602 +8391;Mais;2602 +8413;Mais;2602 +8431;Mais;2604 +8436;Mais;2629 +8441;Mais;2604 +8442;Mais;2602 +8449;Mais;2604 +8450;Mais;2602 +8453;Mais;2604 +8459;Mais;2604 +8480;Mais;2604 +8496;Mais;2604 +8543;Mais;2604 +8550;Mais;2634 +8558;Mais;2629 +8559;Mais;2604 +8562;Mais;2604 +8567;Mais;2604 +8575;Mais;2604 +8594;Mais;2634 +8609;Mais;2604 +8614;Mais;2604 +8640;Mais;2604 +8644;Mais;2604 +8651;Mais;2634 +8657;Mais;2634 +8678;Mais;2634 +8686;Mais;2634 +8700;Mais;2634 +8712;Mais;2604 +8727;Mais;2634 +8741;Mais;2634 +8765;Mais;2604 +8779;Mais;2604 +8783;Mais;2634 +8784;Mais;2604 +8803;Vergers;2613 +8804;Mais;2604 +8823;Mais;2604 +8849;Mais;2607 +8854;Mais;2604 +8870;Mais;2607 +8871;Mais;2607 +8897;Mais;2604 +8905;Vergers;2613 +8921;Mais;2607 +8926;Mais;2607 +8942;Vergers;2613 +8943;Mais;2607 +8957;Vergers;2613 +8962;Vergers;2613 +8982;Mais;2607 +8984;Vergers;2613 +8985;Mais;2607 +8986;Mais;2607 +9010;Mais;2607 +9027;Prairies;518 +9033;Vergers;2613 +9034;Mais;2607 +9035;Mais;2607 +9043;Mais;2607 +9044;Prairies;518 +9061;Vergers;2613 +9063;Mais;2607 +9064;Mais;2607 +9079;Mais;2607 +9088;Prairies;518 +9096;Mais;2607 +9099;Prairies;518 +9107;Vergers;2613 +9108;Mais;2607 +9117;Mais;2607 +9144;Mais;2607 +9145;Mais;2607 +9148;Prairies;518 +9159;Prairies;518 +9168;Mais;2607 +9171;Mais;2607 +9172;Mais;2607 +9173;Mais;2607 +9186;Prairies;518 +9188;Mais;2607 +9193;Vergers;2613 +9194;Vergers;2613 +9195;Mais;2607 +9201;Mais;2607 +9225;Prairies;518 +9236;Prairies;518 +9247;Prairies;518 +9248;Prairies;518 +9249;Prairies;518 +9263;Mais;2607 +9270;Mais;2607 +9273;Prairies;518 +9300;Prairies;518 +9301;Prairies;518 +9304;Vergers;2613 +9311;Vergers;2613 +9326;Prairies;518 +9327;Prairies;518 +9336;Prairies;518 +9350;Prairies;518 +9356;Mais;2607 +9362;Mais;2615 +9403;Prairies;509 +9410;Mais;2615 +9419;Mais;2607 +9421;Prairies;509 +9443;Mais;2615 +9444;Mais;2615 +9449;Mais;2615 +9451;Mais;2607 +9458;Prairies;505 +9463;Mais;2615 +9466;Prairies;509 +9472;Prairies;505 +9482;Mais;2615 +9492;Mais;2607 +9493;Mais;2607 +9509;Mais;2615 +9514;Prairies;509 +9515;Mais;2615 +9522;Mais;2615 +9525;Prairies;505 +9527;Prairies;505 +9535;Mais;2615 +9536;Mais;2615 +9542;Prairies;505 +9562;Prairies;509 +9573;Prairies;509 +9578;Prairies;509 +9595;Mais;2615 +9598;Prairies;505 +9614;Mais;2616 +9619;Mais;2615 +9624;Mais;2615 +9627;Prairies;509 +9630;Mais;2615 +9634;Mais;2615 +9638;Prairies;509 +9654;Prairies;524 +9655;Prairies;524 +9656;Prairies;505 +9666;Prairies;505 +9670;Mais;2616 +9673;Prairies;509 +9679;Prairies;524 +9680;Prairies;509 +9689;Prairies;509 +9697;Mais;2616 +9700;Prairies;505 +9718;Mais;2616 +9721;Prairies;509 +9729;Prairies;509 +9730;Prairies;509 +9731;Prairies;505 +9733;Mais;2616 +9734;Prairies;524 +9751;Mais;2616 +9753;Prairies;509 +9770;Prairies;523 +9775;Mais;2616 +9808;Prairies;524 +9810;Prairies;523 +9825;Mais;2616 +9830;Mais;2616 +9842;Prairies;524 +9847;Mais;2616 +9850;Prairies;524 
+9852;Mais;2616 +9860;Prairies;523 +9871;Mais;2616 +9880;Mais;2616 +9882;Prairies;523 +9890;Prairies;522 +9891;Mais;2616 +9926;Prairies;523 +9938;Mais;2616 +9940;Prairies;523 +9949;Prairies;522 +9951;Prairies;523 +9990;Prairies;524 +10006;Prairies;522 +10007;Prairies;522 +10016;Prairies;523 +10018;Prairies;523 +10022;Mais;2619 +10025;Prairies;523 +10029;Prairies;522 +10063;Prairies;522 +10087;Prairies;522 +10094;Prairies;522 +10120;Mais;2619 +10130;Mais;2619 +10134;Vergers;416 +10156;Prairies;522 +10157;Prairies;522 +10158;Vergers;512 +10186;Prairies;522 +10195;Vergers;512 +10201;Mais;2619 +10224;Vergers;416 +10228;Mais;2619 +10234;Mais;2619 +10236;Vergers;512 +10246;Prairies;522 +10267;Mais;2619 +10276;Mais;2619 +10284;Vergers;512 +10288;Vergers;512 +10293;Mais;2619 +10302;Mais;2619 +10317;Vigne;2625 +10327;Vergers;512 +10332;Vergers;512 +10358;Vigne;2625 +10364;Mais;2619 +10378;Vigne;2625 +10379;Vergers;512 +10380;Mais;2619 +10395;Vergers;8406 +10398;Vergers;512 +10405;Mais;2619 +10410;Vergers;515 +10411;Vergers;512 +10415;Mais;2619 +10426;Vigne;2625 +10429;Vigne;2625 +10438;Vigne;2625 +10442;Vergers;416 +10452;Mais;2619 +10453;Vigne;2625 +10456;Vergers;512 +10457;Vergers;512 +10463;Vergers;512 +10468;Vigne;2625 +10490;Vergers;515 +10500;Vigne;2625 +10512;Vigne;2625 +10520;Vigne;2625 +10533;Vergers;416 +10537;Vergers;416 +10545;Vergers;416 +10552;Vergers;512 +10557;Vergers;8406 +10558;Vigne;2625 +10566;Vigne;2625 +10588;Vigne;2625 +10604;Vergers;8406 +10609;Vigne;2625 +10615;Vergers;427 +10620;Vergers;427 +10634;Vigne;2625 +10636;Vergers;427 +10640;Vergers;8406 +10645;Vergers;427 +10649;Vigne;2625 +10657;Vergers;8406 +10674;Vergers;427 +10679;Vigne;2625 +10683;Vergers;427 +10697;Vergers;416 +10703;Vigne;2625 +10711;Vergers;8406 +10712;Vergers;516 +10722;Vergers;8406 +10732;Vergers;8406 +10733;Vergers;8406 +10734;Vergers;8406 +10745;Vergers;8406 +10749;Vergers;8406 +10757;Vergers;516 +10758;Vergers;427 +10785;Vergers;8406 +10792;Vergers;8406 +10827;Vergers;516 +10834;Vergers;427 +10835;Vergers;427 +10846;Vergers;8406 +10879;Vergers;8406 +10893;Vigne;8416 +10894;Vigne;8416 +10908;Vergers;8406 +10931;Vigne;8416 +10938;Vigne;8416 +10947;Vigne;8416 +10970;Vigne;8416 +10971;Vigne;8416 +10974;Ble dur;430 +10979;Ble dur;430 +11004;Vigne;8416 +11026;Vigne;8416 +11027;Vigne;8416 +11032;Vigne;8416 +11057;Vigne;8416 +11063;Vigne;8416 +11069;Vigne;8416 +11075;Vigne;8416 +11082;Ble dur;430 +11116;Ble dur;430 +11120;Vigne;8416 +11136;Vigne;8416 +11143;Vigne;8415 +11166;Vigne;8416 +11167;Vigne;8409 +11175;Vigne;8415 +11179;Vigne;8415 +11180;Vigne;8415 +11188;Vigne;8416 +11189;Vigne;8416 +11190;Vigne;8409 +11191;Vigne;8415 +11198;Mais;8405 +11208;Vigne;8409 +11216;Vigne;8416 +11223;Vigne;8409 +11224;Vigne;8409 +11227;Ble dur;430 +11232;Vigne;8415 +11238;Vigne;8416 +11245;Vigne;8416 +11246;Mais;8405 +11261;Vigne;8416 +11262;Vigne;8409 +11266;Vigne;8409 +11267;Vigne;8415 +11274;Maraichage;3023 +11275;Vigne;8416 +11276;Vigne;8415 +11281;Vigne;8416 +11283;Ble dur;430 +11305;Vigne;8409 +11313;Maraichage;3023 +11314;Maraichage;3023 +11315;Mais;8405 +11316;Vigne;8409 +11319;Vigne;8416 +11320;Vigne;8416 +11321;Vigne;8409 +11333;Mais;8405 +11349;Vigne;8409 +11350;Vigne;8409 +11354;Vigne;8416 +11355;Mais;8405 +11363;Maraichage;3023 +11366;Maraichage;3023 +11367;Maraichage;3023 +11368;Vigne;8409 +11374;Mais;8405 +11375;Vigne;8409 +11390;Vigne;8409 +11398;Vigne;8416 +11401;Vergers;414 +11406;Maraichage;3023 +11407;Vigne;8409 +11408;Vigne;8415 +11416;Maraichage;8418 +11417;Vigne;8409 +11419;Vigne;8415 
+11425;Maraichage;3023 +11426;Vigne;8409 +11434;Vigne;8415 +11450;Maraichage;3026 +11453;Maraichage;3023 +11454;Vigne;8416 +11455;Vigne;8416 +11460;Vigne;8409 +11468;Maraichage;3023 +11469;Vigne;8409 +11470;Vigne;8409 +11473;Vigne;8409 +11483;Maraichage;3023 +11485;Maraichage;3023 +11486;Mais;8405 +11491;Mais;8405 +11493;Vergers;414 +11499;Maraichage;3023 +11500;Maraichage;8418 +11510;Maraichage;3026 +11512;Mais;8405 +11521;Vigne;8409 +11523;Vergers;414 +11526;Mais;8405 +11534;Vergers;414 +11536;Maraichage;8418 +11540;Vigne;8409 +11543;Maraichage;8418 +11546;Mais;8405 +11547;Vigne;8409 +11562;Vigne;8409 +11571;Maraichage;3023 +11572;Vigne;8415 +11574;Vergers;414 +11578;Vigne;8415 +11583;Maraichage;3023 +11584;Vigne;8409 +11586;Maraichage;410 +11588;Vergers;414 +11598;Maraichage;3026 +11606;Prairies;8423 +11607;Maraichage;3023 +11608;Mais;8405 +11613;Maraichage;3026 +11617;Maraichage;8418 +11622;Maraichage;8418 +11625;Maraichage;410 +11626;Vergers;414 +11630;Maraichage;8418 +11631;Maraichage;8418 +11635;Maraichage;8418 +11639;Maraichage;410 +11644;Vergers;8413 +11649;Mais;8405 +11651;Maraichage;419 +11652;Maraichage;3026 +11655;Maraichage;8418 +11656;Maraichage;8418 +11670;Vergers;414 +11673;Maraichage;3026 +11675;Maraichage;419 +11680;Maraichage;8418 +11690;Vergers;414 +11694;Vergers;8413 +11700;Maraichage;3026 +11706;Maraichage;410 +11711;Maraichage;420 +11712;Vergers;414 +11716;Vergers;8413 +11721;Maraichage;420 +11726;Maraichage;419 +11734;Prairies;8423 +11739;Maraichage;8418 +11742;Maraichage;3026 +11746;Maraichage;410 +11750;Maraichage;3026 +11751;Maraichage;3026 +11753;Vergers;414 +11758;Maraichage;410 +11763;Maraichage;3026 +11782;Vergers;3006 +11785;Maraichage;3026 +11786;Vergers;8413 +11790;Maraichage;410 +11797;Vergers;414 +11799;Vergers;8413 +11804;Maraichage;410 +11807;Vergers;3006 +11811;Maraichage;3026 +11814;Maraichage;410 +11818;Vergers;8413 +11819;Vergers;8413 +11823;Maraichage;410 +11827;Vergers;8413 +11828;Vergers;8413 +11829;Vergers;414 +11832;Ble dur;421 +11834;Maraichage;3026 +11841;Vergers;1307 +11851;Maraichage;410 +11852;Ble dur;429 +11855;Vergers;3006 +11857;Maraichage;420 +11858;Ble dur;421 +11862;Prairies;8423 +11864;Vergers;8413 +11865;Maraichage;410 +11872;Vergers;8413 +11876;Maraichage;3026 +11877;Vergers;3006 +11878;Ble dur;421 +11881;Vergers;8413 +11885;Ble dur;429 +11887;Vergers;1307 +11889;Vergers;414 +11897;Vergers;8413 +11902;Vergers;8411 +11903;Ble dur;413 +11904;Ble dur;429 +11909;Vergers;3006 +11910;Vergers;3006 +11919;Vergers;8413 +11922;Vergers;3006 +11923;Vergers;1307 +11927;Vergers;8411 +11928;Maraichage;420 +11931;Vergers;8411 +11932;Ble dur;429 +11935;Prairies;8423 +11937;Maraichage;420 +11938;Vergers;3006 +11940;Ble dur;429 +11942;Vergers;1326 +11944;Maraichage;420 +11948;Ble dur;413 +11949;Ble dur;421 +11950;Vergers;1307 +11951;Vergers;8411 +11952;Vergers;8413 +11957;Ble dur;421 +11961;Maraichage;420 +11963;Ble dur;429 +11968;Ble dur;421 +11971;Vergers;3006 +11978;Riz;3045 +11980;Vergers;8411 +11983;Ble dur;429 +11984;Ble dur;429 +11985;Vergers;8411 +11987;Vergers;3006 +11992;Ble dur;413 +11993;Ble dur;421 +12000;Maraichage;420 +12003;Vergers;8411 +12005;Ble dur;421 +12006;Maraichage;1333 +12009;Ble dur;429 +12017;Ble dur;413 +12022;Vergers;8411 +12023;Maraichage;1333 +12028;Ble dur;421 +12033;Vergers;1307 +12036;Ble dur;413 +12039;Vergers;1326 +12040;Vergers;8411 +12046;Vergers;3009 +12058;Vergers;3006 +12059;Vergers;1326 +12063;Vergers;1326 +12066;Ble dur;421 +12073;Ble dur;429 +12074;Ble dur;421 +12078;Vergers;3009 +12079;Vergers;1326 
+12084;Vergers;3009 +12087;Ble dur;413 +12091;Ble dur;421 +12093;Vergers;8411 +12106;Vergers;3016 +12107;Ble dur;413 +12109;Vergers;3009 +12114;Vergers;1307 +12115;Vergers;1326 +12120;Ble dur;413 +12125;Ble dur;413 +12126;Ble dur;413 +12129;Vergers;3009 +12130;Vergers;3009 +12132;Ble dur;421 +12142;Vergers;1326 +12144;Ble dur;421 +12146;Vergers;1326 +12150;Ble dur;413 +12151;Vergers;1326 +12153;Ble dur;421 +12155;Vergers;1331 +12167;Vergers;3016 +12173;Vergers;8411 +12174;Vergers;8411 +12180;Prairies;8408 +12184;Vergers;1326 +12186;Ble dur;429 +12196;Ble dur;421 +12202;Vergers;1326 +12204;Vergers;1326 +12210;Vergers;1331 +12212;Ble dur;421 +12214;Vergers;1326 +12215;Prairies;8408 +12219;Vergers;1326 +12241;Vergers;8411 +12242;Ble dur;8319 +12246;Prairies;8408 +12256;Vergers;1331 +12260;Vergers;1331 +12262;Prairies;8408 +12267;Prairies;8408 +12270;Prairies;8408 +12272;Prairies;8408 +12273;Prairies;8408 +12274;Prairies;8408 +12275;Prairies;8408 +12276;Prairies;8408 +12281;Prairies;8408 +12286;Prairies;1312 +12289;Vergers;1326 +12290;Prairies;8408 +12296;Prairies;1309 +12298;Ble dur;8319 +12301;Vergers;1326 +12303;Ble dur;421 +12307;Prairies;8408 +12308;Vergers;1326 +12313;Prairies;1309 +12314;Ble dur;8319 +12317;Prairies;8408 +12319;Vergers;1326 +12320;Prairies;1309 +12329;Prairies;1312 +12330;Prairies;8408 +12337;Prairies;1309 +12342;Prairies;1309 +12345;Prairies;8408 +12353;Prairies;1309 +12356;Ble dur;8319 +12364;Prairies;1309 +12375;Prairies;1309 +12378;Prairies;1312 +12393;Prairies;8408 +12394;Ble dur;1327 +12397;Ble dur;1327 +12408;Ble dur;8319 +12409;Prairies;1309 +12410;Ble dur;8319 +12412;Ble dur;1327 +12415;Ble dur;1327 +12417;Ble dur;1327 +12422;Ble dur;1327 +12423;Ble dur;1327 diff --git a/irrigation-R-codes/Irrigation/RGACultures2010_Cantons_BVRhone_sanssecret_20131001.xlsx b/irrigation-R-codes/Irrigation/RGACultures2010_Cantons_BVRhone_sanssecret_20131001.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..7206e58f88655c1539ed6d9e7e2c239551cc035f Binary files /dev/null and b/irrigation-R-codes/Irrigation/RGACultures2010_Cantons_BVRhone_sanssecret_20131001.xlsx differ diff --git a/irrigation-R-codes/Irrigation/Resultats/Resultats_irrigation_new_avec_dose_1987_2007.xlsx b/irrigation-R-codes/Irrigation/Resultats/Resultats_irrigation_new_avec_dose_1987_2007.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..2d4eac328751c30c8de5cbba74a10f3b2045caaf Binary files /dev/null and b/irrigation-R-codes/Irrigation/Resultats/Resultats_irrigation_new_avec_dose_1987_2007.xlsx differ diff --git a/irrigation-R-codes/Irrigation/Resultats/Resultats_irrigation_new_avec_dose_1987_2007_demande_corrigee.xlsx b/irrigation-R-codes/Irrigation/Resultats/Resultats_irrigation_new_avec_dose_1987_2007_demande_corrigee.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..b33f59c632726183464dea0795892ee74dff66b9 Binary files /dev/null and b/irrigation-R-codes/Irrigation/Resultats/Resultats_irrigation_new_avec_dose_1987_2007_demande_corrigee.xlsx differ diff --git a/irrigation-R-codes/Irrigation/Resultats/Resultats_irrigation_new_sans_dose_1987_2007.xlsx b/irrigation-R-codes/Irrigation/Resultats/Resultats_irrigation_new_sans_dose_1987_2007.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..53c8fbdc9fa4a07160ebd244e2a78564dda76574 Binary files /dev/null and b/irrigation-R-codes/Irrigation/Resultats/Resultats_irrigation_new_sans_dose_1987_2007.xlsx differ diff --git 
a/irrigation-R-codes/Irrigation/Resultats/Resultats_irrigation_new_sans_dose_1987_2007_demande_corrigee.xlsx b/irrigation-R-codes/Irrigation/Resultats/Resultats_irrigation_new_sans_dose_1987_2007_demande_corrigee.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..82ece5dd0cf5dde1de96c799fab998b52fc78d22 Binary files /dev/null and b/irrigation-R-codes/Irrigation/Resultats/Resultats_irrigation_new_sans_dose_1987_2007_demande_corrigee.xlsx differ diff --git a/irrigation-R-codes/Irrigation/Shapes/CantonsModif_BVRhone_IRR.dbf b/irrigation-R-codes/Irrigation/Shapes/CantonsModif_BVRhone_IRR.dbf new file mode 100644 index 0000000000000000000000000000000000000000..8fe3e6b55e9cb005a8ce1fc9599d9ca3d5673a15 Binary files /dev/null and b/irrigation-R-codes/Irrigation/Shapes/CantonsModif_BVRhone_IRR.dbf differ diff --git a/irrigation-R-codes/Irrigation/Shapes/CantonsModif_BVRhone_IRR.prj b/irrigation-R-codes/Irrigation/Shapes/CantonsModif_BVRhone_IRR.prj new file mode 100644 index 0000000000000000000000000000000000000000..56757fc434d98d5eddf8c29389e42675dd4845ab --- /dev/null +++ b/irrigation-R-codes/Irrigation/Shapes/CantonsModif_BVRhone_IRR.prj @@ -0,0 +1 @@ +PROJCS["RGF93_Lambert_93",GEOGCS["GCS_RGF_1993",DATUM["D_RGF_1993",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",700000.0],PARAMETER["False_Northing",6600000.0],PARAMETER["Central_Meridian",3.0],PARAMETER["Standard_Parallel_1",44.0],PARAMETER["Standard_Parallel_2",49.0],PARAMETER["Latitude_Of_Origin",46.5],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/irrigation-R-codes/Irrigation/Shapes/CantonsModif_BVRhone_IRR.shp b/irrigation-R-codes/Irrigation/Shapes/CantonsModif_BVRhone_IRR.shp new file mode 100644 index 0000000000000000000000000000000000000000..fe2b75b8edcd380d7185c3338cfae0ca889303fe Binary files /dev/null and b/irrigation-R-codes/Irrigation/Shapes/CantonsModif_BVRhone_IRR.shp differ diff --git a/irrigation-R-codes/Irrigation/Shapes/CantonsModif_BVRhone_IRR.shx b/irrigation-R-codes/Irrigation/Shapes/CantonsModif_BVRhone_IRR.shx new file mode 100644 index 0000000000000000000000000000000000000000..2195167a42df672c2fe3f505ab4ace8e2e5bff5c Binary files /dev/null and b/irrigation-R-codes/Irrigation/Shapes/CantonsModif_BVRhone_IRR.shx differ diff --git a/irrigation-R-codes/Irrigation/Shapes/CantonsModif_BVRhone_IRR.xml b/irrigation-R-codes/Irrigation/Shapes/CantonsModif_BVRhone_IRR.xml new file mode 100644 index 0000000000000000000000000000000000000000..528fa5b86ea2f0e1681accdeabaf5bfa54916995 --- /dev/null +++ b/irrigation-R-codes/Irrigation/Shapes/CantonsModif_BVRhone_IRR.xml @@ -0,0 +1,2 @@ +<?xml version="1.0" encoding="UTF-8"?> +<metadata xml:lang="fr"><Esri><CreaDate>20131107</CreaDate><CreaTime>09120500</CreaTime><ArcGISFormat>1.0</ArcGISFormat><SyncOnce>FALSE</SyncOnce><DataProperties><itemProps><itemName Sync="TRUE">CantonsModif_BVRhone_V2</itemName><imsContentType Sync="TRUE">002</imsContentType><itemSize Sync="TRUE">0.000</itemSize><itemLocation><linkage Sync="TRUE">file://\\ENTREPOT-HH\depot\MDR\SIG\Géographie\Administratif\CantonsModif_BVRhone_V2.shp</linkage><protocol Sync="TRUE">Local Area Network</protocol></itemLocation></itemProps><coordRef><type Sync="TRUE">Projected</type><geogcsn Sync="TRUE">GCS_RGF_1993</geogcsn><projcsn Sync="TRUE">RGF93_Lambert_93</projcsn><peXml Sync="TRUE"><ProjectedCoordinateSystem xsi:type='typens:ProjectedCoordinateSystem' 
xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' xmlns:xs='http://www.w3.org/2001/XMLSchema' xmlns:typens='http://www.esri.com/schemas/ArcGIS/10.0'><WKT>PROJCS[&quot;RGF93_Lambert_93&quot;,GEOGCS[&quot;GCS_RGF_1993&quot;,DATUM[&quot;D_RGF_1993&quot;,SPHEROID[&quot;GRS_1980&quot;,6378137.0,298.257222101]],PRIMEM[&quot;Greenwich&quot;,0.0],UNIT[&quot;Degree&quot;,0.0174532925199433]],PROJECTION[&quot;Lambert_Conformal_Conic&quot;],PARAMETER[&quot;False_Easting&quot;,700000.0],PARAMETER[&quot;False_Northing&quot;,6600000.0],PARAMETER[&quot;Central_Meridian&quot;,3.0],PARAMETER[&quot;Standard_Parallel_1&quot;,44.0],PARAMETER[&quot;Standard_Parallel_2&quot;,49.0],PARAMETER[&quot;Latitude_Of_Origin&quot;,46.5],UNIT[&quot;Meter&quot;,1.0]]</WKT><XOrigin>-35597500</XOrigin><YOrigin>-23641900</YOrigin><XYScale>124074650.52332792</XYScale><ZOrigin>-100000</ZOrigin><ZScale>10000</ZScale><MOrigin>-100000</MOrigin><MScale>10000</MScale><XYTolerance>0.001</XYTolerance><ZTolerance>0.001</ZTolerance><MTolerance>0.001</MTolerance><HighPrecision>true</HighPrecision></ProjectedCoordinateSystem></peXml></coordRef><lineage><Process ToolSource="C:\Program Files (x86)\ArcGIS\Desktop10.0\ArcToolbox\Toolboxes\Analysis Tools.tbx\Intersect" Date="20131004" Time="112739">Intersect "'>>>Divers Sols\>RGA 2010\RGA2010_Cantons_SurfTotales' #;'>>>Géographie\>Administratif France\Communes_GEOFLA2012_France' #" Z:\MDR\SIG\Géographie\Administratif\Communes_GEOFLA2012_BVRhone.shp ALL # INPUT</Process><Process ToolSource="C:\Program Files (x86)\ArcGIS\Desktop10.0\ArcToolbox\Toolboxes\Data Management Tools.tbx\CalculateField" Date="20131107" Time="085150">CalculateField Communes_GEOFLA2012_BVRhone RScom "[SUPERFIC_1]/ [ScomINbv]" VB #</Process><Process ToolSource="C:\Program Files (x86)\ArcGIS\Desktop10.0\ArcToolbox\Toolboxes\Data Management Tools.tbx\CalculateField" Date="20131107" Time="085222">CalculateField Communes_GEOFLA2012_BVRhone RScom [SUPERFIC_1]/[ScomINbv] VB #</Process><Process ToolSource="C:\Program Files (x86)\ArcGIS\Desktop10.0\ArcToolbox\Toolboxes\Data Management Tools.tbx\CalculateField" Date="20131107" Time="085255">CalculateField Communes_GEOFLA2012_BVRhone RScom [SUPERFIC_1]/[ScomINbv] VB #</Process><Process ToolSource="C:\Program Files (x86)\ArcGIS\Desktop10.0\ArcToolbox\Toolboxes\Analysis Tools.tbx\Clip" Date="20131107" Time="090607">Clip ">>>Géographie\>Administratif France\Communes_GEOFLA2012_BVRhone" ">>>Hydrologie\> Bassins Versants\Bassin du Rhone - EL200m" Z:\MDR\SIG\Géographie\Administratif\Communes_GEOFLA2012_BVRhone_V2.shp #</Process><Process ToolSource="C:\Program Files (x86)\ArcGIS\Desktop10.0\ArcToolbox\Toolboxes\Data Management Tools.tbx\CalculateField" Date="20131107" Time="091058">CalculateField Communes_GEOFLA2012_BVRhone_V2 RTCOM "[SCOMINBV]/ [SCOM]" VB #</Process><Process ToolSource="C:\Program Files (x86)\ArcGIS\Desktop10.0\ArcToolbox\Toolboxes\Data Management Tools.tbx\Dissolve" Date="20131107" Time="093009">Dissolve ">>>Géographie\>Administratif France\Cantons_GEOFLA2012_BVRhone_V2" Z:\MDR\SIG\Géographie\Administratif\CantonsModif_BVRhone_V2.shp SCANT;SCANTINBV;RTCANT;CODE_CAN_1;NOM_CANTON # MULTI_PART DISSOLVE_LINES</Process><Process ToolSource="C:\Program Files (x86)\ArcGIS\Desktop10.0\ArcToolbox\Toolboxes\Data Management Tools.tbx\CalculateField" Date="20131107" Time="093239">CalculateField CantonsModif_BVRhone_V2 RTCANT "[SCANTINBV]/ [SCANT]" VB 
#</Process></lineage></DataProperties><SyncDate>20131107</SyncDate><SyncTime>09300500</SyncTime><ModDate>20131107</ModDate><ModTime>09300500</ModTime></Esri><dataIdInfo><envirDesc Sync="TRUE">Microsoft Windows Server 2008 R2 Version 6.1 (Build 7601) Service Pack 1; ESRI ArcGIS 10.0.0.2414</envirDesc><dataLang><languageCode value="fra" country="FRA"></languageCode></dataLang><idCitation><resTitle Sync="TRUE">CantonsModif_BVRhone_V2</resTitle><presForm><PresFormCd value="005"></PresFormCd></presForm></idCitation><descKeys><thesaName uuidref="723f6998-058e-11dc-8314-0800200c9a66"></thesaName><keyword Sync="TRUE">002</keyword></descKeys><spatRpType><SpatRepTypCd value="001"></SpatRepTypCd></spatRpType></dataIdInfo><mdLang><languageCode value="fra"></languageCode></mdLang><mdChar><CharSetCd value="004"></CharSetCd></mdChar><distInfo><distFormat><formatName Sync="TRUE">Fichier de formes</formatName></distFormat><distTranOps><transSize Sync="TRUE">0.000</transSize></distTranOps></distInfo><mdHrLv><ScopeCd value="005"></ScopeCd></mdHrLv><mdHrLvName Sync="TRUE">dataset</mdHrLvName><refSysInfo><RefSystem><refSysID><identCode code="0"></identCode></refSysID></RefSystem></refSysInfo><spdoinfo><ptvctinf><esriterm Name="CantonsModif_BVRhone_V2"><efeatyp Sync="TRUE">Simple</efeatyp><efeageom Sync="TRUE">4</efeageom><esritopo Sync="TRUE">FALSE</esritopo><efeacnt Sync="TRUE">0</efeacnt><spindex Sync="TRUE">FALSE</spindex><linrefer Sync="TRUE">FALSE</linrefer></esriterm></ptvctinf></spdoinfo><spatRepInfo><VectSpatRep><topLvl><TopoLevCd value="001"></TopoLevCd></topLvl><geometObjs><geoObjTyp><GeoObjTypCd value="002"></GeoObjTypCd><geoObjCnt Sync="TRUE">0</geoObjCnt></geoObjTyp></geometObjs></VectSpatRep></spatRepInfo><eainfo><detailed Name="CantonsModif_BVRhone_V2"><enttyp><enttypl Sync="TRUE">CantonsModif_BVRhone_V2</enttypl><enttypt Sync="TRUE">Feature Class</enttypt><enttypc Sync="TRUE">0</enttypc></enttyp><attr><attrlabl Sync="TRUE">FID</attrlabl><attalias Sync="TRUE">FID</attalias><attrtype Sync="TRUE">OID</attrtype><attwidth Sync="TRUE">4</attwidth><atprecis Sync="TRUE">0</atprecis><attscale Sync="TRUE">0</attscale><attrdef Sync="TRUE">Internal feature number.</attrdef><attrdefs Sync="TRUE">ESRI</attrdefs><attrdomv><udom Sync="TRUE">Sequential unique whole numbers that are automatically generated.</udom></attrdomv></attr><attr><attrlabl Sync="TRUE">Shape</attrlabl><attalias Sync="TRUE">Shape</attalias><attrtype Sync="TRUE">Geometry</attrtype><attwidth Sync="TRUE">0</attwidth><atprecis Sync="TRUE">0</atprecis><attscale Sync="TRUE">0</attscale><attrdef Sync="TRUE">Feature geometry.</attrdef><attrdefs Sync="TRUE">ESRI</attrdefs><attrdomv><udom Sync="TRUE">Coordinates defining the features.</udom></attrdomv></attr><attr><attrlabl Sync="TRUE">SCANT</attrlabl><attalias Sync="TRUE">SCANT</attalias><attrtype Sync="TRUE">Double</attrtype><attwidth Sync="TRUE">17</attwidth><atprecis Sync="TRUE">16</atprecis><attscale Sync="TRUE">2</attscale></attr><attr><attrlabl Sync="TRUE">SCANTINBV</attrlabl><attalias Sync="TRUE">SCANTINBV</attalias><attrtype Sync="TRUE">Double</attrtype><attwidth Sync="TRUE">16</attwidth><atprecis Sync="TRUE">15</atprecis><attscale Sync="TRUE">2</attscale></attr><attr><attrlabl Sync="TRUE">RTCANT</attrlabl><attalias Sync="TRUE">RTCANT</attalias><attrtype Sync="TRUE">Double</attrtype><attwidth Sync="TRUE">11</attwidth><atprecis Sync="TRUE">10</atprecis><attscale Sync="TRUE">3</attscale></attr><attr><attrlabl Sync="TRUE">CODE_CAN_1</attrlabl><attalias Sync="TRUE">CODE_CAN_1</attalias><attrtype 
Sync="TRUE">Double</attrtype><attwidth Sync="TRUE">11</attwidth><atprecis Sync="TRUE">10</atprecis><attscale Sync="TRUE">2</attscale></attr><attr><attrlabl Sync="TRUE">NOM_CANTON</attrlabl><attalias Sync="TRUE">NOM_CANTON</attalias><attrtype Sync="TRUE">String</attrtype><attwidth Sync="TRUE">50</attwidth><atprecis Sync="TRUE">0</atprecis><attscale Sync="TRUE">0</attscale></attr></detailed></eainfo><mdDateSt Sync="TRUE">20131107</mdDateSt></metadata> diff --git a/irrigation-R-codes/Irrigation/Shapes/Cantons_irrigues.dbf b/irrigation-R-codes/Irrigation/Shapes/Cantons_irrigues.dbf new file mode 100644 index 0000000000000000000000000000000000000000..914f21f17258c66f1a5193656511afebfeafcbfa Binary files /dev/null and b/irrigation-R-codes/Irrigation/Shapes/Cantons_irrigues.dbf differ diff --git a/irrigation-R-codes/Irrigation/Shapes/Cantons_irrigues.prj b/irrigation-R-codes/Irrigation/Shapes/Cantons_irrigues.prj new file mode 100644 index 0000000000000000000000000000000000000000..5adb2a9108a4bd847464a2c506a40c8d199faa69 --- /dev/null +++ b/irrigation-R-codes/Irrigation/Shapes/Cantons_irrigues.prj @@ -0,0 +1 @@ +PROJCS["RGF93_Lambert_93",GEOGCS["GCS_RGF93",DATUM["D_RGF_1993",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["standard_parallel_1",49],PARAMETER["standard_parallel_2",44],PARAMETER["latitude_of_origin",46.5],PARAMETER["central_meridian",3],PARAMETER["false_easting",700000],PARAMETER["false_northing",6600000],UNIT["Meter",1]] \ No newline at end of file diff --git a/irrigation-R-codes/Irrigation/Shapes/Cantons_irrigues.qpj b/irrigation-R-codes/Irrigation/Shapes/Cantons_irrigues.qpj new file mode 100644 index 0000000000000000000000000000000000000000..52a60bf44d9f6d1ad3e986837ac8859cf97d222f --- /dev/null +++ b/irrigation-R-codes/Irrigation/Shapes/Cantons_irrigues.qpj @@ -0,0 +1 @@ +PROJCS["RGF93 / Lambert-93",GEOGCS["RGF93",DATUM["Reseau_Geodesique_Francais_1993",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6171"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4171"]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",49],PARAMETER["standard_parallel_2",44],PARAMETER["latitude_of_origin",46.5],PARAMETER["central_meridian",3],PARAMETER["false_easting",700000],PARAMETER["false_northing",6600000],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["X",EAST],AXIS["Y",NORTH],AUTHORITY["EPSG","2154"]] diff --git a/irrigation-R-codes/Irrigation/Shapes/Cantons_irrigues.shp b/irrigation-R-codes/Irrigation/Shapes/Cantons_irrigues.shp new file mode 100644 index 0000000000000000000000000000000000000000..136a230a16406a08af01e8768d4202c75090a204 Binary files /dev/null and b/irrigation-R-codes/Irrigation/Shapes/Cantons_irrigues.shp differ diff --git a/irrigation-R-codes/Irrigation/Shapes/Cantons_irrigues.shx b/irrigation-R-codes/Irrigation/Shapes/Cantons_irrigues.shx new file mode 100644 index 0000000000000000000000000000000000000000..cc06bd1284d4bb12b249d442496e34f432b0adc0 Binary files /dev/null and b/irrigation-R-codes/Irrigation/Shapes/Cantons_irrigues.shx differ diff --git a/irrigation-R-codes/Irrigation/Shapes/Simu_irrigation.dbf b/irrigation-R-codes/Irrigation/Shapes/Simu_irrigation.dbf new file mode 100644 index 0000000000000000000000000000000000000000..ec7572fef0d0fc9664dde4a690b2e52d1289f508 Binary files /dev/null and 
b/irrigation-R-codes/Irrigation/Shapes/Simu_irrigation.dbf differ diff --git a/irrigation-R-codes/Irrigation/Shapes/Simu_irrigation.prj b/irrigation-R-codes/Irrigation/Shapes/Simu_irrigation.prj new file mode 100644 index 0000000000000000000000000000000000000000..5adb2a9108a4bd847464a2c506a40c8d199faa69 --- /dev/null +++ b/irrigation-R-codes/Irrigation/Shapes/Simu_irrigation.prj @@ -0,0 +1 @@ +PROJCS["RGF93_Lambert_93",GEOGCS["GCS_RGF93",DATUM["D_RGF_1993",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["standard_parallel_1",49],PARAMETER["standard_parallel_2",44],PARAMETER["latitude_of_origin",46.5],PARAMETER["central_meridian",3],PARAMETER["false_easting",700000],PARAMETER["false_northing",6600000],UNIT["Meter",1]] \ No newline at end of file diff --git a/irrigation-R-codes/Irrigation/Shapes/Simu_irrigation.qpj b/irrigation-R-codes/Irrigation/Shapes/Simu_irrigation.qpj new file mode 100644 index 0000000000000000000000000000000000000000..52a60bf44d9f6d1ad3e986837ac8859cf97d222f --- /dev/null +++ b/irrigation-R-codes/Irrigation/Shapes/Simu_irrigation.qpj @@ -0,0 +1 @@ +PROJCS["RGF93 / Lambert-93",GEOGCS["RGF93",DATUM["Reseau_Geodesique_Francais_1993",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6171"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4171"]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",49],PARAMETER["standard_parallel_2",44],PARAMETER["latitude_of_origin",46.5],PARAMETER["central_meridian",3],PARAMETER["false_easting",700000],PARAMETER["false_northing",6600000],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["X",EAST],AXIS["Y",NORTH],AUTHORITY["EPSG","2154"]] diff --git a/irrigation-R-codes/Irrigation/Shapes/Simu_irrigation.shp b/irrigation-R-codes/Irrigation/Shapes/Simu_irrigation.shp new file mode 100644 index 0000000000000000000000000000000000000000..d9ef3b5653177d8ca8decbc8f3ade2b5b8a2078b Binary files /dev/null and b/irrigation-R-codes/Irrigation/Shapes/Simu_irrigation.shp differ diff --git a/irrigation-R-codes/Irrigation/Shapes/Simu_irrigation.shx b/irrigation-R-codes/Irrigation/Shapes/Simu_irrigation.shx new file mode 100644 index 0000000000000000000000000000000000000000..69809ed8ddd5fa99f347d369875b6ff27258120e Binary files /dev/null and b/irrigation-R-codes/Irrigation/Shapes/Simu_irrigation.shx differ diff --git a/irrigation-R-codes/Irrigation/Shapes/hrus_irriguees_sur_Rhone.dbf b/irrigation-R-codes/Irrigation/Shapes/hrus_irriguees_sur_Rhone.dbf new file mode 100644 index 0000000000000000000000000000000000000000..3834277b52641ea860c88779e21074136be67493 Binary files /dev/null and b/irrigation-R-codes/Irrigation/Shapes/hrus_irriguees_sur_Rhone.dbf differ diff --git a/irrigation-R-codes/Irrigation/Shapes/hrus_irriguees_sur_Rhone.prj b/irrigation-R-codes/Irrigation/Shapes/hrus_irriguees_sur_Rhone.prj new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/irrigation-R-codes/Irrigation/Shapes/hrus_irriguees_sur_Rhone.qpj b/irrigation-R-codes/Irrigation/Shapes/hrus_irriguees_sur_Rhone.qpj new file mode 100644 index 0000000000000000000000000000000000000000..8b137891791fe96927ad78e64b0aad7bded08bdc --- /dev/null +++ b/irrigation-R-codes/Irrigation/Shapes/hrus_irriguees_sur_Rhone.qpj @@ -0,0 +1 @@ + diff --git 
a/irrigation-R-codes/Irrigation/Shapes/hrus_irriguees_sur_Rhone.shp b/irrigation-R-codes/Irrigation/Shapes/hrus_irriguees_sur_Rhone.shp
new file mode 100644
index 0000000000000000000000000000000000000000..6a0029a92acae533ee5df1caa9503d22c64d4d3d
Binary files /dev/null and b/irrigation-R-codes/Irrigation/Shapes/hrus_irriguees_sur_Rhone.shp differ
diff --git a/irrigation-R-codes/Irrigation/Shapes/hrus_irriguees_sur_Rhone.shx b/irrigation-R-codes/Irrigation/Shapes/hrus_irriguees_sur_Rhone.shx
new file mode 100644
index 0000000000000000000000000000000000000000..a9b0b52543cfa7c90858bde3a9a9109dc089b0b2
Binary files /dev/null and b/irrigation-R-codes/Irrigation/Shapes/hrus_irriguees_sur_Rhone.shx differ
diff --git a/irrigation-R-codes/Irrigation/irrigation_table.csv b/irrigation-R-codes/Irrigation/irrigation_table.csv
new file mode 100644
index 0000000000000000000000000000000000000000..3930103bc76432283142e31a93409797b93ccb14
--- /dev/null
+++ b/irrigation-R-codes/Irrigation/irrigation_table.csv
@@ -0,0 +1,14 @@
+Code;Culture;Type_irrigation
+19;Vigne;GaG
+20;Mais;Asp
+21;Tournesol;Asp
+22;Ble dur;Asp
+23;Maraichage;GaG
+24;Pomme de terre;Asp
+25;Vergers;GaG
+26;Prairies;Asp
+27;Proteagineux;GaG
+28;Riz;Grav
+29;Jacheres;GaG
+30;Jardins;GaG
+31;Plantes industrielles;GaG
diff --git a/irrigation-R-codes/Irrigation/readme.txt b/irrigation-R-codes/Irrigation/readme.txt
new file mode 100644
index 0000000000000000000000000000000000000000..2b07ae0bfaaa923b4c4b6c9fd0cf2adc549be213
--- /dev/null
+++ b/irrigation-R-codes/Irrigation/readme.txt
@@ -0,0 +1,93 @@
+°HRUs_culture.csv : file containing:
+ - column 1 : the HRU number
+ - column 2 : the crop type
+First, the cantons were selected according to the ratio SAUirr/Scanton (SAU = utilised agricultural area). A canton is considered irrigated if this ratio is > 3% for all crops except market gardening (Maraichage), for which the threshold is 1%.
+The agricultural HRUs were then intersected with the irrigated cantons. Only the HRUs whose area lies at least 50% within the canton were kept.
+The dominant crop of the canton's irrigated SAU was assigned to every agricultural HRU located in the canton.
+
+°HRUs_culture_test.csv : same file as HRUs_culture.csv described above, with an additional column giving the canton to which each HRU is attached.
+This file is produced by the R post-processing code.
+
+°irrigation_table.csv : file linking the crops (listed in HRUs_culture) to the codes used in the land-use data.
+
+°Bilan_irrigation.xlsx
+ - Code Canton : canton code
+ - SAUirr_sur_Scanton_en_pourcents : ratio SAUirr / canton area (in %)
+ - HRUs_irriguees : area of the irrigated HRUs located in the canton (in m2)
+ - Surface_Canton : canton area (in m2)
+ - Pourc_canton_irrigue_HRUs : ratio HRUs_irriguees/Surface_Canton (in %)
+ - DOM_SAU_IR : dominant crop in the canton
+ - Rapport_SAUirrigRGA_HRUirrig : ratio SAUirr / HRUs_irriguees (-)
+
+°Bilan_simulation_irrigation.txt
+File in which the results of the irrigation simulation analyses (computed demand, actual withdrawal, water-agency withdrawal over the periods 1987-2007 and 2008-2012) carried out with the Analyse_Irrigation.r code are written.
+
+RGACultures2010_Cantons_BVRhone_sanssecret_20131001.xlsx
+RGA (agricultural census) file per canton, without statistical confidentiality masking
+
+In the Resultats folder:
+Files containing the results of the simulations presented in the MDR report of November 2015.
+Simulations with and without irrigation dose over the period 1987 - 2007 (with demand corrected or not)
+
+
+
+In the Shapes folder:
+
+°Cantons_irrigues.shp : layer containing the irrigated cantons finally retained
+The cantons were selected according to the ratio SAUirr/Scanton.
+A canton is considered irrigated if this ratio is > 3% for all crops except market gardening (1% for Maraichage).
+ - CODE_CAN_1 : canton code
+ - NOM_CANTON : canton name
+
+ - POURC_IRR : SAUirr / (SAUirr + non-irrigated SAU)
+ - SAU_TOT : total SAU (in ares) = SAUirr + non-irrigated SAU
+ - DOM_SAUTOT : dominant crop over the total SAU
+ - P_DOM_TOT : share of the dominant crop (dominant-crop area / total SAU)
+ - SOM_S_TOT : sum of the total area (in ares)
+ - SOMSNO_SAU : sum of the non-SAU area (in ares)
+ - SAU_IRR : irrigated SAU (in ares)
+ - DOM_SAU_IR : dominant crop over the irrigated SAU
+ - PDOMSAUIRR : share of the dominant crop (dominant-crop area / irrigated SAU)
+ - SAUNOIRR : non-irrigated SAU (in ares)
+ - DOMSAUNOIR : dominant crop over the non-irrigated SAU
+ - PDOMSAUNOI : share of the dominant crop (dominant-crop area / non-irrigated SAU)
+ - P_IRR_SAU : SAUirr / (SAUirr + non-irrigated SAU)
+ - SAUIRR_SCA : ratio irrigated area / canton area
+ - AIRE_TOTAL : canton area lying within the Rhône basin (in m²)
+ - SCANT : total canton area (in m²)
+ - SCANTINBV : canton area lying within the Rhône basin (in m²)
+ - RTCANT : ratio SCANTINBV / SCANT
+ - IRRPROPSAU : irrigated SAU / Scanton (in %)
+ - IRRPROPHRU : sum of the irrigated HRUs / Scanton (in %)
+ - PHRU_PSAU : ratio IRRPROPHRU / IRRPROPSAU = sum of the irrigated HRUs / irrigated SAU
+
+°CantonsModif_BVRhone_IRR.shp
+Canton layer used to integrate the RGA data and select the irrigated cantons
+
+°hrus_irriguees_sur_Rhone.shp : layer containing the irrigated HRUs finally retained
+The HRUs were kept if at least 50% of their area lies within the irrigated canton to which they are attached.
+The attribute table contains only the irrigated HRUs.
+It matches the data present in the hrus.par file.
+The last column, CODE_CAN_1, gives the canton to which each HRU is attached.
+
+°Simu_irrigation.shp : layer into which the simulation results produced by the Analyse_Irrigation.r code are written
+ - CODE_CAN_1 : canton code
+ - NOM_CANTON : canton name
+ - ZONE_ETUDE : canton grouping (1 = Rhône, 2 = Durance, 3 = Saône)
+ - POURC_IRR : SAUirr / (SAUirr + non-irrigated SAU)
+ - SAU_IRR : irrigated SAU (in ares)
+ - DOM_SAU_IR : dominant crop over the irrigated SAU
+ - PDOMSAUIRR : share of the dominant crop (dominant-crop area / irrigated SAU)
+ - P_IRR_SAU : SAUirr / (SAUirr + non-irrigated SAU)
+ - SAUIRR_SCA : ratio irrigated area / canton area
+ - AIRE_TOTAL : canton area lying within the Rhône basin (in m²)
+ - SCANT : total canton area (in m²)
+ - SCANTINBV : canton area lying within the Rhône basin (in m²)
+ - RTCANT : ratio SCANTINBV / SCANT
+ - IRRPROPSAU : irrigated SAU / Scanton (in %)
+ - IRRPROPHRU : sum of the irrigated HRUs / Scanton (in %)
+ - PHRU_PSAU : ratio IRRPROPHRU / IRRPROPSAU = sum of the irrigated HRUs / irrigated SAU
+ - D1,D2,D3,D4... : respectively the computed demand and the computed demand corrected by the SAUirr/SHRU ratio over the periods 1987-2007 (1 and 2) and 2008-2012 (3 and 4)
+ - T1,T2,T3,T4...
: respectively the computed transfer and the computed transfer corrected by the SAUirr/SHRU ratio over the periods 1987-2007 (1 and 2) and 2008-2012 (3 and 4)
+ - P1 and P3 : water-agency withdrawals over 1987-2007 (P1) and 2008-2012 (P3)
+
diff --git a/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r b/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r
new file mode 100644
index 0000000000000000000000000000000000000000..f69406e26f86f34a84ffb2eabb6a33a493c13c3f
--- /dev/null
+++ b/irrigation-R-codes/MDR_Analyse_Irrigation_IG.r
@@ -0,0 +1,311 @@
+########################################################################################
+####### Comparison of the modelled DEMAND and TRANSFERS with the AERMC WITHDRAWALS #####
+########################################################################################
+# WARNING : since 17/05/2022, now using .dbf files in "user configuration" instead of .shp files
+# the objective being to no longer have to use maptools
+
+
+library(rgeos)
+library(foreign)
+library(sp)
+library(raster)
+library(zoo) # needed for zoo()/na.omit on zoo series, unless already attached by utilitaire_irrigation.R
+
+source('lib/utilitaire_irrigation.R')
+
+
+# *** USER CONFIGURATION ***
+# ---------------------------------
+
+# config='25MPS_Aleatoir' # == Nom_simu
+# chemin="~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig/" # path to the shapes
+# shp_file <- 'AleatoirIrrig_hrus_decoupees.shp'
+
+
+#test3
+config <- 'test4_MA' #== Nom_simu
+chemin <- '/home/tlabrosse/Bureau/maestro/irrigation-R-codes/Irrigation/Shapes/' # path to the shapes
+shp_file <- 'hrus_irriguees_sur_Rhone.dbf'
+
+#
+# config='25MPS_surest' # == Nom_simu
+# chemin="~/DATA/SIG_MDR/irrigation/shape_HRUs_Francois/" # path to the shapes
+# shp_file='hrus_irriguees_decoupees.shp'
+
+chemin_sortie <- '/home/tlabrosse/Bureau/maestro/irrigation-R-codes/resultats OUT/'
+pdfname <- paste0("/home/tlabrosse/Bureau/maestro/irrigation-R-codes/resultats OUT/Comparaison_Irrig_", config, ".pdf")
+
+
+# *** PROCESSING OF THE MODEL OUTPUTS ***
+# ----------------------------------------
+
+Nom_simu <- config
+
+HRULoop <- ReadLoopDaily(paste0(chemin_sortie, Nom_simu, '/'), "HRULoop.dat", TRUE)
+Dates <- HRULoop$dates
+
+# canton code of the irrigated HRUs
+HRUs <- read.dbf(paste0(chemin, shp_file))
+regroup <- cbind(HRUs$CAT,HRUs$CODE_CAN_1) # join CAT - CODE_CAN_1 (the two ids)
+cantons <- regroup[order(regroup[,2]),] # sort
+un_canton <- unique(cantons[,2]) # remove all duplicates
+
+
+# Compute the daily series cantonXXX_Demande and cantonXXX_Transfert, of dimensions ncol=nb_HRUs_in_canton, nrow=Ntime
+for (cant in un_canton){
+  Nom <- paste0('canton', cant)
+  HRUs_irr <- cantons[which(cantons[,2]== cant),1] # irrigated HRUs of canton "cant"
+
+  Dem <- NULL
+  Transf <- NULL
+  for (k in HRUs_irr){
+    Dem <- cbind(Dem,HRULoop$Data[which(HRULoop$Data[,1]==k),which(colnames(HRULoop$Data)=='irrigationDemand')]) # L
+    Transf <- cbind(Transf,HRULoop$Data[which(HRULoop$Data[,1]==k),which(colnames(HRULoop$Data)=='irrigationTotal')]) # L
+  }
+
+  assign(paste0(Nom, '_Demande'), Dem)
+  assign(paste0(Nom, '_Transfert'), Transf)
+}
+
+# Compute the interannual mean of the annual Demand and Transfer per canton
+Demande_interannuelle <- NULL
+Transfert_interannuel <- NULL
+
+for (cant in un_canton){
+  Nom <- paste0('canton', cant)
+  obj1 <- aggregateZoo(na.omit(zoo(apply(get(paste0(Nom, '_Demande')), 1, sum), Dates)), 'y', 'sum')/1000. # m3
+  obj2 <- aggregateZoo(na.omit(zoo(apply(get(paste0(Nom, '_Transfert')), 1, sum), Dates)), 'y', 'sum')/1000. # m3
+  z_dem <- mean(obj1 [-c(1,2,24:28)] ) # value over 1987 - 2007
+  z_transf <- mean( obj2 [-c(1,2,24:28)] ) # value over 1987 - 2007
+  Demande_interannuelle <- rbind( Demande_interannuelle, z_dem)
+  Transfert_interannuel <- rbind( Transfert_interannuel, z_transf)
+}
+
+irrig_interannuelle_simu <- cbind(canton=un_canton,demande=Demande_interannuelle,transfert=Transfert_interannuel)
+rownames(irrig_interannuelle_simu) <- NULL
+colnames(irrig_interannuelle_simu) <- c('canton','demande','transfert')
+
+
+# *** AERMC WITHDRAWALS ***
+# --------------------------
+Prelev <- Prelev8182_1987_2007() # m3 ; 81: gravity-fed (GRAV), 82: non-gravity
+# For some of the modelled agricultural cantons the withdrawals are zero (i.e. the canton does not appear in the AERMC withdrawal records)
+# => add these cantons to the withdrawals with a value of 0
+prelev <- NULL
+for (cant in un_canton){
+  if (length(Prelev[which(Prelev[,1] == cant)])>0) {
+    prelev <- rbind(prelev,Prelev[which(Prelev[,1] == cant),2])
+  } else {
+    prelev <- rbind(prelev,0.)
+  }
+}
+PrelevAll <- prelev # m3/yr
+colnames(PrelevAll) <- 'PrelevAll'
+
+Prelev <- Prelev82_1987_2007()
+prelev <- NULL
+for (cant in un_canton){
+  if (length(Prelev[which(Prelev[,1] == cant)])>0) {
+    prelev <- rbind(prelev,Prelev[which(Prelev[,1] == cant),2])
+  } else {
+    prelev <- rbind(prelev,0.)
+  }
+}
+PrelevNonGrav <- prelev # m3/yr
+colnames(PrelevNonGrav) <- 'PrelevNonGrav'
+
+Prelev <- Prelev8182_2008_2012() #
+prelev <- NULL
+for (cant in un_canton){
+  if (length(Prelev[which(Prelev[,1] == cant)])>0) {
+    prelev <- rbind(prelev,Prelev[which(Prelev[,1] == cant),2])
+  } else {
+    prelev <- rbind(prelev,0.)
+  }
+}
+PrelevAll_post2008 <- prelev # m3/yr
+colnames(PrelevAll_post2008) <- 'PrelevAll_post2008'
+
+# *** WITHDRAWAL - TRANSFER COMPARISON PLOTS ***
+# --------------------------------------------------
+
+# all cantons
+comparaison <- cbind(irrig_interannuelle_simu, PrelevAll, PrelevNonGrav, PrelevAll_post2008)
+save(comparaison,file= paste0('~/Documents/MDR/irrigation/RDATA/Comparaison_Irrig_', config, '.Rdata'))
+
+
+# Cantons_Rhone <- c(101,117,118,119,120,140,518,717,722,724,1333,2602,2604,2607,2611,2613,2615,2616,2619,2621,2623,2625,2626,2628,2629,2632,2634,3006,3009, 3016, 3023,3026,3802,3807,3808,3815,3819,3822,3824,3825,3830,3837,3846,3853,4213,4233,6907,6924,6931,6937,6938,6944,6945,6948,6949,7405,8405,8406,8409,8413,8415,8416,8418,8423)
+#
+# Cantons_Durance <- c(410,413,414,416,419,420,421,427,429,430,505,509,512,515,516,522,523,524,1307, 1309,1312,1326,1327,1331,8319,8408,8411)
+#
+# Cantons_Saone <- c(102,126,135,2103,2114,2134,2138,3909,6905,6910,6925,7116,7151)
+
+# only the cantons lying >99% within our domain
+Cantons_Rhone <- c(101,117,118,119,120,140,518,717,722,724,2602,2604,2607,2611,2613,2615,2616,2619,2621,2623,2625,2626,2628,2629,2632,2634,3006,3023,3026,3802,3807,3808,3815,3819,3822,3824,3825,3830,3837,3846,3853,4213,4233,6907,6924,6931,6937,6938,6944,6945,6948,6949,7405,8405,8406,8409,8413,8415,8416,8418,8423)
+#
+Cantons_Durance <- c(410,413,414,416,419,420,421,427,429,430,505,509,512,515,516,522,523,524,1326,1327,8319,8408,8411)
+#
+Cantons_Saone <- c(102,126,135,2103,2114,2134,2138,3909,6905,6910,6925,7116,7151)
+
+# file to load to get the dominant crop per canton
+canton_cult <- read.dbf('~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig_CultureNew/AleatoirIrrig_CN_hrus_decoupees.dbf')
+canton_cult <- canton_cult[, c('CODE_CAN_1','LANDUSEID')]
+canton_cult <- canton_cult[!duplicated(canton_cult$CODE_CAN_1),]
+
+
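+# Illustrative sketch (an assumption, not part of the original script): luid2cult(), used just below,
+# is provided by lib/utilitaire_irrigation.R, which is not shown in this diff. Assuming it simply maps
+# LANDUSEID codes to crop names through Irrigation/irrigation_table.csv (Code;Culture;Type_irrigation),
+# a minimal stand-in could look like:
+# luid2cult <- function(id) {
+#   tab <- read.csv2('Irrigation/irrigation_table.csv')   # ';'-separated table
+#   as.character(tab$Culture[match(id, tab$Code)])        # e.g. 20 -> "Mais", 25 -> "Vergers"
+# }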
+pdf(pdfname,paper="special",width=8,height=14)
+layout(matrix(c(1,3,5,1,3,5,1,3,5,2,4,5),3,4))
+par (pty="m")
+
+# Rhone
+mat_Rhone <- as.matrix(comparaison[which(comparaison[,1] %in% Cantons_Rhone),2:6])
+cantonlist <- comparaison[which(comparaison[,1] %in% Cantons_Rhone),1]
+culturelist <- substr(luid2cult(canton_cult[match(cantonlist,canton_cult$CODE_CAN_1),2]), start=1, stop=3)
+row.names(mat_Rhone) <- paste(cantonlist, culturelist)
+petits <- (which(mat_Rhone[, 3]/1000000<10))
+barplot(t(mat_Rhone[petits,])/1000000., beside = TRUE, col = c("red", "blue", "green","forestgreen", "black"), legend.text = TRUE, main="RHONE : Demande,Transfert et Prélèvements AERMC sur 1987-2007", xlab='cantons', ylab='Mm3', ylim=c(0,10),border=NA,las=2, cex.names=0.65)
+barplot(t(mat_Rhone[-petits,])/1000000., beside = TRUE, col = c("red", "blue", "green","forestgreen","black"), legend.text = TRUE, main="", xlab='cantons', ylab='Mm3', ylim=c(0,150),las=2, cex.names=0.65)
+
+par (pty="m")
+# Durance
+mat_Durance <- as.matrix(comparaison[which(comparaison[, 1] %in%Cantons_Durance), 2:6])
+cantonlist <- comparaison[which(comparaison[, 1] %in% Cantons_Durance), 1]
+culturelist <- substr(luid2cult(canton_cult[match(cantonlist, canton_cult$CODE_CAN_1), 2]), start=1, stop=3)
+row.names(mat_Durance) <- paste(cantonlist, culturelist)
+petits <- (which(mat_Durance[, 3]/1000000<20))
+barplot(t(mat_Durance[petits,])/1000000, beside = TRUE, col = c("red", "blue", "green","forestgreen", "black"), legend.text = TRUE, main="DURANCE : Demande,Transfert et Prélèvements AERMC sur 1987-2007", xlab='cantons', ylab='Mm3', las=2, ylim=c(0,20),cex.names=0.65)
+barplot(t(mat_Durance[-petits,])/1000000, beside = TRUE, col = c("red", "blue", "green","forestgreen", "black"), legend.text = TRUE, main="", xlab='cantons', ylab='Mm3', las=2, ylim=c(0,150),cex.names=0.65)
+
+par (pty="m")
+# Saone
+mat_Saone <- as.matrix(comparaison[which(comparaison[, 1] %in%Cantons_Saone), 2:6])
+cantonlist <- comparaison[which(comparaison[, 1] %in% Cantons_Saone), 1]
+culturelist <- substr(luid2cult(canton_cult[match(cantonlist, canton_cult$CODE_CAN_1), 2]), start=1, stop=3)
+row.names(mat_Saone) <- paste(cantonlist, culturelist)
+barplot(t(mat_Saone)/1000000, beside = TRUE, col = c("red", "blue", "green","forestgreen", "black"), legend.text = TRUE, main="SAONE : Demande,Transfert et Prélèvements AERMC sur 1987-2007", xlab='cantons', ylab='Mm3', las=2,cex.names=0.65)
+
+graphics.off()
+
+
+
+# *** ANALYSE PAR TYPE DE CULTURE et SOUS-BASSINS (code en cours d'écriture) ***
+# ------------------------------------------------------------------------------
+
+# -- Irrigation par hectare irrigué, pour détection des erreurs de surface ou pb de choix de méthode d'irrigation
+culture_hru <- luid2cult(HRUs$LANDUSEID)
+
+irrigarea_cant <- NULL # SAU irriguée par canton dans notre modélisation (proche valeurs du RGA)
+culture_cant <- NULL
+for (cant in un_canton){
+ irrigarea_cant <- c(irrigarea_cant, sum(HRUs$AREA[which(HRUs$CODE_CAN_1 ==cant)])/10000.) #hectares
+ culture_cant <- c(culture_cant, unique(culture_hru[which(HRUs$CODE_CAN_1 ==cant)]))
+}
+
+comparaison_surf <- cbind(un_canton, irrig_interannuelle_simu[, 2:3]%/%irrigarea_cant, PrelevAll%/%irrigarea_cant)
+
+Rhone <- comparaison_surf[which(comparaison_surf[, 1] %in% Cantons_Rhone),]
+cultures_Rhone <- culture_cant[which(un_canton %in% Cantons_Rhone )]
+
+N <- length(unique(cultures_Rhone)) #==> 5 types de culture : "Mais" "Prairies" "Vergers" "Vigne" "maraichage"
+culture_locale <- unique(cultures_Rhone)
+
+pdfname <- paste0("~/Documents/MDR/irrigation/ComparaisonSurf_Rhone_", config, ".pdf")
+pdf(pdfname,paper="special",width=8,height=14)
+layout(matrix(1:N, N, 1))
+par (pty="m")
+
+for (cult in 1:N){
+ mat <- as.matrix(Rhone[which(cultures_Rhone==culture_locale[cult]), 2:4])
+ row.names(mat) <- Rhone[which(cultures_Rhone==culture_locale[cult]), 1]
+ barplot(t(mat), beside = TRUE, col = c("red", "blue", "green"), legend.text = TRUE, main= paste0('Rhone , ', culture_locale[cult]), xlab='cantons', ylab='m3 / hectare', las=2)
+}
+
+graphics.off()
+mat_Rhone <- as.matrix(comparaison[which(comparaison[, 1] %in% Cantons_Rhone), 2:5])
+row.names(mat_Rhone) <- comparaison[which(comparaison[, 1] %in% Cantons_Rhone), 1]
+petits <- (which(mat_Rhone[, 3]/1000000<10))
+barplot(t(mat_Rhone[petits,])/1000000., beside = TRUE, col = c("red", "blue", "green","forestgreen"), legend.text = TRUE, main="RHONE : Demande,Transfert et Prélèvements AERMC sur 1987-2007", xlab='cantons', ylab='Mm3', ylim=c(0,10),border=NA,las=2, cex.names=0.75)
+barplot(t(mat_Rhone[-petits,])/1000000., beside = TRUE, col = c("red", "blue", "green","forestgreen"), legend.text = TRUE, main="", xlab='cantons', ylab='Mm3', ylim=c(0,150),las=2)
+
+########################################################################################
+####### Ajout de la contrainte Q > 10% MA (Module Annuel) pour Prélèv Irrigation #######
+########################################################################################
+
+# fichiers pour extraction des MA :
+simufile <- '~/JAMS/modeldata/J2K_Rhone_Barrages/output/BAR/'
+filename <- 'ReachLoop.dat'
+
+# le vieux et nouveau Reach.par
+paramdir <- '~/JAMS/modeldata/J2K_Rhone_Irrigation/parameter/'
+oldparfile <- 'reach.par'
+newparfile <- 'reach_MA.par' # avec nouveau param MA
+
+# code
+rloop <- ReadLoopDaily(simufile, filename, FALSE)
+MA <- NULL
+Nbreach <- dim(rloop$Data)[1]/length(rloop$dates)
+Nbtime <- length(rloop$dates)
+Ntot <- dim(rloop$Data)[1]
+for (i in (1:Nbreach)){ # 1 to 3075 reaches
+ index <- seq(i, Ntot, Nbreach)
+ chronique <- rloop$Data[index, 2]
+ MA_tmp <- mean(chronique)
+ MA <- c(MA, MA_tmp) # L/d
+}
+
+ID <- rloop$Data[1:Nbreach, 1]
+MA <- rbind(ID, MA)
+
+reaches <- Chargement_param(paramdir, oldparfile)
+order <- match(ID, reaches$V1) # l'ordre des simus est inversé par rapport à l'ordre du reach.par...
+MA <- MA[, order]
+
+newparamName <- "MA"
+newparamVal <- round(MA[2,])
+newparamUnit <- "L/d"
+add_param(paramdir,oldparfile,newparfile,newparamName,newparamVal,newparamUnit)
+
+########################################################################################
+####### Impact de la paramétrisation MA sur les débits journaliers sur qques HRUS ###### (en cours)
+########################################################################################
+
+# ex : cantons (retenu) 3909 (reach6222) : forte baisse de la demande suite à introduction de cette paramétrisation.
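A side note on the MA block just above: the per-reach means are built with seq(i, Ntot, Nbreach), which assumes the rows of ReachLoop.dat are interleaved reach by reach in a fixed order. The minimal cross-check below is not part of the original script; it only reuses rloop and MA together with base R tapply, and it is ordering-independent because it groups on the ID column instead.

MA_by_id <- tapply(rloop$Data[, 2], rloop$Data[, 1], mean)   # long-term mean per reach ID, L/d
# the two columns below should agree; if they do not, the interleaving assumption is violated
head(cbind(loop_mean = MA[2, ], by_id_mean = MA_by_id[as.character(MA[1, ])]))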
+simnewdir <- paste0(chemin_sortie, config, "/") +filename <- 'ReachLoop.dat' + +simrefdir <- '~/JAMS/modeldata/J2K_Rhone_Natural_Hydrology/output/newREF/' + +simolddir <- paste0(chemin_sortie, 'test3', "/") + +myreach <- 6222 + +# code + +# avec MA +rloopnew <- ReadLoopDaily(simnewdir, filename, FALSE) +runoffnew <- rloopnew$Data[which(rloopnew$Data[, 1]==myreach), 'simRunoff'] +runoffnew <- xts(runoffnew, as.POSIXct(rloopnew$dates, format='%Y-%m-%d')) + +# Hydro Nat +rloopref <- ReadLoopDaily(simrefdir, filename, FALSE) +runoffref <- rloopref$Data[which(rloopref$Data[, 1]==myreach), 'simRunoff'] +runoffref <- xts(runoffref, as.POSIXct(rloopref$dates, format='%Y-%m-%d')) + +# irrig sans MA +rloopold <- ReadLoopDaily(simolddir, filename, FALSE) +runoffold <- rloopold$Data[which(rloopold$Data[, 1]==myreach), 'simRunoff'] +runoffold <- xts(runoffold, as.POSIXct(rloopold$dates, format='%Y-%m-%d')) + +MA <- mean(runoffref) +MAts <- xts(rep(MA, length(runoffref)), as.POSIXct(rloopref$dates, format='%Y-%m-%d')) +MA10ts <- xts(rep(MA*.1, length(runoffref)), as.POSIXct(rloopref$dates, format='%Y-%m-%d')) + +year <- '2009' +period <- paste0(year, '-05-01/', year, '-10-31') +plot(runoffref[period],ylim=c(0, MA*2), main="débit à l'aval du canton 3909 (CHEMIN, cult=mais, petit affluent", ylab='L/d') +lines(runoffnew[period],col=2) +lines(runoffold[period],col=4) +lines(MAts[period], col=3) +lines(MA10ts[period], col=3, lty=4) + +legend("topright",legend=names(variablesfut),y.intersp = 1, lty= 1,bty="n",col = colors,xpd=NA,cex=0.8) + diff --git a/irrigation-R-codes/MDR_areaselect_irrigated_HRUs.r b/irrigation-R-codes/MDR_areaselect_irrigated_HRUs.r new file mode 100644 index 0000000000000000000000000000000000000000..cdba5161258cd2cd86b12650f67733c8f7c7e8fe --- /dev/null +++ b/irrigation-R-codes/MDR_areaselect_irrigated_HRUs.r @@ -0,0 +1,236 @@ +#~****************************************************************************** +#~* Selects the HRUs where irrigation is to be deployed, based on their area +#~* , canton location (irrigated or not) and comparison to SAU_irr_in_canton +#~* +#~* The area of diverse combinations of HRUs is compared to the SAU_irr_in_canton, +#~* starting with combinations of only 1 HRU in the canton, and increasing till +#~* being over the SAU_irr_in_canton. +#~* If the single-element alreading brings the HRU irrigated area above the SAU_irr_in_canton +#~* , the smallest HRU in the canton is irrigated and all others are not. +#~* The eligible combinations are tested in 3 passes with increasing tolerance to +#~* error in the total HRU irrigated area : 10 %, 30 % and 100 %. +#~* +#~* The results are +#~* * a vector irrigated (value : 0 or 1) with length: N_hrus_total +#~* * a vector irrig_type (value : 0, 1 or 2) with length: N_hrus_total. +#~* 1 = asp ; 2 = gag +#~* * a vector area_error indicating the % of error in surface committed with the new HRUirr +#~* +#~* - Le programme calcule les indices des HRUs qui irriguent un canton, en utilisant l'algorithme de combinaison de nombres afin d'additionner les surfaces des HRUs pour trouver la surface la plus proche possible de la surface totale du canton. +#~* +#~* - Le programme calcule également la différence en pourcentage entre la surface totale du canton et la surface totale des HRUs irriguant le canton. +#~* +#~* - Le programme écrit les résultats dans un fichier externe. +# +#~* - Le fichier externe contient 5 colonnes. 
La première est le numéro du HRU, la deuxième est sa surface, la troisième indique si le HRU irrigue le canton (1) ou non (0), la quatrième donne le type d'irrigation du HRU et la cinquième donne la différence en pourcentage entre la surface totale du canton et la surface totale des HRUs irriguant le canton. +# %). + +#~****************************************************************************** +#~* PROGRAMMER: Isabelle GOUTTEVIN (Irstea Lyon) +#~****************************************************************************** +#~* CREATED/MODIFIED: +# Created 2015-12-09 by Isabelle GOUTTEVIN (Irstea Lyon) +# Modified 2022-05-04 by Theo L (INRAE Lyon) +#~****************************************************************************** +setwd("/home/tlabrosse/Bureau/maestro/irrigation-R-codes/") +library(combinat) +library(foreign) +source("lib/rgate/Stub.R") + +rcvStub = ReceiverStub$new() + + +# *** FONCTIONS *** + +# ------------------------------------------------------------------------ +# index_of_nearest <- function(x, number){ +# Finds the index of the element in x that is closest to number. +# Args: +# x: A vector of numbers +# number: A number +# Returns: +# The index of the element in x that is closest to number +# ------------------------------------------------------------------------ +index_of_nearest <- function(x, number){ + return (which(abs(x-number)==min(abs(x-number))))} + + + + +# ------------------------------------------------------------------------ +#value_of_nearest(c(5,2,1),6) +# value_of_nearest <- function(x, number) +# Finds the value of the element in x that is closest to number. +# Args: +# x: A vector of numbers +# number: A number +# Returns: +# The value of the element in x that is closest to number +# ------------------------------------------------------------------------ +value_of_nearest <- function(x, number){ + return (x[which(abs(x-number)==min(abs(x-number)))])} + + + + +# ------------------------------------------------------------------------ +# try_combination <- function(n, S_HRUs, S_irr_Canton, tolerance) +# Tries to find the combination of HRUs that best fits the given irrigation area. +# Args: +# n: The current number of HRUs to be added to the combination +# S_HRUs: A vector of HRUs' surface +# S_irr_Canton: The target irrigation area +# tolerance: The maximum error tolerated by the user +# Returns: +# The index of the HRUs that best fit the target irrigation area, +# " continue " if the current combination does not work but a smaller combination might, +# " non convergence " if the current combination does not work and neither does a smaller combination. +# ------------------------------------------------------------------------ +try_combination <- function(n, S_HRUs, S_irr_Canton, tolerance){ + + if (n < length(S_HRUs)){ + combi <- combn(S_HRUs, n) + } else { + combi <- t(t(S_HRUs)) + } + sumcombi <- apply(combi, 2, sum) + + + nearestarea <- value_of_nearest(sumcombi, S_irr_Canton) + error_nearest <- abs(1-nearestarea/S_irr_Canton)*100. 
+ + if (error_nearest[1] < tolerance){ + + combi_selected <- index_of_nearest(sumcombi, S_irr_Canton) + index_selected <- NULL + for (i in 1:n){ + index_selected <- c(index_selected, which(S_HRUs==combi[, combi_selected][i])) + } + return (index_selected) + + } else if (min(sumcombi) > S_irr_Canton){ + + if (n==1){ + return(which(sumcombi==min(sumcombi))) + } else { + return ("non convergence") + } + } else { + return ("continue") + } +} + +# ------------------------------------------------------------------------ +# main <- function(hrus_irrig_cantons_filePath, cantons_irrigues_filePath) +# Main function of the irrigation assignment process. +# Args: +# hrus_irrig_cantons_filePath: The path to the HRUs irrigated cantons data file +# cantons_irrigues_filePath: The path to the cantons irrigated data file +# Returns: +# A file with the irrigation status of every HRU +# ------------------------------------------------------------------------ +main <- function(hrus_irrig_cantons_filePath, cantons_irrigues_filePath, output_dir) { + hrus_irrig_cantons <- read.dbf(hrus_irrig_cantons_filePath) + cantons_irrigues <- read.dbf(cantons_irrigues_filePath) + + + + # mélange des lignes pour pouvoir avoir un résultat vraiment aléatoire + hrus_irrig_cantons= hrus_irrig_cantons[sample(seq_len(nrow(hrus_irrig_cantons))), ] + cantons_irrigues= cantons_irrigues[sample(seq_len(nrow(cantons_irrigues))), ] + + + N_hru <- dim(hrus_irrig_cantons)[1] + + # creates two vector of the size of the number of currently irrigated HRUs + irrigated <- rep(0, N_hru) + area_error <- rep(0, N_hru) + + # creates a vector of the size of the number irrigated cantons + canton_traite <- rep(0, dim(cantons_irrigues)[1]) + + + tolerances <- c(10, 30, 100) + + for(tolerance in tolerances) { + for (numcanton in cantons_irrigues$CODE_CAN_1[which(canton_traite==0)]){ + + indice_canton <- which(cantons_irrigues$CODE_CAN_1==numcanton) + + # Find the HRU of the current canton + hrus <- hrus_irrig_cantons[which(hrus_irrig_cantons$CODE_CAN_1==numcanton), ] + + if (dim(hrus)[1]<=0){ + canton_traite[indice_canton] <- 1 + } else { + indices <- which(hrus_irrig_cantons$CODE_CAN_1==numcanton) # trouve le(s) HRU(s) associe au canton etudie + S_HRUs <- hrus$AREA # surface du/des HRU(s) en m2 + + S_irr_Canton <- cantons_irrigues[which(cantons_irrigues$CODE_CAN_1==numcanton), ]$SAU_IRR*100. # le "*100" lie au fait que les donnees du RGA sont en ares = 100m2 + + index_of_HRUs <- "continue" + n_elements_combi <- 1 + while ((index_of_HRUs=="continue") && (n_elements_combi <= length(S_HRUs))){ + index_of_HRUs <- try_combination(n_elements_combi, S_HRUs, S_irr_Canton, tolerance) + n_elements_combi <- n_elements_combi+1 + } + + if (index_of_HRUs=="non convergence" || index_of_HRUs=="continue") { + irrigated[indices] <- NA + } else { + irrigated[indices] <- 0 + irrigated[indices[index_of_HRUs]] <- 1 + area_error[indices] <- (sum(S_HRUs[index_of_HRUs])/S_irr_Canton-1)*100. + + canton_traite[indice_canton] <- 1 + } + } + } + } + + + irrig_type <- rep(0, N_hru) + + # il est entrain de mettre un vector dans chaque case du vector là , je me trompe ? 
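# Note (added for clarity, answering the question above): no. Both sides of the next
# assignment are subset with which(irrigated > 0), so they have the same length and each
# cell of irrig_type receives a single IRRIG_TYPE value; this is an ordinary element-wise
# subset assignment, not a vector stored in each cell.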
+ irrig_type[which(irrigated >0)] <- hrus_irrig_cantons$IRRIG_TYPE[which(irrigated >0)] + + + file = output_dir; + write.table(cbind(hrus_irrig_cantons$CAT,hrus_irrig_cantons$AREA, irrigated, irrig_type, area_error),file ,append=F, sep="\t", row.names=FALSE, col.names=c('HRUnum', 'HRUarea', 'irrigated', 'irrig_type', 'area_error')) +} + +# *** MAIN CODE *** +# ----------------- + +hruFile = rcvStub$getArgument("HRU_file")$value +cantonFile = rcvStub$getArgument("cantons_file")$value +output_dir = rcvStub$getArgument("output_dir")$value + +# utilise des données par défauts s'il n y a rien d'autres +if(is.null(cantonFile) || is.null(hruFile)) { + cantonFile = "/home/tlabrosse/Bureau/maestro/irrigation-R-codes/Irrigation/Shapes/Cantons_irrigues.dbf" + hruFile = "/home/tlabrosse/Bureau/maestro/irrigation-R-codes/Irrigation/Shapes/hrus_irriguees_sur_Rhone.dbf" +} + +main(hruFile, + cantonFile, + output_dir +) + + + + + +# Annexe : creation de la table des surfaces irriguees modelisees par canton + +# library(foreign) + +# thats is probably not the right file +# HRU_Aleatoir <- read.dbf('~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig/hrus_irriguees_Aleatoires.dbf') +# SHRUirr_can <- NULL +# for (un_canton in sort(unique(HRU_Aleatoir$CODE_CAN_1))){ +# SHRUirr_can <- c(SHRUirr_can, sum(HRU_Aleatoir[which(HRU_Aleatoir$CODE_CAN_1 == un_canton),]$AREA/100)) #ares +# } + +# write.table(cbind(sort(unique(HRU_Aleatoir$CODE_CAN_1)),SHRUirr_can),'/home/tlabrosse/Bureau/maestro/irrigation-R-codes/resultats OUT/Bilan_HRU_Aleatoir.txt',append=F, sep="\t", row.names=FALSE, col.names=c('canton', 'HRUirrig_area')) + diff --git "a/irrigation-R-codes/MDR_select_culture_irrigu\303\251e.r" "b/irrigation-R-codes/MDR_select_culture_irrigu\303\251e.r" new file mode 100644 index 0000000000000000000000000000000000000000..22a118c829bf70d5a5108b00f242f09352f8e1bd --- /dev/null +++ "b/irrigation-R-codes/MDR_select_culture_irrigu\303\251e.r" @@ -0,0 +1,203 @@ +#***** SCRIPT déterminant la culture dominante à affecter à une HRU ******* +# *** en fonction des données de culture irriguées du RGA et *** +# * du besoin en eau théorique de chaque culture présente * + + +# auteur : IG +# date : 15-12-2015 + +# -------------------------------------------------------------------------- +library(gdata) +library(lubridate) +library(foreign) + +source('lib/utilitaire_irrigation.R') + + +# 1. Rassembler les différentes sources de données +# ************************************************ + +# RGA +#-------------- + +RGA <- read.xls('~/Documents/MDR/irrigation/RGACultures2010_Cantons_BVRhone_sanssecret_20131001.xlsx', sheet = 3) +cantons <- RGA[, 1] +cultures <- c('Vigne', 'Mais', 'Tournesol', 'Blé dur', 'maraichage', 'PdT', 'Vergers', 'Prairies', 'Protéagineux', 'Riz', "Jachère", "Divers", "Industrielles") +RGA <- RGA[, c(19:26, 28, 30, 31, 29, 27)]# colonnes irriguées, 13 types de culture. ACGTUNG !!! les colonnes ne sont pas dans le meme ordre que dans J2000 !! 
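Given the warning just above that the spreadsheet columns are not in the J2000 order, a small sanity check right after the hard-coded column selection can expose a silent misalignment. The two lines below are illustrative only and use nothing but objects already defined here; the subsetting keeps the original xlsx headers, so printing them next to the culture labels they are assumed to represent makes the mapping reviewable.

stopifnot(ncol(RGA) == length(cultures))   # 13 irrigated-culture columns expected
print(data.frame(xlsx_column = names(RGA), assumed_culture = cultures, row.names = NULL))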
+rownames(RGA) <- cantons +colnames(RGA) <- cultures + + +# param de J2000 +#----------------- + +# nom abrégé et numéro des cultures +# cette manière de faire est fragile, si les données venait à changer, il faudrait changer le code +numJ2000_cultures <- 19:31 +numnomcultures <- rbind(cultures, numJ2000_cultures) + +# Kc mensuels par cultures +luparam <- Chargement_param('~/JAMS/modeldata/J2K_Rhone_Irrigation/parameter/', 'landuse.par') +kc <- luparam[19:31, 3:14] +colnames(kc) <- 1:12 +rownames(kc) <- cultures + +# Période d'irrigation, transformées en fraction mensuelles +irrigstart <- luparam[19:31, 36] +irrigend <- luparam[19:31, 37] + # on met des valeurs réalistes là où pas d'info de base +# on dirait que cette partie de code à été fait pour des données très précise, pas sur que ça marche avec autre chose +irrigstart[9] <- irrigstart[6]; irrigend[9] <- irrigend[6] # prot==pdt +irrigstart[10] <- 100; irrigend[10] <- 250 # riz +irrigstart[11] <- irrigstart[2]; irrigend[11] <- irrigend[2] # jach et autres = mais +irrigstart[12] <- irrigstart[7]; irrigend[12] <- irrigend[7] # jardins et autres == vergers +irrigstart[13] <- irrigstart[7]; irrigend[13] <- irrigend[7] # industrielles == prairies + +date1 <- ymd_hms("2000/01/01 00:00:00") +irrigperiod <- interval(as.Date(irrigstart, date1), as.Date(irrigend, date1)) +debutmois <- c(date1, date1+months(1:11)) +finmois <- date1+months(1:12) +monthsperiods <- interval(debutmois, finmois) + +wheightedperiod <- NULL +for (cult in seq_along(cultures)){ + wheightedperiod <- rbind(wheightedperiod, as.period(intersect(monthsperiods, irrigperiod[cult]))/months(1)) +} +wheightedperiod[which(is.na(wheightedperiod))] <- 0 # rmqs : le calcul n'est pas tout à fait exact en raison de la conversion imprécise JulianDay -> Date + # pour la période d'irrigation ==> à améliorer. + + +# ETO mensuelle interannuelle par hru irriguée +# -------------------------------------------- +# This part of the program reads in a file of data on irrigated HRUs and loops through each HRU to calculate the monthly reference evapotranspiration. +# It outputs a file called "HRULoop.dat" with the monthly reference evapotranspiration for each HRU. +# It also assigns a variable to each HRU's monthly reference evapotranspiration, with the variable name being "refET_" followed by the HRU number. +# So, for example, if HRU 1 had a monthly reference evapotranspiration of 3 mm/month, the program would output a file with a single column and 12 rows, and would also create a variable called "refET_1" with the value 3. +# If HRU 2 had a monthly reference evapotranspiration of 4 mm/month, the program would output a file with a single column and 12 rows, and would also create a variable called "refET_2" with the value 4. +# And so on. +# The program does not produce any visual output. +# It is important to note that this program requires the source('lib/aggregateZoo_functions.r') in order to run properly. +# +# - liste des HRUs irriguées +hrus_all <- read.csv('~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig/Irrigated_AleatoireHRUselect.csv') +irrigated <- hrus_all[which(hrus_all$irrigated ==1),] + +HRULoop <- ReadLoopDaily('~/JAMS/modeldata/J2K_Rhone_Irrigation/output/refET/',"HRULoop.dat",TRUE) +Dates <- HRULoop$dates + +# - ET0 mensuelles interannuelles +for (myhru in irrigated[,1]){ + myrefET <- HRULoop$Data[which(HRULoop$Data[, 1]==myhru), which(colnames(HRULoop$Data)=='refET')] + myrefET <- aggregateZoo(zoo(myrefET, Dates), 'm', 'mean') + assign(paste0('refET_', myhru), myrefET) +} + +# 2. 
Comparaison des besoins théoriques sur chaque HRU et affectation du type de culture irrigué +# *********************************************************************************************** + +hrus_et_cantons <- read.dbf('~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig/AleatoirIrrig_hrus_decoupees.dbf') +culture_finale <- NULL +for (hrus in hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)]){ + + un_canton <- hrus_et_cantons$CODE_CAN_1[which(hrus_et_cantons$CAT==hrus)] + sommeprod <- (as.matrix(kc) *as.matrix(wheightedperiod)) %*% as.vector(get(paste0('refET_', hrus))) # une valeur par culture + refETmoyyear_ponderee <- t(sommeprod)*RGA[as.character(un_canton), 1:13] # .. pondérée par la surface en culture sur le canton. + culture_retenue <- cultures[which(refETmoyyear_ponderee==max(refETmoyyear_ponderee))] + numculture_retenue <- numJ2000_cultures[which(refETmoyyear_ponderee==max(refETmoyyear_ponderee))] + culture_finale <- c(culture_finale, numculture_retenue) +} + + + +# 3. modifications du fichiers hrus.par et des .dbf +# *************************************************** + +# hrus.par +#------------- +hruparam <- Chargement_param('~/JAMS/modeldata/J2K_Rhone_Irrigation/parameter/', 'hrus.par') +culture_init <- hruparam[which(hruparam$V1 %in% hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)] ), 11] +#culture_finale[which(culture_init!=culture_finale)] +corresp_irrigtype <- c(2, 1, 1, 1, 2, 1, 2, 1, 2, 3, 1, 1, 2) +irrigtype <- NULL +for (cult in culture_finale){ + ind <- which(numJ2000_cultures==cult) + irrigtype <- c(irrigtype, corresp_irrigtype[ind]) +} +# 11: landuseID +hruparam[which(hruparam$V1 %in% hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)] ), 11] <-culture_finale +# 15: irrigated +hruparam[which(hruparam$V1 %in% hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)] ), 15] <- 1 +hruparam[which(!(hruparam$V1 %in% hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)] )), 15] <- 0 +# 16: irrig_type +hruparam[which(hruparam$V1 %in% hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)] ), 16] <- irrigtype +hruparam[which(!(hruparam$V1 %in% hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)] )), 16] <- 0 + + +# on remet à 4 les landuseID agricoles des HRUs qui ne sont plus irriguées maintenant (proposition pour plus tard : on met la culture dominante non-irriguée, pour prendre en compte des Kc améliorés) +# c'est un peu compliqué car on n'a plus le hrus.par de référence sans irrigation.... 
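Before the landuse clean-up below, a toy walk-through of the culture score computed in section 2 may help: sommeprod multiplies, month by month, the crop coefficient Kc by the fraction of the month falling inside the irrigation period and by the HRU's mean monthly reference ET, and the result is then weighted by the canton's RGA irrigated areas; the culture with the largest weighted value is retained. The numbers and names below are invented for illustration (two cultures, three months) and do not exist in the script.

kc_toy <- rbind(mais = c(0.3, 1.2, 0.5), verger = c(0.6, 0.8, 0.7))   # Kc per month
w_toy  <- rbind(mais = c(0.0, 1.0, 0.5), verger = c(0.5, 1.0, 1.0))   # fraction of each month inside the irrigation period
et0    <- c(80, 120, 100)                                             # monthly reference ET, mm
demande_saison <- (kc_toy * w_toy) %*% et0     # per-culture seasonal demand proxy: 169 (mais), 190 (verger)
surf_rga <- c(mais = 200, verger = 50)         # irrigated area per culture in the canton (RGA)
score <- as.vector(demande_saison) * surf_rga  # 33800 vs 9500
names(score)[which.max(score)]                 # "mais": the larger irrigated area outweighs the lower per-ha demand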
+hruparam[which(!(hruparam$V1 %in% hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)] ) & (hruparam$V11>18) ), 11] <-4 +# step 1: Montagne (V4 : slope ; V11: landuseID) +indices <- which((hruparam$V4 > 10) & (hruparam$V11== 4)) +if (length(indices !=0)){ + hruparam[indices,11] <-12} + +# step 2: Dombes +Dombes_Chalaronne <- 6832 +Dombes_Veyle <- 6800 + +reach <- Chargement_param ('~/JAMS/modeldata/J2K_Rhone_Irrigation/parameter/','reach.par') +indID <- 1 +indLand <- 11 +indSub <- 9 + +brins_chala <- Topologie(Dombes_Chalaronne, reach) +brins_veyle <- Topologie(Dombes_Veyle, reach) + +Total_hru_Chala <- NULL +for (k in brins_chala){ + Total_hru_Chala <- c (Total_hru_Chala,hruparam[hruparam[,indSub] == k,indID])} +Total_hru_Veyle <- NULL +for (k in brins_veyle){ + Total_hru_Veyle <- c (Total_hru_Veyle,hruparam[hruparam[,indSub] == k,indID])} + +for (k in Total_hru_Chala){ + if(length(which(k == hruparam[which(hruparam[,indLand] == 4 ), indID ])) != 0) {hruparam[which(k == hruparam[,indID]),indLand] <- 18} +} +for (k in Total_hru_Veyle){ + if(length(which(k == hruparam[which(hruparam[,indLand] == 4 ), indID ]))!= 0) {hruparam[which(k == hruparam[,indID]),indLand] <- 18} +} +#test : which(hruparam$V11 == 18) +write_new_paramfile('~/JAMS/modeldata/J2K_Rhone_Irrigation/parameter/hrus.par', hruparam, '~/JAMS/modeldata/J2K_Rhone_Irrigation/parameter/hrus_AleatoirIrrig_NewCult.par') + + + +# .dbf +#------------- + +# dbf decoupee sur irrig +hrus_et_cantons$LANDUSEID[order(hrus_et_cantons$CAT)]<-culture_finale +hrus_et_cantons$IRRIG_TYPE[order(hrus_et_cantons$CAT)]<-irrigtype + +# dbf de toutes les hrus +hrus_irrigation_all <- read.dbf('~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig/AleatoirIrrig_hrus.dbf') +for (hrus in hrus_et_cantons$CAT){ + hrus_irrigation_all$LANDUSEID[which(hrus_irrigation_all$CAT == hrus)] <- hrus_et_cantons$LANDUSEID[which(hrus_et_cantons$CAT == hrus)] + hrus_irrigation_all$IRRIG_TYPE[which(hrus_irrigation_all$CAT == hrus)] <- hrus_et_cantons$IRRIG_TYPE[which(hrus_et_cantons$CAT == hrus)] +} +hrus_irrigation_all$IRRIGATED[which(hrus_irrigation_all$CAT %in% hrus_et_cantons$CAT)] <-1 +hrus_irrigation_all$IRRIGATED[which(!(hrus_irrigation_all$CAT %in% hrus_et_cantons$CAT))] <-0 + +# 4/1/2015 correction ex-post pour rétablir 4, 12 ou 18 selon agri plaine, montagne, dombes: +hruparam <- Chargement_param('~/JAMS/modeldata/J2K_Rhone_Irrigation/parameter/','hrus_AleatoirIrrig_NewCult.par') +for (hrus in hruparam$V1){ + hrus_irrigation_all$LANDUSEID[which(hrus_irrigation_all$CAT == hrus)] <- hruparam[which(hruparam$V1==hrus),11] + hrus_irrigation_all$IRRIGATED[which(hrus_irrigation_all$CAT == hrus)] <- hruparam[which(hruparam$V1==hrus),15] + hrus_irrigation_all$IRRIG_TYPE[which(hrus_irrigation_all$CAT == hrus)] <- hruparam[which(hruparam$V1==hrus),16] +} +write.dbf(hrus_irrigation_all, '~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig_CultureNew/AleatoirIrrig_CN_hrus_corr.dbf') + + +# écriture +write.dbf(hrus_et_cantons, '~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig_CultureNew/AleatoirIrrig_CN_hrus_decoupees.dbf') +write.dbf(hrus_irrigation_all, '~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig_CultureNew/AleatoirIrrig_CN_hrus.dbf') + diff --git a/irrigation-R-codes/OuvragePrel_RHONE_source_vol.dbf b/irrigation-R-codes/OuvragePrel_RHONE_source_vol.dbf new file mode 100644 index 0000000000000000000000000000000000000000..c5364665696a1b5a5fb8ff70121d8741a42cd93d Binary files /dev/null and b/irrigation-R-codes/OuvragePrel_RHONE_source_vol.dbf differ diff --git 
a/irrigation-R-codes/OuvragePrel_RHONE_source_vol.prj b/irrigation-R-codes/OuvragePrel_RHONE_source_vol.prj new file mode 100644 index 0000000000000000000000000000000000000000..d2529753fb55e10d1e59f3dc6e12f4f1481f3900 --- /dev/null +++ b/irrigation-R-codes/OuvragePrel_RHONE_source_vol.prj @@ -0,0 +1 @@ +PROJCS["RGF93_Lambert_93",GEOGCS["GCS_RGF93",DATUM["D_RGF_1993",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",700000.0],PARAMETER["False_Northing",6600000.0],PARAMETER["Central_Meridian",3.0],PARAMETER["Standard_Parallel_1",49.0],PARAMETER["Standard_Parallel_2",44.0],PARAMETER["Latitude_Of_Origin",46.5],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/irrigation-R-codes/OuvragePrel_RHONE_source_vol.shp b/irrigation-R-codes/OuvragePrel_RHONE_source_vol.shp new file mode 100644 index 0000000000000000000000000000000000000000..4724e5e27dffa4d748c40834e19c992a33ba83d8 Binary files /dev/null and b/irrigation-R-codes/OuvragePrel_RHONE_source_vol.shp differ diff --git a/irrigation-R-codes/OuvragePrel_RHONE_source_vol.shx b/irrigation-R-codes/OuvragePrel_RHONE_source_vol.shx new file mode 100644 index 0000000000000000000000000000000000000000..28b36e1d6ddabd7621cf61f56d93ca49091785b7 Binary files /dev/null and b/irrigation-R-codes/OuvragePrel_RHONE_source_vol.shx differ diff --git a/irrigation-R-codes/data_exemple_irrigation/bv_tille.cpg b/irrigation-R-codes/data_exemple_irrigation/bv_tille.cpg new file mode 100644 index 0000000000000000000000000000000000000000..3ad133c048f2189041151425a73485649e6c32c0 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/bv_tille.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/irrigation-R-codes/data_exemple_irrigation/bv_tille.dbf b/irrigation-R-codes/data_exemple_irrigation/bv_tille.dbf new file mode 100644 index 0000000000000000000000000000000000000000..d5b5a3cb2c7a1ef085819d8352c95704fcf086ab Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/bv_tille.dbf differ diff --git a/irrigation-R-codes/data_exemple_irrigation/bv_tille.prj b/irrigation-R-codes/data_exemple_irrigation/bv_tille.prj new file mode 100644 index 0000000000000000000000000000000000000000..ae0206b68de2ed81139b89a08ddd36a6b0ed7e35 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/bv_tille.prj @@ -0,0 +1 @@ +PROJCS["RGF_1993_Lambert_93",GEOGCS["GCS_RGF_1993",DATUM["D_RGF_1993",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",700000.0],PARAMETER["False_Northing",6600000.0],PARAMETER["Central_Meridian",3.0],PARAMETER["Standard_Parallel_1",49.0],PARAMETER["Standard_Parallel_2",44.0],PARAMETER["Latitude_Of_Origin",46.5],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/irrigation-R-codes/data_exemple_irrigation/bv_tille.qpj b/irrigation-R-codes/data_exemple_irrigation/bv_tille.qpj new file mode 100644 index 0000000000000000000000000000000000000000..52a60bf44d9f6d1ad3e986837ac8859cf97d222f --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/bv_tille.qpj @@ -0,0 +1 @@ +PROJCS["RGF93 / Lambert-93",GEOGCS["RGF93",DATUM["Reseau_Geodesique_Francais_1993",SPHEROID["GRS 
1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6171"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4171"]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",49],PARAMETER["standard_parallel_2",44],PARAMETER["latitude_of_origin",46.5],PARAMETER["central_meridian",3],PARAMETER["false_easting",700000],PARAMETER["false_northing",6600000],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["X",EAST],AXIS["Y",NORTH],AUTHORITY["EPSG","2154"]] diff --git a/irrigation-R-codes/data_exemple_irrigation/bv_tille.shp b/irrigation-R-codes/data_exemple_irrigation/bv_tille.shp new file mode 100644 index 0000000000000000000000000000000000000000..8e1893607af8dd38411f188ec7ce152ee3e761e1 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/bv_tille.shp differ diff --git a/irrigation-R-codes/data_exemple_irrigation/bv_tille.shx b/irrigation-R-codes/data_exemple_irrigation/bv_tille.shx new file mode 100644 index 0000000000000000000000000000000000000000..22b74470b0b0b27c362926acc3fd8b1d9a3c08db Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/bv_tille.shx differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/.grassrc b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/.grassrc new file mode 100644 index 0000000000000000000000000000000000000000..8cf226c36fa7b6b161b5bd4c1c629630293eff99 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/.grassrc @@ -0,0 +1,3 @@ +GISDBASE: /home/michael.rabotin/1_HYBV/HRU_DELIN/hru-delin/data_exemple/grass_db/grassdata +LOCATION_NAME: hru-delin +MAPSET: PERMANENT diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/DEFAULT_WIND b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/DEFAULT_WIND new file mode 100644 index 0000000000000000000000000000000000000000..7d25eeffc2e97a0059a50d5bdb6b01863fbbafeb --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/DEFAULT_WIND @@ -0,0 +1,18 @@ +proj: 99 +zone: 0 +north: 6738200 +south: 6719600 +east: 861400 +west: 841200 +cols: 202 +rows: 186 +e-w resol: 100 +n-s resol: 100 +top: 1.000000000000000 +bottom: 0.000000000000000 +cols3: 202 +rows3: 186 +depths: 1 +e-w resol3: 100 +n-s resol3: 100 +t-b resol: 1 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/PROJ_EPSG b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/PROJ_EPSG new file mode 100644 index 0000000000000000000000000000000000000000..048c4de6495dd9968a0ac7743ad2fb637694b9e9 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/PROJ_EPSG @@ -0,0 +1 @@ +epsg: 2154 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/PROJ_INFO b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/PROJ_INFO new file mode 100644 index 0000000000000000000000000000000000000000..78ec27c621579f739fb056f50a8862f976593f49 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/PROJ_INFO @@ -0,0 +1,11 @@ +name: RGF93 / Lambert-93 +ellps: grs80 +proj: lcc +lat_0: 46.5 +lon_0: 3 +lat_1: 49 +lat_2: 44 +x_0: 700000 +y_0: 6600000 +towgs84: 0,0,0,0,0,0,0 +no_defs: defined diff 
--git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/PROJ_UNITS b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/PROJ_UNITS new file mode 100644 index 0000000000000000000000000000000000000000..28243d2cf27590185a81cd2969005febb19b368c --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/PROJ_UNITS @@ -0,0 +1,3 @@ +unit: meter +units: meters +meters: 1 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/WIND b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/WIND new file mode 100644 index 0000000000000000000000000000000000000000..7d25eeffc2e97a0059a50d5bdb6b01863fbbafeb --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/WIND @@ -0,0 +1,18 @@ +proj: 99 +zone: 0 +north: 6738200 +south: 6719600 +east: 861400 +west: 841200 +cols: 202 +rows: 186 +e-w resol: 100 +n-s resol: 100 +top: 1.000000000000000 +bottom: 0.000000000000000 +cols3: 202 +rows3: 186 +depths: 1 +e-w resol3: 100 +n-s resol3: 100 +t-b resol: 1 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/accum_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/accum_wk new file mode 100644 index 0000000000000000000000000000000000000000..fc816fb471edac3222c4d96fa3f9ca46ce4d3b36 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/accum_wk @@ -0,0 +1,4 @@ +# 0 categories +Watershed accumulation: overland flow that traverses each cell + +0.00 0.00 0.00 0.00 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/asp_rcl b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/asp_rcl new file mode 100644 index 0000000000000000000000000000000000000000..0aa4f12ebca91323bfeea155c9010288d5b3113e --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/asp_rcl @@ -0,0 +1,4 @@ +# 0 categories + + +0.00 0.00 0.00 0.00 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/dem_aspect b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/dem_aspect new file mode 100644 index 0000000000000000000000000000000000000000..56cec2e25f80840b7094f89b22cbc3b1ba34db5b --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/dem_aspect @@ -0,0 +1,365 @@ +# 360 categories +Aspect counterclockwise in degrees from east + +0.00 0.00 0.00 0.00 +359.5:360.5:east +358.5:359.5:359 degrees ccw from east +357.5:358.5:358 degrees ccw from east +356.5:357.5:357 degrees ccw from east +355.5:356.5:356 degrees ccw from east +354.5:355.5:355 degrees ccw from east +353.5:354.5:354 degrees ccw from east +352.5:353.5:353 degrees ccw from east +351.5:352.5:352 degrees ccw from east +350.5:351.5:351 degrees ccw from east +349.5:350.5:350 degrees ccw from east +348.5:349.5:349 degrees ccw from east +347.5:348.5:348 degrees ccw from east +346.5:347.5:347 degrees ccw from east +345.5:346.5:346 degrees ccw from east +344.5:345.5:345 degrees ccw from east +343.5:344.5:344 degrees ccw from east +342.5:343.5:343 degrees ccw from east +341.5:342.5:342 degrees ccw from east +340.5:341.5:341 degrees ccw from east +339.5:340.5:340 degrees ccw from east 
+338.5:339.5:339 degrees ccw from east +337.5:338.5:338 degrees ccw from east +336.5:337.5:337 degrees ccw from east +335.5:336.5:336 degrees ccw from east +334.5:335.5:335 degrees ccw from east +333.5:334.5:334 degrees ccw from east +332.5:333.5:333 degrees ccw from east +331.5:332.5:332 degrees ccw from east +330.5:331.5:331 degrees ccw from east +329.5:330.5:330 degrees ccw from east +328.5:329.5:329 degrees ccw from east +327.5:328.5:328 degrees ccw from east +326.5:327.5:327 degrees ccw from east +325.5:326.5:326 degrees ccw from east +324.5:325.5:325 degrees ccw from east +323.5:324.5:324 degrees ccw from east +322.5:323.5:323 degrees ccw from east +321.5:322.5:322 degrees ccw from east +320.5:321.5:321 degrees ccw from east +319.5:320.5:320 degrees ccw from east +318.5:319.5:319 degrees ccw from east +317.5:318.5:318 degrees ccw from east +316.5:317.5:317 degrees ccw from east +315.5:316.5:316 degrees ccw from east +314.5:315.5:south ccw of east +313.5:314.5:314 degrees ccw from east +312.5:313.5:313 degrees ccw from east +311.5:312.5:312 degrees ccw from east +310.5:311.5:311 degrees ccw from east +309.5:310.5:310 degrees ccw from east +308.5:309.5:309 degrees ccw from east +307.5:308.5:308 degrees ccw from east +306.5:307.5:307 degrees ccw from east +305.5:306.5:306 degrees ccw from east +304.5:305.5:305 degrees ccw from east +303.5:304.5:304 degrees ccw from east +302.5:303.5:303 degrees ccw from east +301.5:302.5:302 degrees ccw from east +300.5:301.5:301 degrees ccw from east +299.5:300.5:300 degrees ccw from east +298.5:299.5:299 degrees ccw from east +297.5:298.5:298 degrees ccw from east +296.5:297.5:297 degrees ccw from east +295.5:296.5:296 degrees ccw from east +294.5:295.5:295 degrees ccw from east +293.5:294.5:294 degrees ccw from east +292.5:293.5:293 degrees ccw from east +291.5:292.5:292 degrees ccw from east +290.5:291.5:291 degrees ccw from east +289.5:290.5:290 degrees ccw from east +288.5:289.5:289 degrees ccw from east +287.5:288.5:288 degrees ccw from east +286.5:287.5:287 degrees ccw from east +285.5:286.5:286 degrees ccw from east +284.5:285.5:285 degrees ccw from east +283.5:284.5:284 degrees ccw from east +282.5:283.5:283 degrees ccw from east +281.5:282.5:282 degrees ccw from east +280.5:281.5:281 degrees ccw from east +279.5:280.5:280 degrees ccw from east +278.5:279.5:279 degrees ccw from east +277.5:278.5:278 degrees ccw from east +276.5:277.5:277 degrees ccw from east +275.5:276.5:276 degrees ccw from east +274.5:275.5:275 degrees ccw from east +273.5:274.5:274 degrees ccw from east +272.5:273.5:273 degrees ccw from east +271.5:272.5:272 degrees ccw from east +270.5:271.5:271 degrees ccw from east +269.5:270.5:south +268.5:269.5:269 degrees ccw from east +267.5:268.5:268 degrees ccw from east +266.5:267.5:267 degrees ccw from east +265.5:266.5:266 degrees ccw from east +264.5:265.5:265 degrees ccw from east +263.5:264.5:264 degrees ccw from east +262.5:263.5:263 degrees ccw from east +261.5:262.5:262 degrees ccw from east +260.5:261.5:261 degrees ccw from east +259.5:260.5:260 degrees ccw from east +258.5:259.5:259 degrees ccw from east +257.5:258.5:258 degrees ccw from east +256.5:257.5:257 degrees ccw from east +255.5:256.5:256 degrees ccw from east +254.5:255.5:255 degrees ccw from east +253.5:254.5:254 degrees ccw from east +252.5:253.5:253 degrees ccw from east +251.5:252.5:252 degrees ccw from east +250.5:251.5:251 degrees ccw from east +249.5:250.5:250 degrees ccw from east +248.5:249.5:249 degrees ccw from east +247.5:248.5:248 degrees ccw from 
east +246.5:247.5:247 degrees ccw from east +245.5:246.5:246 degrees ccw from east +244.5:245.5:245 degrees ccw from east +243.5:244.5:244 degrees ccw from east +242.5:243.5:243 degrees ccw from east +241.5:242.5:242 degrees ccw from east +240.5:241.5:241 degrees ccw from east +239.5:240.5:240 degrees ccw from east +238.5:239.5:239 degrees ccw from east +237.5:238.5:238 degrees ccw from east +236.5:237.5:237 degrees ccw from east +235.5:236.5:236 degrees ccw from east +234.5:235.5:235 degrees ccw from east +233.5:234.5:234 degrees ccw from east +232.5:233.5:233 degrees ccw from east +231.5:232.5:232 degrees ccw from east +230.5:231.5:231 degrees ccw from east +229.5:230.5:230 degrees ccw from east +228.5:229.5:229 degrees ccw from east +227.5:228.5:228 degrees ccw from east +226.5:227.5:227 degrees ccw from east +225.5:226.5:226 degrees ccw from east +224.5:225.5:south ccw of west +223.5:224.5:224 degrees ccw from east +222.5:223.5:223 degrees ccw from east +221.5:222.5:222 degrees ccw from east +220.5:221.5:221 degrees ccw from east +219.5:220.5:220 degrees ccw from east +218.5:219.5:219 degrees ccw from east +217.5:218.5:218 degrees ccw from east +216.5:217.5:217 degrees ccw from east +215.5:216.5:216 degrees ccw from east +214.5:215.5:215 degrees ccw from east +213.5:214.5:214 degrees ccw from east +212.5:213.5:213 degrees ccw from east +211.5:212.5:212 degrees ccw from east +210.5:211.5:211 degrees ccw from east +209.5:210.5:210 degrees ccw from east +208.5:209.5:209 degrees ccw from east +207.5:208.5:208 degrees ccw from east +206.5:207.5:207 degrees ccw from east +205.5:206.5:206 degrees ccw from east +204.5:205.5:205 degrees ccw from east +203.5:204.5:204 degrees ccw from east +202.5:203.5:203 degrees ccw from east +201.5:202.5:202 degrees ccw from east +200.5:201.5:201 degrees ccw from east +199.5:200.5:200 degrees ccw from east +198.5:199.5:199 degrees ccw from east +197.5:198.5:198 degrees ccw from east +196.5:197.5:197 degrees ccw from east +195.5:196.5:196 degrees ccw from east +194.5:195.5:195 degrees ccw from east +193.5:194.5:194 degrees ccw from east +192.5:193.5:193 degrees ccw from east +191.5:192.5:192 degrees ccw from east +190.5:191.5:191 degrees ccw from east +189.5:190.5:190 degrees ccw from east +188.5:189.5:189 degrees ccw from east +187.5:188.5:188 degrees ccw from east +186.5:187.5:187 degrees ccw from east +185.5:186.5:186 degrees ccw from east +184.5:185.5:185 degrees ccw from east +183.5:184.5:184 degrees ccw from east +182.5:183.5:183 degrees ccw from east +181.5:182.5:182 degrees ccw from east +180.5:181.5:181 degrees ccw from east +179.5:180.5:west +178.5:179.5:179 degrees ccw from east +177.5:178.5:178 degrees ccw from east +176.5:177.5:177 degrees ccw from east +175.5:176.5:176 degrees ccw from east +174.5:175.5:175 degrees ccw from east +173.5:174.5:174 degrees ccw from east +172.5:173.5:173 degrees ccw from east +171.5:172.5:172 degrees ccw from east +170.5:171.5:171 degrees ccw from east +169.5:170.5:170 degrees ccw from east +168.5:169.5:169 degrees ccw from east +167.5:168.5:168 degrees ccw from east +166.5:167.5:167 degrees ccw from east +165.5:166.5:166 degrees ccw from east +164.5:165.5:165 degrees ccw from east +163.5:164.5:164 degrees ccw from east +162.5:163.5:163 degrees ccw from east +161.5:162.5:162 degrees ccw from east +160.5:161.5:161 degrees ccw from east +159.5:160.5:160 degrees ccw from east +158.5:159.5:159 degrees ccw from east +157.5:158.5:158 degrees ccw from east +156.5:157.5:157 degrees ccw from east +155.5:156.5:156 degrees ccw 
from east +154.5:155.5:155 degrees ccw from east +153.5:154.5:154 degrees ccw from east +152.5:153.5:153 degrees ccw from east +151.5:152.5:152 degrees ccw from east +150.5:151.5:151 degrees ccw from east +149.5:150.5:150 degrees ccw from east +148.5:149.5:149 degrees ccw from east +147.5:148.5:148 degrees ccw from east +146.5:147.5:147 degrees ccw from east +145.5:146.5:146 degrees ccw from east +144.5:145.5:145 degrees ccw from east +143.5:144.5:144 degrees ccw from east +142.5:143.5:143 degrees ccw from east +141.5:142.5:142 degrees ccw from east +140.5:141.5:141 degrees ccw from east +139.5:140.5:140 degrees ccw from east +138.5:139.5:139 degrees ccw from east +137.5:138.5:138 degrees ccw from east +136.5:137.5:137 degrees ccw from east +135.5:136.5:136 degrees ccw from east +134.5:135.5:north ccw of west +133.5:134.5:134 degrees ccw from east +132.5:133.5:133 degrees ccw from east +131.5:132.5:132 degrees ccw from east +130.5:131.5:131 degrees ccw from east +129.5:130.5:130 degrees ccw from east +128.5:129.5:129 degrees ccw from east +127.5:128.5:128 degrees ccw from east +126.5:127.5:127 degrees ccw from east +125.5:126.5:126 degrees ccw from east +124.5:125.5:125 degrees ccw from east +123.5:124.5:124 degrees ccw from east +122.5:123.5:123 degrees ccw from east +121.5:122.5:122 degrees ccw from east +120.5:121.5:121 degrees ccw from east +119.5:120.5:120 degrees ccw from east +118.5:119.5:119 degrees ccw from east +117.5:118.5:118 degrees ccw from east +116.5:117.5:117 degrees ccw from east +115.5:116.5:116 degrees ccw from east +114.5:115.5:115 degrees ccw from east +113.5:114.5:114 degrees ccw from east +112.5:113.5:113 degrees ccw from east +111.5:112.5:112 degrees ccw from east +110.5:111.5:111 degrees ccw from east +109.5:110.5:110 degrees ccw from east +108.5:109.5:109 degrees ccw from east +107.5:108.5:108 degrees ccw from east +106.5:107.5:107 degrees ccw from east +105.5:106.5:106 degrees ccw from east +104.5:105.5:105 degrees ccw from east +103.5:104.5:104 degrees ccw from east +102.5:103.5:103 degrees ccw from east +101.5:102.5:102 degrees ccw from east +100.5:101.5:101 degrees ccw from east +99.5:100.5:100 degrees ccw from east +98.5:99.5:99 degrees ccw from east +97.5:98.5:98 degrees ccw from east +96.5:97.5:97 degrees ccw from east +95.5:96.5:96 degrees ccw from east +94.5:95.5:95 degrees ccw from east +93.5:94.5:94 degrees ccw from east +92.5:93.5:93 degrees ccw from east +91.5:92.5:92 degrees ccw from east +90.5:91.5:91 degrees ccw from east +89.5:90.5:north +88.5:89.5:89 degrees ccw from east +87.5:88.5:88 degrees ccw from east +86.5:87.5:87 degrees ccw from east +85.5:86.5:86 degrees ccw from east +84.5:85.5:85 degrees ccw from east +83.5:84.5:84 degrees ccw from east +82.5:83.5:83 degrees ccw from east +81.5:82.5:82 degrees ccw from east +80.5:81.5:81 degrees ccw from east +79.5:80.5:80 degrees ccw from east +78.5:79.5:79 degrees ccw from east +77.5:78.5:78 degrees ccw from east +76.5:77.5:77 degrees ccw from east +75.5:76.5:76 degrees ccw from east +74.5:75.5:75 degrees ccw from east +73.5:74.5:74 degrees ccw from east +72.5:73.5:73 degrees ccw from east +71.5:72.5:72 degrees ccw from east +70.5:71.5:71 degrees ccw from east +69.5:70.5:70 degrees ccw from east +68.5:69.5:69 degrees ccw from east +67.5:68.5:68 degrees ccw from east +66.5:67.5:67 degrees ccw from east +65.5:66.5:66 degrees ccw from east +64.5:65.5:65 degrees ccw from east +63.5:64.5:64 degrees ccw from east +62.5:63.5:63 degrees ccw from east +61.5:62.5:62 degrees ccw from east +60.5:61.5:61 
degrees ccw from east +59.5:60.5:60 degrees ccw from east +58.5:59.5:59 degrees ccw from east +57.5:58.5:58 degrees ccw from east +56.5:57.5:57 degrees ccw from east +55.5:56.5:56 degrees ccw from east +54.5:55.5:55 degrees ccw from east +53.5:54.5:54 degrees ccw from east +52.5:53.5:53 degrees ccw from east +51.5:52.5:52 degrees ccw from east +50.5:51.5:51 degrees ccw from east +49.5:50.5:50 degrees ccw from east +48.5:49.5:49 degrees ccw from east +47.5:48.5:48 degrees ccw from east +46.5:47.5:47 degrees ccw from east +45.5:46.5:46 degrees ccw from east +44.5:45.5:north ccw of east +43.5:44.5:44 degrees ccw from east +42.5:43.5:43 degrees ccw from east +41.5:42.5:42 degrees ccw from east +40.5:41.5:41 degrees ccw from east +39.5:40.5:40 degrees ccw from east +38.5:39.5:39 degrees ccw from east +37.5:38.5:38 degrees ccw from east +36.5:37.5:37 degrees ccw from east +35.5:36.5:36 degrees ccw from east +34.5:35.5:35 degrees ccw from east +33.5:34.5:34 degrees ccw from east +32.5:33.5:33 degrees ccw from east +31.5:32.5:32 degrees ccw from east +30.5:31.5:31 degrees ccw from east +29.5:30.5:30 degrees ccw from east +28.5:29.5:29 degrees ccw from east +27.5:28.5:28 degrees ccw from east +26.5:27.5:27 degrees ccw from east +25.5:26.5:26 degrees ccw from east +24.5:25.5:25 degrees ccw from east +23.5:24.5:24 degrees ccw from east +22.5:23.5:23 degrees ccw from east +21.5:22.5:22 degrees ccw from east +20.5:21.5:21 degrees ccw from east +19.5:20.5:20 degrees ccw from east +18.5:19.5:19 degrees ccw from east +17.5:18.5:18 degrees ccw from east +16.5:17.5:17 degrees ccw from east +15.5:16.5:16 degrees ccw from east +14.5:15.5:15 degrees ccw from east +13.5:14.5:14 degrees ccw from east +12.5:13.5:13 degrees ccw from east +11.5:12.5:12 degrees ccw from east +10.5:11.5:11 degrees ccw from east +9.5:10.5:10 degrees ccw from east +8.5:9.5:9 degrees ccw from east +7.5:8.5:8 degrees ccw from east +6.5:7.5:7 degrees ccw from east +5.5:6.5:6 degrees ccw from east +4.5:5.5:5 degrees ccw from east +3.5:4.5:4 degrees ccw from east +2.5:3.5:3 degrees ccw from east +1.5:2.5:2 degrees ccw from east +0.5:1.5:1 degree ccw from east +0:no aspect diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/dem_filled b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/dem_filled new file mode 100644 index 0000000000000000000000000000000000000000..0aa4f12ebca91323bfeea155c9010288d5b3113e --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/dem_filled @@ -0,0 +1,4 @@ +# 0 categories + + +0.00 0.00 0.00 0.00 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/dem_rcl b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/dem_rcl new file mode 100644 index 0000000000000000000000000000000000000000..0aa4f12ebca91323bfeea155c9010288d5b3113e --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/dem_rcl @@ -0,0 +1,4 @@ +# 0 categories + + +0.00 0.00 0.00 0.00 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/dem_slope b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/dem_slope new file mode 100644 index 0000000000000000000000000000000000000000..8ea038ecd9b2721d8aec7737058103acc2d122e9 --- /dev/null +++ 
b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/dem_slope @@ -0,0 +1,32 @@ +# 27 categories +slope in degrees + +0.00 0.00 0.00 0.00 +26.5:27.5:27 degrees +25.5:26.5:26 degrees +24.5:25.5:25 degrees +23.5:24.5:24 degrees +22.5:23.5:23 degrees +21.5:22.5:22 degrees +20.5:21.5:21 degrees +19.5:20.5:20 degrees +18.5:19.5:19 degrees +17.5:18.5:18 degrees +16.5:17.5:17 degrees +15.5:16.5:16 degrees +14.5:15.5:15 degrees +13.5:14.5:14 degrees +12.5:13.5:13 degrees +11.5:12.5:12 degrees +10.5:11.5:11 degrees +9.5:10.5:10 degrees +8.5:9.5:9 degrees +7.5:8.5:8 degrees +6.5:7.5:7 degrees +5.5:6.5:6 degrees +4.5:5.5:5 degrees +3.5:4.5:4 degrees +2.5:3.5:3 degrees +1.5:2.5:2 degrees +0.5:1.5:1 degree +0:0.5:zero slope diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/dem_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/dem_wk new file mode 100644 index 0000000000000000000000000000000000000000..0aa4f12ebca91323bfeea155c9010288d5b3113e --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/dem_wk @@ -0,0 +1,4 @@ +# 0 categories + + +0.00 0.00 0.00 0.00 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/dir_temp b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/dir_temp new file mode 100644 index 0000000000000000000000000000000000000000..0aa4f12ebca91323bfeea155c9010288d5b3113e --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/dir_temp @@ -0,0 +1,4 @@ +# 0 categories + + +0.00 0.00 0.00 0.00 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/drain_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/drain_wk new file mode 100644 index 0000000000000000000000000000000000000000..1813f80315f1278b2f91f81e2458da06d21e28b8 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/drain_wk @@ -0,0 +1,4 @@ +# 0 categories +Watershed drainage direction (CCW from East divided by 45deg) + +0.00 0.00 0.00 0.00 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/halfbasins b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/halfbasins new file mode 100644 index 0000000000000000000000000000000000000000..c103cf5859e88eadf78dea16969dd328eb085b00 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/halfbasins @@ -0,0 +1,4 @@ +# 0 categories +Watershed half-basins + +0.00 0.00 0.00 0.00 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/slp_rcl b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/slp_rcl new file mode 100644 index 0000000000000000000000000000000000000000..0aa4f12ebca91323bfeea155c9010288d5b3113e --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/slp_rcl @@ -0,0 +1,4 @@ +# 0 categories + + +0.00 0.00 0.00 0.00 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/streams_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/streams_wk new file mode 100644 index 
0000000000000000000000000000000000000000..df004bc74b849113750d3753db7e9163e75a981e --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/streams_wk @@ -0,0 +1,4 @@ +# 0 categories +Watershed stream segments + +0.00 0.00 0.00 0.00 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/subbasins_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/subbasins_wk new file mode 100644 index 0000000000000000000000000000000000000000..06b727c0ff8184d3046d84a63249eab05586bcfc --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/subbasins_wk @@ -0,0 +1,4 @@ +# 0 categories +Watershed basins + +0.00 0.00 0.00 0.00 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/unfilled_areas b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/unfilled_areas new file mode 100644 index 0000000000000000000000000000000000000000..0aa4f12ebca91323bfeea155c9010288d5b3113e --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cats/unfilled_areas @@ -0,0 +1,4 @@ +# 0 categories + + +0.00 0.00 0.00 0.00 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/accum_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/accum_wk new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/asp_rcl b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/asp_rcl new file mode 100644 index 0000000000000000000000000000000000000000..d6e75d526a50386120ca9f119bdcf5ee7c7ae33f Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/asp_rcl differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/dem_aspect b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/dem_aspect new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/dem_filled b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/dem_filled new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/dem_rcl b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/dem_rcl new file mode 100644 index 0000000000000000000000000000000000000000..0b39e22afa3ff22a981e7a57b1d09afc0e655ab1 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/dem_rcl differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/dem_slope b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/dem_slope new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git 
a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/dem_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/dem_wk new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/dir_temp b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/dir_temp new file mode 100644 index 0000000000000000000000000000000000000000..053a745ac07723b7863397867f660bc082c7b369 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/dir_temp differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/drain_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/drain_wk new file mode 100644 index 0000000000000000000000000000000000000000..ce8320aff0e1a1afe937e65a22a61aeb3def2a4f Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/drain_wk differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/halfbasins b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/halfbasins new file mode 100644 index 0000000000000000000000000000000000000000..8e0ef888fff5706b53953acdb1ba25cabcf80a75 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/halfbasins differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/slp_rcl b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/slp_rcl new file mode 100644 index 0000000000000000000000000000000000000000..1cf32869c7cfe8364ddea6eb4a0cf765a7badbf9 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/slp_rcl differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/streams_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/streams_wk new file mode 100644 index 0000000000000000000000000000000000000000..9a272d55a7e906b9af41f091b71e57f555227092 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/streams_wk differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/subbasins_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/subbasins_wk new file mode 100644 index 0000000000000000000000000000000000000000..3df069defbd177b848aaa3af6075ffc2c22eb02c Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/subbasins_wk differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/unfilled_areas b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/unfilled_areas new file mode 100644 index 0000000000000000000000000000000000000000..5bdbff6aa154c7227ad0fc25e1f1ce4474757098 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell/unfilled_areas differ diff --git 
a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/accum_wk/f_format b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/accum_wk/f_format new file mode 100644 index 0000000000000000000000000000000000000000..97c7edd5f5eaf94c355871c8ae1799005459d48d --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/accum_wk/f_format @@ -0,0 +1,3 @@ +type: double +byte_order: xdr +lzw_compression_bits: -1 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/accum_wk/f_quant b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/accum_wk/f_quant new file mode 100644 index 0000000000000000000000000000000000000000..3df39b8aff20cd5020c1fd3e7c04fc8f9b50bf6c --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/accum_wk/f_quant @@ -0,0 +1 @@ +round \ No newline at end of file diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/accum_wk/f_range b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/accum_wk/f_range new file mode 100644 index 0000000000000000000000000000000000000000..ceefc27a81f7a86a1428945e86ba95b15bb816c1 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/accum_wk/f_range differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/accum_wk/nullcmpr b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/accum_wk/nullcmpr new file mode 100644 index 0000000000000000000000000000000000000000..afed4f27510f6658b279580467f51006ce07c872 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/accum_wk/nullcmpr differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/accum_wk/stats b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/accum_wk/stats new file mode 100644 index 0000000000000000000000000000000000000000..fd7f0d7355528ef2024534c5ebbe3874c49be768 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/accum_wk/stats differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/asp_rcl/nullcmpr b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/asp_rcl/nullcmpr new file mode 100644 index 0000000000000000000000000000000000000000..2ab7d8a3028f582e9e8aaf8cb553e070bca544ab Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/asp_rcl/nullcmpr differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/asp_rcl/range b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/asp_rcl/range new file mode 100644 index 0000000000000000000000000000000000000000..9c67935b4f7ea6b9e676e8dae3b32b6f5aa810ee --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/asp_rcl/range @@ -0,0 +1 @@ +1 4 diff --git 
a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/asp_rcl/stats b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/asp_rcl/stats new file mode 100644 index 0000000000000000000000000000000000000000..424c4d40ece75bb1d0c49804bd855993129947fe Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/asp_rcl/stats differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_aspect/f_format b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_aspect/f_format new file mode 100644 index 0000000000000000000000000000000000000000..ac88f06b5c86836632b105edfa8fc02e166242c7 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_aspect/f_format @@ -0,0 +1,3 @@ +type: float +byte_order: xdr +lzw_compression_bits: -1 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_aspect/f_quant b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_aspect/f_quant new file mode 100644 index 0000000000000000000000000000000000000000..b426134bcec204c89614e64d367590d7c7e4a4ea --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_aspect/f_quant @@ -0,0 +1 @@ +0:360:0:360 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_aspect/f_range b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_aspect/f_range new file mode 100644 index 0000000000000000000000000000000000000000..dd6a62896cfd6715958b15aca9c3f04047c00726 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_aspect/f_range differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_aspect/nullcmpr b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_aspect/nullcmpr new file mode 100644 index 0000000000000000000000000000000000000000..2ab7d8a3028f582e9e8aaf8cb553e070bca544ab Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_aspect/nullcmpr differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_aspect/stats b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_aspect/stats new file mode 100644 index 0000000000000000000000000000000000000000..54af605d6051ebb9a592b6b438ef95702f6098aa --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_aspect/stats @@ -0,0 +1 @@ +AZ"€d¶šAÙŠp²5…À \ No newline at end of file diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_filled/f_format b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_filled/f_format new file mode 100644 index 0000000000000000000000000000000000000000..ac88f06b5c86836632b105edfa8fc02e166242c7 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_filled/f_format @@ -0,0 +1,3 @@ +type: 
float +byte_order: xdr +lzw_compression_bits: -1 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_filled/f_quant b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_filled/f_quant new file mode 100644 index 0000000000000000000000000000000000000000..3df39b8aff20cd5020c1fd3e7c04fc8f9b50bf6c --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_filled/f_quant @@ -0,0 +1 @@ +round \ No newline at end of file diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_filled/f_range b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_filled/f_range new file mode 100644 index 0000000000000000000000000000000000000000..c369c1544e47052bdf021bdccc1bc63d891a9616 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_filled/f_range differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_filled/nullcmpr b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_filled/nullcmpr new file mode 100644 index 0000000000000000000000000000000000000000..afed4f27510f6658b279580467f51006ce07c872 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_filled/nullcmpr differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_filled/stats b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_filled/stats new file mode 100644 index 0000000000000000000000000000000000000000..d44a1e23696e45d79d2ce70a9a1111f1e7746642 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_filled/stats differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_rcl/nullcmpr b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_rcl/nullcmpr new file mode 100644 index 0000000000000000000000000000000000000000..afed4f27510f6658b279580467f51006ce07c872 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_rcl/nullcmpr differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_rcl/range b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_rcl/range new file mode 100644 index 0000000000000000000000000000000000000000..9c67935b4f7ea6b9e676e8dae3b32b6f5aa810ee --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_rcl/range @@ -0,0 +1 @@ +1 4 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_rcl/stats b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_rcl/stats new file mode 100644 index 0000000000000000000000000000000000000000..59deaf3b01d3a94f026524bdcc1020859440f2b6 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_rcl/stats differ diff --git 
a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_slope/f_format b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_slope/f_format new file mode 100644 index 0000000000000000000000000000000000000000..ac88f06b5c86836632b105edfa8fc02e166242c7 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_slope/f_format @@ -0,0 +1,3 @@ +type: float +byte_order: xdr +lzw_compression_bits: -1 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_slope/f_quant b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_slope/f_quant new file mode 100644 index 0000000000000000000000000000000000000000..4311833eda46b9f2c44a705cb96f6da37ac595a6 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_slope/f_quant @@ -0,0 +1 @@ +0:90:0:90 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_slope/f_range b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_slope/f_range new file mode 100644 index 0000000000000000000000000000000000000000..4e52d0a5bcc92eff0cf8ba64d39bf2df85da149a Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_slope/f_range differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_slope/nullcmpr b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_slope/nullcmpr new file mode 100644 index 0000000000000000000000000000000000000000..2ab7d8a3028f582e9e8aaf8cb553e070bca544ab Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_slope/nullcmpr differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_slope/stats b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_slope/stats new file mode 100644 index 0000000000000000000000000000000000000000..523cd83d0844bb75c086195870b65190b3890ef4 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_slope/stats @@ -0,0 +1 @@ +A$ `A6Ýp®ý«À \ No newline at end of file diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_wk/f_format b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_wk/f_format new file mode 100644 index 0000000000000000000000000000000000000000..ac88f06b5c86836632b105edfa8fc02e166242c7 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_wk/f_format @@ -0,0 +1,3 @@ +type: float +byte_order: xdr +lzw_compression_bits: -1 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_wk/f_quant b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_wk/f_quant new file mode 100644 index 0000000000000000000000000000000000000000..3df39b8aff20cd5020c1fd3e7c04fc8f9b50bf6c --- /dev/null +++ 
b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_wk/f_quant @@ -0,0 +1 @@ +round \ No newline at end of file diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_wk/f_range b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_wk/f_range new file mode 100644 index 0000000000000000000000000000000000000000..b5881ac2c02b51b4fac9b767e2d9ff4ec77fcf4e Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_wk/f_range differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_wk/nullcmpr b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_wk/nullcmpr new file mode 100644 index 0000000000000000000000000000000000000000..afed4f27510f6658b279580467f51006ce07c872 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_wk/nullcmpr differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_wk/stats b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_wk/stats new file mode 100644 index 0000000000000000000000000000000000000000..df70dc7e71f5e5deccd16d21249d97cbd5698fe5 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dem_wk/stats differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dir_temp/nullcmpr b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dir_temp/nullcmpr new file mode 100644 index 0000000000000000000000000000000000000000..afed4f27510f6658b279580467f51006ce07c872 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dir_temp/nullcmpr differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dir_temp/range b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dir_temp/range new file mode 100644 index 0000000000000000000000000000000000000000..54cb036dc13895ed9bb4aed88630e8eceb9c13cd --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dir_temp/range @@ -0,0 +1 @@ +45 360 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dir_temp/stats b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dir_temp/stats new file mode 100644 index 0000000000000000000000000000000000000000..f07f72ab4f190a6c8352c9153479fc1a8f52d5b5 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/dir_temp/stats differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/drain_wk/nullcmpr b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/drain_wk/nullcmpr new file mode 100644 index 0000000000000000000000000000000000000000..afed4f27510f6658b279580467f51006ce07c872 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/drain_wk/nullcmpr differ 
diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/drain_wk/range b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/drain_wk/range new file mode 100644 index 0000000000000000000000000000000000000000..4887a3a152fa7e45f2c7945074e35b016717fae3 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/drain_wk/range @@ -0,0 +1 @@ +-8 8 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/drain_wk/stats b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/drain_wk/stats new file mode 100644 index 0000000000000000000000000000000000000000..874d193ad36d9530dcee28e4c9434cdabdfe2cb2 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/drain_wk/stats differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/halfbasins/nullcmpr b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/halfbasins/nullcmpr new file mode 100644 index 0000000000000000000000000000000000000000..6e90e28e4bc6b912591e0d0a25ba63de87e217af Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/halfbasins/nullcmpr differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/halfbasins/range b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/halfbasins/range new file mode 100644 index 0000000000000000000000000000000000000000..8219151fbe5491d9c58209624fb570486e157edd --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/halfbasins/range @@ -0,0 +1 @@ +1 1950 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/halfbasins/stats b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/halfbasins/stats new file mode 100644 index 0000000000000000000000000000000000000000..45b5890a97be3d1129dad87fc17290393789cae8 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/halfbasins/stats differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/slp_rcl/nullcmpr b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/slp_rcl/nullcmpr new file mode 100644 index 0000000000000000000000000000000000000000..2ab7d8a3028f582e9e8aaf8cb553e070bca544ab Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/slp_rcl/nullcmpr differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/slp_rcl/range b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/slp_rcl/range new file mode 100644 index 0000000000000000000000000000000000000000..dcefd2e58854c2438089e15907481d73ee141e9d --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/slp_rcl/range @@ -0,0 +1 @@ +1 30 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/slp_rcl/stats 
b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/slp_rcl/stats new file mode 100644 index 0000000000000000000000000000000000000000..b237709900b2266d7daf9c4a84d4ca868c18de48 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/slp_rcl/stats differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/streams_wk/nullcmpr b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/streams_wk/nullcmpr new file mode 100644 index 0000000000000000000000000000000000000000..9940c4dc66102507c527873235684daefa1f5e1d Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/streams_wk/nullcmpr differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/streams_wk/range b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/streams_wk/range new file mode 100644 index 0000000000000000000000000000000000000000..b7f79b06d8a1ddc0da2058f9f155f356bf0f4656 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/streams_wk/range @@ -0,0 +1 @@ +2 1950 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/streams_wk/stats b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/streams_wk/stats new file mode 100644 index 0000000000000000000000000000000000000000..309b6b7bf2a1c32a166c98a76825061830bd9e9e Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/streams_wk/stats differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/subbasins_wk/nullcmpr b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/subbasins_wk/nullcmpr new file mode 100644 index 0000000000000000000000000000000000000000..6e90e28e4bc6b912591e0d0a25ba63de87e217af Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/subbasins_wk/nullcmpr differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/subbasins_wk/range b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/subbasins_wk/range new file mode 100644 index 0000000000000000000000000000000000000000..b7f79b06d8a1ddc0da2058f9f155f356bf0f4656 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/subbasins_wk/range @@ -0,0 +1 @@ +2 1950 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/subbasins_wk/stats b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/subbasins_wk/stats new file mode 100644 index 0000000000000000000000000000000000000000..4d91f2634d4acb722f9d5f06cd6a61c82c183032 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/subbasins_wk/stats differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/unfilled_areas/nullcmpr 
b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/unfilled_areas/nullcmpr new file mode 100644 index 0000000000000000000000000000000000000000..afed4f27510f6658b279580467f51006ce07c872 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/unfilled_areas/nullcmpr differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/unfilled_areas/range b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/unfilled_areas/range new file mode 100644 index 0000000000000000000000000000000000000000..b748e2dcfcbc7db3aae214293cef6cb2afbd0a65 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/unfilled_areas/range @@ -0,0 +1 @@ +0 0 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/unfilled_areas/stats b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/unfilled_areas/stats new file mode 100644 index 0000000000000000000000000000000000000000..f486e22ce0dd5fee3ca3457dc73ff8244b9a359b Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cell_misc/unfilled_areas/stats differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/accum_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/accum_wk new file mode 100644 index 0000000000000000000000000000000000000000..6e902efb738f28a4ac74e465003395f69a3f8ee8 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/accum_wk @@ -0,0 +1,12 @@ +proj: 99 +zone: 0 +north: 6738200 +south: 6719600 +east: 861400 +west: 841200 +cols: 202 +rows: 186 +e-w resol: 100 +n-s resol: 100 +format: -1 +compressed: 5 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/asp_rcl b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/asp_rcl new file mode 100644 index 0000000000000000000000000000000000000000..d72c7eaa6e077722b409471774404b33b7461060 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/asp_rcl @@ -0,0 +1,12 @@ +proj: 99 +zone: 0 +north: 6738200 +south: 6719600 +east: 861400 +west: 841200 +cols: 202 +rows: 186 +e-w resol: 100 +n-s resol: 100 +format: 0 +compressed: 5 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/dem_aspect b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/dem_aspect new file mode 100644 index 0000000000000000000000000000000000000000..6e902efb738f28a4ac74e465003395f69a3f8ee8 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/dem_aspect @@ -0,0 +1,12 @@ +proj: 99 +zone: 0 +north: 6738200 +south: 6719600 +east: 861400 +west: 841200 +cols: 202 +rows: 186 +e-w resol: 100 +n-s resol: 100 +format: -1 +compressed: 5 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/dem_filled b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/dem_filled new file mode 100644 index 
0000000000000000000000000000000000000000..6e902efb738f28a4ac74e465003395f69a3f8ee8 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/dem_filled @@ -0,0 +1,12 @@ +proj: 99 +zone: 0 +north: 6738200 +south: 6719600 +east: 861400 +west: 841200 +cols: 202 +rows: 186 +e-w resol: 100 +n-s resol: 100 +format: -1 +compressed: 5 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/dem_rcl b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/dem_rcl new file mode 100644 index 0000000000000000000000000000000000000000..d72c7eaa6e077722b409471774404b33b7461060 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/dem_rcl @@ -0,0 +1,12 @@ +proj: 99 +zone: 0 +north: 6738200 +south: 6719600 +east: 861400 +west: 841200 +cols: 202 +rows: 186 +e-w resol: 100 +n-s resol: 100 +format: 0 +compressed: 5 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/dem_slope b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/dem_slope new file mode 100644 index 0000000000000000000000000000000000000000..6e902efb738f28a4ac74e465003395f69a3f8ee8 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/dem_slope @@ -0,0 +1,12 @@ +proj: 99 +zone: 0 +north: 6738200 +south: 6719600 +east: 861400 +west: 841200 +cols: 202 +rows: 186 +e-w resol: 100 +n-s resol: 100 +format: -1 +compressed: 5 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/dem_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/dem_wk new file mode 100644 index 0000000000000000000000000000000000000000..6e902efb738f28a4ac74e465003395f69a3f8ee8 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/dem_wk @@ -0,0 +1,12 @@ +proj: 99 +zone: 0 +north: 6738200 +south: 6719600 +east: 861400 +west: 841200 +cols: 202 +rows: 186 +e-w resol: 100 +n-s resol: 100 +format: -1 +compressed: 5 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/dir_temp b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/dir_temp new file mode 100644 index 0000000000000000000000000000000000000000..c78883e31516d97636e72c4ec70974cd1184b478 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/dir_temp @@ -0,0 +1,12 @@ +proj: 99 +zone: 0 +north: 6738200 +south: 6719600 +east: 861400 +west: 841200 +cols: 202 +rows: 186 +e-w resol: 100 +n-s resol: 100 +format: 1 +compressed: 5 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/drain_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/drain_wk new file mode 100644 index 0000000000000000000000000000000000000000..3dc63520caaacee556f783f9a6f19dcc75d4c5a9 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/drain_wk @@ -0,0 +1,12 @@ +proj: 99 +zone: 0 +north: 6738200 +south: 6719600 +east: 861400 +west: 841200 +cols: 202 +rows: 186 +e-w resol: 100 +n-s resol: 100 +format: 3 +compressed: 5 diff --git 
a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/halfbasins b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/halfbasins new file mode 100644 index 0000000000000000000000000000000000000000..c78883e31516d97636e72c4ec70974cd1184b478 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/halfbasins @@ -0,0 +1,12 @@ +proj: 99 +zone: 0 +north: 6738200 +south: 6719600 +east: 861400 +west: 841200 +cols: 202 +rows: 186 +e-w resol: 100 +n-s resol: 100 +format: 1 +compressed: 5 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/slp_rcl b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/slp_rcl new file mode 100644 index 0000000000000000000000000000000000000000..d72c7eaa6e077722b409471774404b33b7461060 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/slp_rcl @@ -0,0 +1,12 @@ +proj: 99 +zone: 0 +north: 6738200 +south: 6719600 +east: 861400 +west: 841200 +cols: 202 +rows: 186 +e-w resol: 100 +n-s resol: 100 +format: 0 +compressed: 5 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/streams_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/streams_wk new file mode 100644 index 0000000000000000000000000000000000000000..c78883e31516d97636e72c4ec70974cd1184b478 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/streams_wk @@ -0,0 +1,12 @@ +proj: 99 +zone: 0 +north: 6738200 +south: 6719600 +east: 861400 +west: 841200 +cols: 202 +rows: 186 +e-w resol: 100 +n-s resol: 100 +format: 1 +compressed: 5 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/subbasins_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/subbasins_wk new file mode 100644 index 0000000000000000000000000000000000000000..c78883e31516d97636e72c4ec70974cd1184b478 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/subbasins_wk @@ -0,0 +1,12 @@ +proj: 99 +zone: 0 +north: 6738200 +south: 6719600 +east: 861400 +west: 841200 +cols: 202 +rows: 186 +e-w resol: 100 +n-s resol: 100 +format: 1 +compressed: 5 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/unfilled_areas b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/unfilled_areas new file mode 100644 index 0000000000000000000000000000000000000000..d72c7eaa6e077722b409471774404b33b7461060 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/cellhd/unfilled_areas @@ -0,0 +1,12 @@ +proj: 99 +zone: 0 +north: 6738200 +south: 6719600 +east: 861400 +west: 841200 +cols: 202 +rows: 186 +e-w resol: 100 +n-s resol: 100 +format: 0 +compressed: 5 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/accum_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/accum_wk new file mode 100644 index 0000000000000000000000000000000000000000..e5659ba0a5c67d15cbbba3476b16aa57bf52369f --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/accum_wk @@ -0,0 +1,12 @@ 
+% -22226 4442 +-22226:0 -1057.0592208395133:0 +-1057.0592208395133:0 -185.25340735301106:0:0:255 +-185.25340735301106:0:0:255 -32.497064803448218:0:255:255 +-32.497064803448218:0:255:255 -11.436441044727948:0:255:0 +-11.436441044727948:0:255:0 -1:255:255:0 +-1:255:255:0 1:255:255:0 +1:255:255:0 11.436441044727948:0:255:0 +11.436441044727948:0:255:0 32.497064803448218:0:255:255 +32.497064803448218:0:255:255 185.25340735301106:0:0:255 +185.25340735301106:0:0:255 1057.0592208395133:0 +1057.0592208395133:0 4442:0 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/dem_aspect b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/dem_aspect new file mode 100644 index 0000000000000000000000000000000000000000..9b14dfab806bdca5065e8fd1307a2c8a56ca47fe --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/dem_aspect @@ -0,0 +1,3 @@ +% 0 359.98919677734375 +0:0 179.99459838867188:255 +179.99459838867188:255 359.98919677734375:0 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/dem_filled b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/dem_filled new file mode 100644 index 0000000000000000000000000000000000000000..beb56ec45f37cab449c12b4471413a402c621931 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/dem_filled @@ -0,0 +1,256 @@ +% 267 538.9730224609375 +267:68:1:84 268.06613424804686:68:2:86 +268.06613424804686:68:2:86 269.13226849609373:69:4:87 +269.13226849609373:69:4:87 270.19840274414065:69:5:89 +270.19840274414065:69:5:89 271.26725672241213:70:7:90 +271.26725672241213:70:7:90 272.33339097045899:70:8:92 +272.33339097045899:70:8:92 273.39952521850586:70:10:93 +273.39952521850586:70:10:93 274.46565946655272:70:11:94 +274.46565946655272:70:11:94 275.53179371459959:71:13:96 +275.53179371459959:71:13:96 276.59792796264651:71:14:97 +276.59792796264651:71:14:97 277.66678194091799:71:16:99 +277.66678194091799:71:16:99 278.73291618896485:71:17:100 +278.73291618896485:71:17:100 279.79905043701172:71:19:101 +279.79905043701172:71:19:101 280.86518468505858:72:20:103 +280.86518468505858:72:20:103 281.93131893310544:72:22:104 +281.93131893310544:72:22:104 282.99745318115237:72:23:105 +282.99745318115237:72:23:105 284.06630715942384:72:24:106 +284.06630715942384:72:24:106 285.13244140747071:72:26:108 +285.13244140747071:72:26:108 286.19857565551757:72:27:109 +286.19857565551757:72:27:109 287.26470990356444:72:28:110 +287.26470990356444:72:28:110 288.3308441516113:72:29:111 +288.3308441516113:72:29:111 289.39697839965822:72:31:112 +289.39697839965822:72:31:112 290.46311264770509:72:32:113 +290.46311264770509:72:32:113 291.53196662597657:72:33:115 +291.53196662597657:72:33:115 292.59810087402343:72:35:116 +292.59810087402343:72:35:116 293.66423512207029:72:36:117 +293.66423512207029:72:36:117 294.73036937011716:72:37:118 +294.73036937011716:72:37:118 295.79650361816408:72:38:119 +295.79650361816408:72:38:119 296.86263786621095:72:40:120 +296.86263786621095:72:40:120 297.93149184448242:72:41:121 +297.93149184448242:72:41:121 298.99762609252929:71:42:122 +298.99762609252929:71:42:122 300.06376034057615:71:44:122 +300.06376034057615:71:44:122 301.12989458862307:71:45:123 +301.12989458862307:71:45:123 302.19602883666994:71:46:124 +302.19602883666994:71:46:124 303.2621630847168:71:47:125 +303.2621630847168:71:47:125 
304.32829733276367:70:48:126 +304.32829733276367:70:48:126 305.39715131103515:70:50:126 +305.39715131103515:70:50:126 306.46328555908201:70:51:127 +306.46328555908201:70:51:127 307.52941980712887:70:52:128 +307.52941980712887:70:52:128 308.5955540551758:69:53:129 +308.5955540551758:69:53:129 309.66168830322266:69:55:129 +309.66168830322266:69:55:129 310.72782255126953:69:56:130 +310.72782255126953:69:56:130 311.796676529541:68:57:131 +311.796676529541:68:57:131 312.86281077758787:68:58:131 +312.86281077758787:68:58:131 313.92894502563479:68:59:132 +313.92894502563479:68:59:132 314.99507927368165:67:61:132 +314.99507927368165:67:61:132 316.06121352172852:67:62:133 +316.06121352172852:67:62:133 317.12734776977538:66:63:133 +317.12734776977538:66:63:133 318.19620174804686:66:64:134 +318.19620174804686:66:64:134 319.26233599609373:66:65:134 +319.26233599609373:66:65:134 320.32847024414065:65:66:135 +320.32847024414065:65:66:135 321.39460449218751:65:68:135 +321.39460449218751:65:68:135 322.46073874023438:64:69:136 +322.46073874023438:64:69:136 323.52687298828124:64:70:136 +323.52687298828124:64:70:136 324.59300723632811:63:71:136 +324.59300723632811:63:71:136 325.66186121459958:63:72:137 +325.66186121459958:63:72:137 326.7279954626465:62:73:137 +326.7279954626465:62:73:137 327.79412971069337:62:74:137 +327.79412971069337:62:74:137 328.86026395874023:62:76:138 +328.86026395874023:62:76:138 329.9263982067871:61:77:138 +329.9263982067871:61:77:138 330.99253245483396:61:78:138 +330.99253245483396:61:78:138 332.06138643310544:60:79:138 +332.06138643310544:60:79:138 333.12752068115236:60:80:139 +333.12752068115236:60:80:139 334.19365492919923:59:81:139 +334.19365492919923:59:81:139 335.25978917724609:59:82:139 +335.25978917724609:59:82:139 336.32592342529296:58:83:139 +336.32592342529296:58:83:139 337.39205767333988:58:84:140 +337.39205767333988:58:84:140 338.46091165161135:57:85:140 +338.46091165161135:57:85:140 339.52704589965822:57:86:140 +339.52704589965822:57:86:140 340.59318014770508:56:88:140 +340.59318014770508:56:88:140 341.65931439575195:56:89:140 +341.65931439575195:56:89:140 342.72544864379881:55:90:140 +342.72544864379881:55:90:140 343.79158289184568:55:91:141 +343.79158289184568:55:91:141 344.85771713989254:54:92:141 +344.85771713989254:54:92:141 345.92657111816408:54:93:141 +345.92657111816408:54:93:141 346.99270536621094:53:94:141 +346.99270536621094:53:94:141 348.05883961425781:53:95:141 +348.05883961425781:53:95:141 349.12497386230467:52:96:141 +349.12497386230467:52:96:141 350.19110811035159:52:97:141 +350.19110811035159:52:97:141 351.25724235839846:51:98:141 +351.25724235839846:51:98:141 352.32609633666993:51:99:141 +352.32609633666993:51:99:141 353.3922305847168:50:100:142 +353.3922305847168:50:100:142 354.45836483276366:50:101:142 +354.45836483276366:50:101:142 355.52449908081053:49:102:142 +355.52449908081053:49:102:142 356.59063332885745:49:103:142 +356.59063332885745:49:103:142 357.65676757690426:49:104:142 +357.65676757690426:49:104:142 358.72290182495118:48:105:142 +358.72290182495118:48:105:142 359.79175580322266:48:106:142 +359.79175580322266:48:106:142 360.85789005126952:47:107:142 +360.85789005126952:47:107:142 361.92402429931639:47:108:142 +361.92402429931639:47:108:142 362.99015854736331:46:109:142 +362.99015854736331:46:109:142 364.05629279541017:46:110:142 +364.05629279541017:46:110:142 365.12242704345704:46:111:142 +365.12242704345704:46:111:142 366.19128102172851:45:112:142 +366.19128102172851:45:112:142 367.25741526977538:45:113:142 
+367.25741526977538:45:113:142 368.32354951782224:44:113:142 +368.32354951782224:44:113:142 369.38968376586911:44:114:142 +369.38968376586911:44:114:142 370.45581801391603:44:115:142 +370.45581801391603:44:115:142 371.52195226196289:43:116:142 +371.52195226196289:43:116:142 372.59080624023437:43:117:142 +372.59080624023437:43:117:142 373.65694048828124:42:118:142 +373.65694048828124:42:118:142 374.7230747363281:42:119:142 +374.7230747363281:42:119:142 375.78920898437502:42:120:142 +375.78920898437502:42:120:142 376.85534323242189:41:121:142 +376.85534323242189:41:121:142 377.92147748046875:41:122:142 +377.92147748046875:41:122:142 378.98761172851562:41:123:142 +378.98761172851562:41:123:142 380.05646570678709:40:124:142 +380.05646570678709:40:124:142 381.12259995483396:40:125:142 +381.12259995483396:40:125:142 382.18873420288088:39:126:142 +382.18873420288088:39:126:142 383.25486845092775:39:127:142 +383.25486845092775:39:127:142 384.32100269897461:39:128:142 +384.32100269897461:39:128:142 385.38713694702147:38:129:142 +385.38713694702147:38:129:142 386.45599092529295:38:130:142 +386.45599092529295:38:130:142 387.52212517333987:38:130:142 +387.52212517333987:38:130:142 388.58825942138674:37:131:142 +388.58825942138674:37:131:142 389.6543936694336:37:132:142 +389.6543936694336:37:132:142 390.72052791748047:37:133:142 +390.72052791748047:37:133:142 391.78666216552733:36:134:142 +391.78666216552733:36:134:142 392.85551614379881:36:135:142 +392.85551614379881:36:135:142 393.92165039184573:35:136:142 +393.92165039184573:35:136:142 394.98778463989254:35:137:142 +394.98778463989254:35:137:142 396.05391888793946:35:138:141 +396.05391888793946:35:138:141 397.12005313598632:34:139:141 +397.12005313598632:34:139:141 398.18618738403319:34:140:141 +398.18618738403319:34:140:141 399.25232163208011:34:141:141 +399.25232163208011:34:141:141 400.32117561035159:33:142:141 +400.32117561035159:33:142:141 401.38730985839845:33:143:141 +401.38730985839845:33:143:141 402.45344410644532:33:144:141 +402.45344410644532:33:144:141 403.51957835449218:33:145:140 +403.51957835449218:33:145:140 404.58571260253905:32:146:140 +404.58571260253905:32:146:140 405.65184685058591:32:146:140 +405.65184685058591:32:146:140 406.72070082885739:32:147:140 +406.72070082885739:32:147:140 407.78683507690431:31:148:140 +407.78683507690431:31:148:140 408.85296932495118:31:149:139 +408.85296932495118:31:149:139 409.91910357299804:31:150:139 +409.91910357299804:31:150:139 410.98523782104496:31:151:139 +410.98523782104496:31:151:139 412.05137206909183:31:152:139 +412.05137206909183:31:152:139 413.11750631713869:31:153:138 +413.11750631713869:31:153:138 414.18636029541017:31:154:138 +414.18636029541017:31:154:138 415.25249454345703:30:155:138 +415.25249454345703:30:155:138 416.3186287915039:30:156:137 +416.3186287915039:30:156:137 417.38476303955076:30:157:137 +417.38476303955076:30:157:137 418.45089728759763:31:158:137 +418.45089728759763:31:158:137 419.51703153564455:31:159:136 +419.51703153564455:31:159:136 420.58588551391597:31:160:136 +420.58588551391597:31:160:136 421.65201976196289:31:161:136 +421.65201976196289:31:161:136 422.71815401000975:31:161:135 +422.71815401000975:31:161:135 423.78428825805668:31:162:135 +423.78428825805668:31:162:135 424.85042250610354:32:163:134 +424.85042250610354:32:163:134 425.91655675415041:32:164:134 +425.91655675415041:32:164:134 426.98541073242188:33:165:133 +426.98541073242188:33:165:133 428.05154498046875:33:166:133 +428.05154498046875:33:166:133 429.11767922851561:34:167:133 
+429.11767922851561:34:167:133 430.18381347656248:34:168:132 +430.18381347656248:34:168:132 431.24994772460934:35:169:131 +431.24994772460934:35:169:131 432.31608197265621:36:170:131 +432.31608197265621:36:170:131 433.38221622070313:37:171:130 +433.38221622070313:37:171:130 434.45107019897466:37:172:130 +434.45107019897466:37:172:130 435.51720444702147:38:173:129 +435.51720444702147:38:173:129 436.58333869506839:39:173:129 +436.58333869506839:39:173:129 437.64947294311526:40:174:128 +437.64947294311526:40:174:128 438.71560719116212:41:175:127 +438.71560719116212:41:175:127 439.78174143920899:42:176:127 +439.78174143920899:42:176:127 440.85059541748046:44:177:126 +440.85059541748046:44:177:126 441.91672966552733:45:178:125 +441.91672966552733:45:178:125 442.98286391357425:46:179:124 +442.98286391357425:46:179:124 444.04899816162106:47:180:124 +444.04899816162106:47:180:124 445.11513240966792:49:181:123 +445.11513240966792:49:181:123 446.1812666577149:50:182:122 +446.1812666577149:50:182:122 447.25012063598638:52:182:121 +447.25012063598638:52:182:121 448.31625488403319:53:183:121 +448.31625488403319:53:183:121 449.38238913208011:55:184:120 +449.38238913208011:55:184:120 450.44852338012697:56:185:119 +450.44852338012697:56:185:119 451.51465762817384:58:186:118 +451.51465762817384:58:186:118 452.5807918762207:59:187:117 +452.5807918762207:59:187:117 453.64692612426757:61:188:116 +453.64692612426757:61:188:116 454.71578010253904:63:188:115 +454.71578010253904:63:188:115 455.78191435058596:64:189:114 +455.78191435058596:64:189:114 456.84804859863277:66:190:113 +456.84804859863277:66:190:113 457.91418284667969:68:191:112 +457.91418284667969:68:191:112 458.98031709472656:70:192:111 +458.98031709472656:70:192:111 460.04645134277342:72:193:110 +460.04645134277342:72:193:110 461.11530532104496:74:193:109 +461.11530532104496:74:193:109 462.18143956909182:76:194:108 +462.18143956909182:76:194:108 463.24757381713869:78:195:107 +463.24757381713869:78:195:107 464.31370806518555:80:196:106 +464.31370806518555:80:196:106 465.37984231323242:82:197:105 +465.37984231323242:82:197:105 466.44597656127928:84:197:104 +466.44597656127928:84:197:104 467.51211080932615:86:198:103 +467.51211080932615:86:198:103 468.58096478759762:88:199:101 +468.58096478759762:88:199:101 469.64709903564454:90:200:100 +469.64709903564454:90:200:100 470.71323328369141:92:200:99 +470.71323328369141:92:200:99 471.77936753173827:94:201:98 +471.77936753173827:94:201:98 472.8455017797852:96:202:96 +472.8455017797852:96:202:96 473.91163602783206:99:203:95 +473.91163602783206:99:203:95 474.98049000610354:101:203:94 +474.98049000610354:101:203:94 476.0466242541504:103:204:92 +476.0466242541504:103:204:92 477.11275850219727:105:205:91 +477.11275850219727:105:205:91 478.17889275024413:108:205:90 +478.17889275024413:108:205:90 479.245026998291:110:206:88 +479.245026998291:110:206:88 480.31116124633786:112:207:87 +480.31116124633786:112:207:87 481.38001522460934:115:208:86 +481.38001522460934:115:208:86 482.4461494726562:117:208:84 +482.4461494726562:117:208:84 483.51228372070312:119:209:83 +483.51228372070312:119:209:83 484.57841796875005:122:209:81 +484.57841796875005:122:209:81 485.64455221679685:124:210:80 +485.64455221679685:124:210:80 486.71068646484377:127:211:78 +486.71068646484377:127:211:78 487.77682071289064:129:211:77 +487.77682071289064:129:211:77 488.84567469116212:132:212:75 +488.84567469116212:132:212:75 489.91180893920898:134:213:73 +489.91180893920898:134:213:73 490.97794318725585:137:213:72 +490.97794318725585:137:213:72 
492.04407743530271:139:214:70 +492.04407743530271:139:214:70 493.11021168334963:142:214:69 +493.11021168334963:142:214:69 494.17634593139644:144:215:67 +494.17634593139644:144:215:67 495.24519990966792:147:215:65 +495.24519990966792:147:215:65 496.31133415771478:149:216:64 +496.31133415771478:149:216:64 497.37746840576176:152:216:62 +497.37746840576176:152:216:62 498.44360265380857:155:217:60 +498.44360265380857:155:217:60 499.50973690185549:157:217:59 +499.50973690185549:157:217:59 500.57587114990235:160:218:57 +500.57587114990235:160:218:57 501.64472512817383:162:218:55 +501.64472512817383:162:218:55 502.7108593762207:165:219:54 +502.7108593762207:165:219:54 503.77699362426756:168:219:52 +503.77699362426756:168:219:52 504.84312787231443:170:220:50 +504.84312787231443:170:220:50 505.90926212036135:173:220:48 +505.90926212036135:173:220:48 506.97539636840816:176:221:47 +506.97539636840816:176:221:47 508.04153061645508:178:221:45 +508.04153061645508:178:221:45 509.11038459472655:181:222:43 +509.11038459472655:181:222:43 510.17651884277348:184:222:41 +510.17651884277348:184:222:41 511.24265309082034:186:222:40 +511.24265309082034:186:222:40 512.30878733886721:189:223:38 +512.30878733886721:189:223:38 513.37492158691407:192:223:37 +513.37492158691407:192:223:37 514.44105583496093:194:223:35 +514.44105583496093:194:223:35 515.50990981323241:197:224:33 +515.50990981323241:197:224:33 516.57604406127928:200:224:32 +516.57604406127928:200:224:32 517.64217830932614:202:225:31 +517.64217830932614:202:225:31 518.70831255737312:205:225:29 +518.70831255737312:205:225:29 519.77444680541998:208:225:28 +519.77444680541998:208:225:28 520.84058105346685:210:226:27 +520.84058105346685:210:226:27 521.90671530151371:213:226:26 +521.90671530151371:213:226:26 522.97556927978508:216:226:25 +522.97556927978508:216:226:25 524.04170352783206:218:227:25 +524.04170352783206:218:227:25 525.10783777587892:221:227:24 +525.10783777587892:221:227:24 526.17397202392578:223:227:24 +526.17397202392578:223:227:24 527.24010627197265:226:228:24 +527.24010627197265:226:228:24 528.30624052001963:229:228:25 +528.30624052001963:229:228:25 529.37509449829099:231:228:25 +529.37509449829099:231:228:25 530.44122874633786:234:229:26 +530.44122874633786:234:229:26 531.50736299438472:236:229:27 +531.50736299438472:236:229:27 532.57349724243159:239:229:28 +532.57349724243159:239:229:28 533.63963149047845:241:229:29 +533.63963149047845:241:229:29 534.70576573852532:244:230:30 +534.70576573852532:244:230:30 535.77461971679691:246:230:32 +535.77461971679691:246:230:32 536.84075396484377:248:230:33 +536.84075396484377:248:230:33 537.90688821289064:251:231:35 +537.90688821289064:251:231:35 538.9730224609375:253:231:37 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/dem_slope b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/dem_slope new file mode 100644 index 0000000000000000000000000000000000000000..7423385754e7fc17d145c6def4c6a2ce01a07cb2 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/dem_slope @@ -0,0 +1,8 @@ +% 0 90 +0:255 2:255:255:0 +2:255:255:0 5:0:255:0 +5:0:255:0 10:0:255:255 +10:0:255:255 15:0:0:255 +15:0:0:255 30:255:0:255 +30:255:0:255 50:255:0:0 +50:255:0:0 90:0 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/drain_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/drain_wk new file mode 
100644 index 0000000000000000000000000000000000000000..08355fde69515f7e8977fbc44c842cbbf8927fe1 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/drain_wk @@ -0,0 +1,3 @@ +% -8 8 +-8:0 0:255 +0:255 8:0 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/halfbasins b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/halfbasins new file mode 100644 index 0000000000000000000000000000000000000000..40b533c60c0571a2e0f0a285917a34593f42aea5 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/halfbasins @@ -0,0 +1,909 @@ +% 1 1950 +%% +1:172:134:93 +2:172:158:165 +3:58:86:32 +4:68:135:103 +5:136:166:114 +6:159:109:68 +7:225:46:101 +8:101:9:214 +9:233:86:32 +10:191:85:71 +11:42:215:12 +12:194:240:227 +13:185:200:16 +14:39:176:138 +15:45:174:114 +16:193:70:55 +17:79:170:37 +18:188:38:162 +19:238:160:72 +20:89:215:116 +21:123:182:246 +22:102:26:71 +23:53:70:90 +24:99:99:54 +25:167:223:101 +26:238:45:105 +27:138:169:114 +28:132:252:176 +29:32:32:81 +30:129:225:213 +31:65:156:99 +32:188:115:233 +33:123:234:112 +34:201:101:138 +35:185:183:41 +36:150:243:155 +37:26:50:64 +38:171:121:165 +39:170:254:54 +40:244:185:175 +41:214:29:36 +42:170:112:237 +43:164:22:209 +44:126:227:95 +45:191:81:148 +46:222:91:4 +47:156:40:159 +48:176:109:240 +49:7:225:116 +50:154:77:51 +51:168:93:76 +52:133:67:19 +53:6:221:142 +54:160:193:216 +55:212:253:76 +56:204:120:224 +57:95:124:35 +58:10:32:118 +59:21:44:194 +60:13:157:119 +61:72:239:164 +62:191:104:98 +63:99:74:138 +64:31:53:59 +65:239:203:134 +66:150:239:14 +67:235:239:91 +68:82:76:180 +69:1:26:60 +70:19:77:252 +71:105:137:12 +72:61:47:9 +73:76:7:71 +74:197:96:73 +75:156:155:0 +76:43:62:31 +77:140:45:111 +78:65:113:219 +79:219:155:164 +80:47:229:90 +81:76:131:29 +82:198:124:49 +83:205:111:30 +84:201:179:246 +85:235:238:200 +86:117:150:205 +87:63:140:9 +88:59:137:9 +89:198:106:149 +90:51:112:72 +91:22:179:99 +92:117:252:32 +93:125:222:4 +94:37:0:251 +95:107:81:144 +96:161:190:123 +97:86:132:205 +98:208:71:77 +99:189:123:127 +100:84:61:239 +101:199:251:209 +102:212:54:166 +103:74:140:243 +104:90:86:96 +105:24:241:51 +106:76:178:247 +107:127:99:229 +108:37:44:224 +109:44:120:143 +110:203:191:236 +111:151:69:13 +112:218:26:112 +113:104:235:75 +114:253:193:184 +115:72:83:136 +116:0:208:89 +117:181:133:18 +118:224:229:131 +119:160:176:156 +120:206:131:45 +121:169:135:104 +122:166:167:122 +123:247:59:159 +124:23:68:104 +125:203:11:94 +126:50:118:123 +127:5:236:255 +128:221:122:166 +129:252:114:110 +130:51:122:100 +131:28:214:99 +132:120:114:137 +133:249:111:106 +134:233:126:255 +135:120:193:246 +136:105:28:49 +137:4:183:151 +138:122:241:148 +139:186:105:204 +140:199:217:94 +141:171:78:223 +142:249:15:19 +143:123:96:102 +144:77:121:46 +145:169:118:43 +146:251:38:228 +147:5:132:88 +148:7:73:119 +149:242:62:215 +150:222:251:86 +151:38:37:18 +152:159:80:201 +153:211:173:71 +154:159:82:168 +155:37:191:196 +156:80:162:11 +157:62:146:159 +158:95:149:191 +159:223:87:100 +160:111:213:183 +161:43:241:151 +162:123:148:109 +163:242:124:210 +164:153:152:188 +165:34:20:117 +166:78:115:98 +167:32:229:34 +168:133:108:247 +169:222:52:29 +170:141:171:187 +171:183:170:2 +172:110:114:85 +173:21:203:75 +174:69:47:43 +175:52:41:58 +176:37:108:145 +177:91:128:233 +178:124:184:210 +179:241:127:89 +180:185:199:142 +181:44:181:131 +182:106:34:161 +183:251:167:120 +184:204:221:89 +185:16:184:223 +186:91:253:110 
+187:13:59:30 +188:124:65:197 +189:225:141:177 +190:42:41:163 +191:162:205:74 +192:243:59:171 +193:49:93:116 +194:100:161:134 +195:45:234:152 +196:148:91:197 +197:208:133:93 +198:0:97:44 +199:109:180:131 +200:202:49:46 +201:114:69:113 +202:192:106:7 +203:228:33:210 +204:118:35:129 +205:117:11:191 +206:46:235:17 +207:110:220:22 +208:252:96:148 +209:200:100:176 +210:60:130:156 +211:246:191:87 +212:243:249:216 +213:240:135:129 +214:128:168:193 +215:75:242:215 +216:112:22:87 +217:45:133:219 +218:20:52:105 +219:25:172:247 +220:247:78:82 +221:193:25:130 +222:15:254:2 +223:252:140:63 +224:5:24:125 +225:89:17:49 +226:170:172:202 +227:184:155:129 +228:68:102:224 +229:141:94:142 +230:250:147:185 +231:139:234:37 +232:84:89:83 +233:138:197:64 +234:78:187:22 +235:171:202:136 +236:58:23:158 +237:216:43:39 +238:45:13:161 +239:212:181:134 +240:112:193:52 +241:62:127:172 +242:247:145:96 +243:1:192:31 +244:145:137:147 +245:201:78:63 +246:29:217:17 +247:67:194:62 +248:168:232:66 +249:244:240:232 +250:6:130:51 +251:183:13:157 +252:28:244:110 +253:234:83:252 +254:137:225:185 +255:153:206:205 +256:65:216:41 +257:240:105:248 +258:9:195:84 +259:126:12:90 +260:134:102:73 +261:230:59:116 +262:57:85:100 +263:168:66:20 +264:63:207:226 +265:244:146:54 +266:116:42:131 +267:122:162:112 +268:23:123:104 +269:74:69:111 +270:192:99:182 +271:18:240:22 +272:28:251:108 +273:8:44:10 +274:105:242:57 +275:254:255:124 +276:111:35:249 +277:69:168:40 +278:61:255:238 +279:15:210:184 +280:142:61:147 +281:51:213:177 +282:111:116:106 +283:81:90:91 +284:71:96:212 +285:104:86:13 +286:20:157:164 +287:36:209:129 +288:68:231:172 +289:64:195:248 +290:61:240:67 +291:109:184:238 +292:54:6:60 +293:103:185:122 +294:184:243:138 +295:237:122:91 +296:168:127:85 +297:123:134:254 +298:111:67:235 +299:187:165:215 +300:105:122:155 +301:214:119:131 +302:99:184:45 +303:213:200:81 +304:156:122:57 +305:114:200:246 +306:77:175:68 +307:217:247:58 +308:104:116:71 +309:238:48:168 +310:218:104:180 +311:218:224:80 +312:64:2:200 +313:182:15:227 +314:138:150:169 +315:85:144:126 +316:214:190:66 +317:71:61:159 +318:45:254:158 +319:74:207:231 +320:169:178:1 +321:151:121:91 +322:1:64:176 +323:112:27:10 +324:47:243:244 +325:156:114:14 +326:115:185:135 +327:137:77:7 +328:169:85:43 +329:236:125:69 +330:121:145:232 +331:220:205:8 +332:139:65:242 +333:138:220:79 +334:145:217:226 +335:201:123:196 +336:141:171:149 +337:201:175:155 +338:97:212:155 +339:126:37:38 +340:89:39:186 +341:81:130:43 +342:242:95:190 +343:208:167:19 +344:216:36:92 +345:71:123:41 +346:147:116:140 +347:46:171:80 +348:35:57:114 +349:146:38:160 +350:79:209:132 +351:183:5:138 +352:12:163:37 +353:65:231:77 +354:19:190:184 +355:114:178:120 +356:79:216:175 +357:16:4:155 +358:103:244:182 +359:166:118:36 +360:96:61:221 +361:17:84:182 +362:207:161:24 +363:72:24:80 +364:218:251:43 +365:112:144:191 +366:198:145:176 +367:153:68:251 +368:137:248:127 +369:88:61:38 +370:95:111:91 +371:218:4:11 +372:29:234:140 +373:250:59:29 +374:129:49:105 +375:30:226:13 +376:116:141:204 +377:180:245:48 +378:197:154:175 +379:71:165:31 +380:139:253:34 +381:86:45:250 +382:246:225:50 +383:21:175:248 +384:9:39:19 +385:136:107:251 +386:45:120:120 +387:97:248:6 +388:111:98:167 +389:80:10:141 +390:143:239:116 +391:112:178:190 +392:232:35:233 +393:184:231:254 +394:173:255:21 +395:106:130:249 +396:177:213:0 +397:149:175:191 +398:127:172:116 +399:242:94:130 +400:46:206:240 +401:107:205:194 +402:4:138:161 +403:215:16:181 +404:145:102:250 +405:116:245:234 +406:130:41:17 +407:240:78:73 +408:47:43:147 +409:200:88:164 +410:94:147:174 +411:14:125:55 
+412:105:55:12 +413:159:108:155 +414:159:250:131 +415:19:9:187 +416:60:170:199 +417:186:95:144 +418:12:120:11 +419:40:106:127 +420:110:60:26 +421:232:32:80 +422:232:247:29 +423:20:190:224 +424:93:243:203 +425:173:18:200 +426:79:55:125 +427:176:6:81 +428:109:250:46 +429:5:86:59 +430:55:246:10 +431:127:7:228 +432:22:154:230 +433:78:188:156 +434:13:50:136 +435:98:198:240 +436:145:73:63 +437:77:78:254 +438:241:147:18 +439:113:168:213 +440:37:232:46 +441:79:129:48 +442:152:236:38 +443:238:44:226 +444:26:19:237 +445:121:1:109 +446:198:233:84 +447:91:79:96 +448:65:153:62 +449:34:31:57 +450:109:203:140 +451:179:131:176 +452:38:19:66 +453:100:226:80 +454:109:86:12 +455:189:84:151 +456:219:153:253 +457:186:176:193 +458:241:213:235 +459:133:160:163 +460:106:151:113 +461:203:160:31 +462:106:60:77 +463:236:121:175 +464:223:198:93 +465:78:101:223 +466:52:115:44 +467:103:161:137 +468:248:63:153 +469:15:224:197 +470:203:187:199 +471:206:170:187 +472:115:178:24 +473:22:75:128 +474:176:49:177 +475:82:176:110 +476:250:188:130 +477:239:6:94 +478:229:120:129 +479:153:188:115 +480:181:94:115 +481:12:9:34 +482:10:124:25 +483:238:192:100 +484:115:146:92 +485:79:236:249 +486:26:90:158 +487:152:49:240 +488:127:1:253 +489:173:158:148 +490:206:100:252 +491:84:77:60 +492:4:215:133 +493:245:168:87 +494:149:72:26 +495:231:241:109 +496:36:191:80 +497:182:37:184 +498:54:89:169 +499:211:30:75 +500:163:243:67 +501:71:73:171 +502:11:237:140 +503:50:17:8 +504:51:115:205 +505:232:147:65 +506:226:238:237 +507:168:5:38 +508:100:95:133 +509:14:153:89 +510:122:117:229 +511:122:142:127 +512:50:103:99 +513:197:119:118 +514:160:153:206 +515:197:155:103 +516:51:102:164 +517:55:219:184 +518:236:78:46 +519:209:17:244 +520:99:23:71 +521:81:183:238 +522:36:113:74 +523:139:8:103 +524:150:231:38 +525:141:140:208 +526:52:233:77 +527:25:171:171 +528:130:242:187 +529:210:88:82 +530:207:239:27 +531:144:184:2 +532:110:18:120 +533:128:36:28 +534:174:118:191 +535:202:153:199 +536:131:25:202 +537:145:53:31 +538:107:174:117 +539:91:37:87 +540:181:39:179 +541:226:213:74 +542:1:172:93 +543:170:0:17 +544:86:30:9 +545:152:197:98 +546:235:44:197 +547:35:147:133 +548:64:59:87 +549:166:74:247 +550:223:127:26 +551:147:176:178 +552:165:199:86 +553:114:216:122 +554:46:136:114 +555:149:205:112 +556:125:243:16 +557:160:103:116 +558:193:232:193 +559:49:227:244 +560:147:201:155 +561:239:89:218 +562:187:66:159 +563:140:235:122 +564:52:71:118 +565:72:14:132 +566:175:161:186 +567:193:254:6 +568:125:142:138 +569:222:13:197 +570:132:255:229 +571:215:14:75 +572:73:64:199 +573:119:214:28 +574:245:231:197 +575:212:77:181 +576:188:241:97 +577:209:80:17 +578:167:66:28 +579:246:33:139 +580:119:188:174 +581:40:212:35 +582:237:56:187 +583:10:202:52 +584:96:253:166 +585:221:221:229 +586:39:52:17 +587:222:153:163 +588:20:38:255 +589:57:86:48 +590:187:19:83 +591:216:213:214 +592:244:178:235 +593:88:134:123 +594:182:94:79 +595:177:52:128 +596:210:62:119 +597:41:161:80 +598:10:187:218 +599:68:251:207 +600:63:192:138 +601:154:246:224 +602:108:107:217 +603:135:188:80 +604:123:216:127 +605:217:185:189 +606:210:244:248 +607:164:179:247 +608:0:74:104 +609:189:116:175 +610:143:231:237 +611:41:195:68 +612:179:149:232 +613:75:23:168 +614:20:197:112 +615:99:26:135 +616:176:165:180 +617:93:162:218 +618:76:3:194 +619:210:103:77 +620:184:172:175 +621:102:115:241 +622:155:53:223 +623:189:189:219 +624:66:23:168 +625:89:56:98 +626:123:79:73 +627:237:15:223 +628:36:165:185 +629:176:123:234 +630:188:14:122 +631:126:78:47 +632:228:154:69 +633:145:205:27 +634:94:126:238 +635:217:42:177 
+636:168:24:150 +637:19:152:24 +638:21:158:211 +639:199:108:99 +640:190:151:25 +641:166:141:106 +642:97:39:86 +643:168:245:124 +644:209:115:64 +645:153:174:242 +646:80:114:147 +647:202:94:183 +648:177:171:251 +649:190:2:6 +650:218:127:33 +651:220:52:184 +652:197:178:219 +653:49:219:161 +654:223:26:64 +655:138:215:144 +656:24:102:205 +657:62:206:187 +658:200:32:168 +659:42:246:99 +660:5:37:118 +661:105:54:191 +662:192:232:247 +663:156:178:50 +664:137:7:55 +665:143:109:34 +666:150:198:190 +667:56:85:187 +668:44:47:199 +669:50:145:25 +670:57:178:49 +671:233:181:132 +672:147:66:113 +673:219:246:107 +674:215:13:114 +675:95:50:255 +676:173:2:242 +677:160:54:109 +678:156:140:129 +679:106:81:208 +680:127:251:248 +681:205:218:21 +682:11:53:202 +683:106:251:51 +684:151:100:65 +685:167:171:45 +686:2:143:83 +687:235:94:129 +688:18:9:86 +689:84:162:177 +690:87:223:136 +691:86:103:217 +692:82:109:235 +693:223:112:59 +694:18:149:173 +695:201:226:228 +696:72:245:221 +697:97:179:163 +698:79:205:231 +699:16:55:185 +700:98:71:208 +701:66:190:172 +702:187:250:239 +703:181:202:231 +704:25:185:107 +705:165:12:224 +706:175:166:93 +707:59:246:154 +708:34:237:57 +709:234:73:134 +710:244:93:149 +711:112:174:222 +712:55:130:38 +713:85:5:178 +714:28:177:89 +715:231:185:7 +716:137:237:155 +717:212:254:130 +718:129:94:243 +719:141:144:57 +720:204:110:64 +721:230:16:112 +722:230:150:5 +723:91:222:12 +724:231:39:84 +725:160:196:203 +726:175:95:247 +727:50:156:80 +728:63:79:178 +729:210:123:71 +730:199:33:4 +731:205:207:247 +732:167:140:107 +733:79:61:189 +734:22:68:233 +735:216:231:24 +736:238:103:3 +737:81:43:245 +738:163:254:51 +739:38:190:24 +740:13:224:83 +741:3:133:167 +742:85:52:45 +743:7:50:236 +744:243:42:2 +745:34:96:137 +746:73:126:107 +747:191:105:130 +748:248:121:167 +749:195:240:138 +750:215:85:253 +751:27:167:70 +752:226:0:134 +753:64:118:37 +754:47:82:59 +755:39:214:200 +756:159:255:237 +757:54:206:217 +758:147:148:50 +759:3:154:131 +760:136:1:51 +761:174:160:189 +762:56:76:177 +763:219:22:193 +764:86:43:85 +765:100:41:56 +766:198:92:251 +767:253:72:163 +768:173:183:54 +769:44:174:214 +770:113:33:17 +771:13:5:69 +772:73:136:122 +773:121:133:61 +774:186:90:162 +775:92:154:7 +776:209:224:6 +777:0:199:71 +778:206:42:154 +779:94:7:239 +780:63:54:31 +781:130:157:51 +782:129:64:77 +783:66:225:50 +784:241:42:36 +785:175:46:252 +786:239:30:71 +787:165:203:218 +788:87:162:242 +789:46:43:210 +790:185:127:186 +791:103:153:137 +792:65:246:219 +793:193:255:175 +794:224:218:138 +795:166:10:100 +796:205:79:75 +797:142:159:9 +798:72:12:0 +799:209:41:20 +800:242:23:0 +801:130:242:173 +802:211:27:17 +803:220:71:238 +804:180:145:235 +805:214:228:181 +806:31:29:83 +807:154:160:59 +808:236:145:177 +809:188:21:152 +810:230:63:132 +811:48:144:154 +812:187:77:192 +813:27:33:103 +814:250:234:191 +815:174:120:138 +816:148:90:23 +817:126:149:32 +818:226:8:68 +819:192:58:12 +820:235:187:159 +821:20:117:35 +822:29:109:234 +823:138:85:110 +824:133:28:39 +825:216:116:201 +826:249:89:44 +827:154:167:43 +828:102:37:7 +829:217:185:28 +830:23:34:213 +831:255:197:246 +832:89:242:91 +833:157:82:169 +834:134:247:81 +835:127:1:222 +836:40:166:227 +837:167:64:121 +838:129:200:154 +839:238:1:147 +840:96:39:124 +841:33:39:39 +842:207:74:196 +843:161:255:208 +844:200:236:70 +845:155:153:20 +846:190:31:46 +847:10:169:217 +848:102:251:91 +849:249:6:190 +850:196:26:78 +851:102:158:44 +852:54:247:47 +853:115:74:54 +854:187:168:31 +855:153:138:61 +856:113:94:145 +857:190:218:184 +858:192:83:49 +859:35:232:98 +860:125:217:70 +861:81:149:93 
+862:176:105:85 +863:52:90:214 +864:127:179:69 +865:203:43:244 +866:70:194:236 +867:226:175:225 +868:129:116:156 +869:120:53:247 +870:219:167:212 +871:131:121:29 +872:74:143:229 +873:251:217:161 +874:196:214:246 +875:30:80:219 +876:193:63:109 +877:14:32:36 +878:74:171:119 +879:3:176:173 +880:5:121:235 +881:108:222:1 +882:81:97:129 +883:130:115:6 +884:21:2:224 +885:216:69:121 +886:144:126:180 +887:192:244:4 +888:31:168:152 +889:64:14:39 +890:113:85:55 +891:174:201:85 +892:111:150:196 +893:2:77:184 +894:142:173:93 +895:27:35:64 +896:254:200:186 +897:87:217:186 +898:207:136:254 +899:242:203:195 +900:157:166:83 +901:213:93:153 +902:42:7:92 +903:134:196:227 +904:196:182:104 +905:22:6:177 +906:0:113:182 +%% diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/streams_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/streams_wk new file mode 100644 index 0000000000000000000000000000000000000000..40b533c60c0571a2e0f0a285917a34593f42aea5 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/streams_wk @@ -0,0 +1,909 @@ +% 1 1950 +%% +1:172:134:93 +2:172:158:165 +3:58:86:32 +4:68:135:103 +5:136:166:114 +6:159:109:68 +7:225:46:101 +8:101:9:214 +9:233:86:32 +10:191:85:71 +11:42:215:12 +12:194:240:227 +13:185:200:16 +14:39:176:138 +15:45:174:114 +16:193:70:55 +17:79:170:37 +18:188:38:162 +19:238:160:72 +20:89:215:116 +21:123:182:246 +22:102:26:71 +23:53:70:90 +24:99:99:54 +25:167:223:101 +26:238:45:105 +27:138:169:114 +28:132:252:176 +29:32:32:81 +30:129:225:213 +31:65:156:99 +32:188:115:233 +33:123:234:112 +34:201:101:138 +35:185:183:41 +36:150:243:155 +37:26:50:64 +38:171:121:165 +39:170:254:54 +40:244:185:175 +41:214:29:36 +42:170:112:237 +43:164:22:209 +44:126:227:95 +45:191:81:148 +46:222:91:4 +47:156:40:159 +48:176:109:240 +49:7:225:116 +50:154:77:51 +51:168:93:76 +52:133:67:19 +53:6:221:142 +54:160:193:216 +55:212:253:76 +56:204:120:224 +57:95:124:35 +58:10:32:118 +59:21:44:194 +60:13:157:119 +61:72:239:164 +62:191:104:98 +63:99:74:138 +64:31:53:59 +65:239:203:134 +66:150:239:14 +67:235:239:91 +68:82:76:180 +69:1:26:60 +70:19:77:252 +71:105:137:12 +72:61:47:9 +73:76:7:71 +74:197:96:73 +75:156:155:0 +76:43:62:31 +77:140:45:111 +78:65:113:219 +79:219:155:164 +80:47:229:90 +81:76:131:29 +82:198:124:49 +83:205:111:30 +84:201:179:246 +85:235:238:200 +86:117:150:205 +87:63:140:9 +88:59:137:9 +89:198:106:149 +90:51:112:72 +91:22:179:99 +92:117:252:32 +93:125:222:4 +94:37:0:251 +95:107:81:144 +96:161:190:123 +97:86:132:205 +98:208:71:77 +99:189:123:127 +100:84:61:239 +101:199:251:209 +102:212:54:166 +103:74:140:243 +104:90:86:96 +105:24:241:51 +106:76:178:247 +107:127:99:229 +108:37:44:224 +109:44:120:143 +110:203:191:236 +111:151:69:13 +112:218:26:112 +113:104:235:75 +114:253:193:184 +115:72:83:136 +116:0:208:89 +117:181:133:18 +118:224:229:131 +119:160:176:156 +120:206:131:45 +121:169:135:104 +122:166:167:122 +123:247:59:159 +124:23:68:104 +125:203:11:94 +126:50:118:123 +127:5:236:255 +128:221:122:166 +129:252:114:110 +130:51:122:100 +131:28:214:99 +132:120:114:137 +133:249:111:106 +134:233:126:255 +135:120:193:246 +136:105:28:49 +137:4:183:151 +138:122:241:148 +139:186:105:204 +140:199:217:94 +141:171:78:223 +142:249:15:19 +143:123:96:102 +144:77:121:46 +145:169:118:43 +146:251:38:228 +147:5:132:88 +148:7:73:119 +149:242:62:215 +150:222:251:86 +151:38:37:18 +152:159:80:201 +153:211:173:71 +154:159:82:168 +155:37:191:196 +156:80:162:11 +157:62:146:159 
+158:95:149:191 +159:223:87:100 +160:111:213:183 +161:43:241:151 +162:123:148:109 +163:242:124:210 +164:153:152:188 +165:34:20:117 +166:78:115:98 +167:32:229:34 +168:133:108:247 +169:222:52:29 +170:141:171:187 +171:183:170:2 +172:110:114:85 +173:21:203:75 +174:69:47:43 +175:52:41:58 +176:37:108:145 +177:91:128:233 +178:124:184:210 +179:241:127:89 +180:185:199:142 +181:44:181:131 +182:106:34:161 +183:251:167:120 +184:204:221:89 +185:16:184:223 +186:91:253:110 +187:13:59:30 +188:124:65:197 +189:225:141:177 +190:42:41:163 +191:162:205:74 +192:243:59:171 +193:49:93:116 +194:100:161:134 +195:45:234:152 +196:148:91:197 +197:208:133:93 +198:0:97:44 +199:109:180:131 +200:202:49:46 +201:114:69:113 +202:192:106:7 +203:228:33:210 +204:118:35:129 +205:117:11:191 +206:46:235:17 +207:110:220:22 +208:252:96:148 +209:200:100:176 +210:60:130:156 +211:246:191:87 +212:243:249:216 +213:240:135:129 +214:128:168:193 +215:75:242:215 +216:112:22:87 +217:45:133:219 +218:20:52:105 +219:25:172:247 +220:247:78:82 +221:193:25:130 +222:15:254:2 +223:252:140:63 +224:5:24:125 +225:89:17:49 +226:170:172:202 +227:184:155:129 +228:68:102:224 +229:141:94:142 +230:250:147:185 +231:139:234:37 +232:84:89:83 +233:138:197:64 +234:78:187:22 +235:171:202:136 +236:58:23:158 +237:216:43:39 +238:45:13:161 +239:212:181:134 +240:112:193:52 +241:62:127:172 +242:247:145:96 +243:1:192:31 +244:145:137:147 +245:201:78:63 +246:29:217:17 +247:67:194:62 +248:168:232:66 +249:244:240:232 +250:6:130:51 +251:183:13:157 +252:28:244:110 +253:234:83:252 +254:137:225:185 +255:153:206:205 +256:65:216:41 +257:240:105:248 +258:9:195:84 +259:126:12:90 +260:134:102:73 +261:230:59:116 +262:57:85:100 +263:168:66:20 +264:63:207:226 +265:244:146:54 +266:116:42:131 +267:122:162:112 +268:23:123:104 +269:74:69:111 +270:192:99:182 +271:18:240:22 +272:28:251:108 +273:8:44:10 +274:105:242:57 +275:254:255:124 +276:111:35:249 +277:69:168:40 +278:61:255:238 +279:15:210:184 +280:142:61:147 +281:51:213:177 +282:111:116:106 +283:81:90:91 +284:71:96:212 +285:104:86:13 +286:20:157:164 +287:36:209:129 +288:68:231:172 +289:64:195:248 +290:61:240:67 +291:109:184:238 +292:54:6:60 +293:103:185:122 +294:184:243:138 +295:237:122:91 +296:168:127:85 +297:123:134:254 +298:111:67:235 +299:187:165:215 +300:105:122:155 +301:214:119:131 +302:99:184:45 +303:213:200:81 +304:156:122:57 +305:114:200:246 +306:77:175:68 +307:217:247:58 +308:104:116:71 +309:238:48:168 +310:218:104:180 +311:218:224:80 +312:64:2:200 +313:182:15:227 +314:138:150:169 +315:85:144:126 +316:214:190:66 +317:71:61:159 +318:45:254:158 +319:74:207:231 +320:169:178:1 +321:151:121:91 +322:1:64:176 +323:112:27:10 +324:47:243:244 +325:156:114:14 +326:115:185:135 +327:137:77:7 +328:169:85:43 +329:236:125:69 +330:121:145:232 +331:220:205:8 +332:139:65:242 +333:138:220:79 +334:145:217:226 +335:201:123:196 +336:141:171:149 +337:201:175:155 +338:97:212:155 +339:126:37:38 +340:89:39:186 +341:81:130:43 +342:242:95:190 +343:208:167:19 +344:216:36:92 +345:71:123:41 +346:147:116:140 +347:46:171:80 +348:35:57:114 +349:146:38:160 +350:79:209:132 +351:183:5:138 +352:12:163:37 +353:65:231:77 +354:19:190:184 +355:114:178:120 +356:79:216:175 +357:16:4:155 +358:103:244:182 +359:166:118:36 +360:96:61:221 +361:17:84:182 +362:207:161:24 +363:72:24:80 +364:218:251:43 +365:112:144:191 +366:198:145:176 +367:153:68:251 +368:137:248:127 +369:88:61:38 +370:95:111:91 +371:218:4:11 +372:29:234:140 +373:250:59:29 +374:129:49:105 +375:30:226:13 +376:116:141:204 +377:180:245:48 +378:197:154:175 +379:71:165:31 +380:139:253:34 +381:86:45:250 +382:246:225:50 
+383:21:175:248 +384:9:39:19 +385:136:107:251 +386:45:120:120 +387:97:248:6 +388:111:98:167 +389:80:10:141 +390:143:239:116 +391:112:178:190 +392:232:35:233 +393:184:231:254 +394:173:255:21 +395:106:130:249 +396:177:213:0 +397:149:175:191 +398:127:172:116 +399:242:94:130 +400:46:206:240 +401:107:205:194 +402:4:138:161 +403:215:16:181 +404:145:102:250 +405:116:245:234 +406:130:41:17 +407:240:78:73 +408:47:43:147 +409:200:88:164 +410:94:147:174 +411:14:125:55 +412:105:55:12 +413:159:108:155 +414:159:250:131 +415:19:9:187 +416:60:170:199 +417:186:95:144 +418:12:120:11 +419:40:106:127 +420:110:60:26 +421:232:32:80 +422:232:247:29 +423:20:190:224 +424:93:243:203 +425:173:18:200 +426:79:55:125 +427:176:6:81 +428:109:250:46 +429:5:86:59 +430:55:246:10 +431:127:7:228 +432:22:154:230 +433:78:188:156 +434:13:50:136 +435:98:198:240 +436:145:73:63 +437:77:78:254 +438:241:147:18 +439:113:168:213 +440:37:232:46 +441:79:129:48 +442:152:236:38 +443:238:44:226 +444:26:19:237 +445:121:1:109 +446:198:233:84 +447:91:79:96 +448:65:153:62 +449:34:31:57 +450:109:203:140 +451:179:131:176 +452:38:19:66 +453:100:226:80 +454:109:86:12 +455:189:84:151 +456:219:153:253 +457:186:176:193 +458:241:213:235 +459:133:160:163 +460:106:151:113 +461:203:160:31 +462:106:60:77 +463:236:121:175 +464:223:198:93 +465:78:101:223 +466:52:115:44 +467:103:161:137 +468:248:63:153 +469:15:224:197 +470:203:187:199 +471:206:170:187 +472:115:178:24 +473:22:75:128 +474:176:49:177 +475:82:176:110 +476:250:188:130 +477:239:6:94 +478:229:120:129 +479:153:188:115 +480:181:94:115 +481:12:9:34 +482:10:124:25 +483:238:192:100 +484:115:146:92 +485:79:236:249 +486:26:90:158 +487:152:49:240 +488:127:1:253 +489:173:158:148 +490:206:100:252 +491:84:77:60 +492:4:215:133 +493:245:168:87 +494:149:72:26 +495:231:241:109 +496:36:191:80 +497:182:37:184 +498:54:89:169 +499:211:30:75 +500:163:243:67 +501:71:73:171 +502:11:237:140 +503:50:17:8 +504:51:115:205 +505:232:147:65 +506:226:238:237 +507:168:5:38 +508:100:95:133 +509:14:153:89 +510:122:117:229 +511:122:142:127 +512:50:103:99 +513:197:119:118 +514:160:153:206 +515:197:155:103 +516:51:102:164 +517:55:219:184 +518:236:78:46 +519:209:17:244 +520:99:23:71 +521:81:183:238 +522:36:113:74 +523:139:8:103 +524:150:231:38 +525:141:140:208 +526:52:233:77 +527:25:171:171 +528:130:242:187 +529:210:88:82 +530:207:239:27 +531:144:184:2 +532:110:18:120 +533:128:36:28 +534:174:118:191 +535:202:153:199 +536:131:25:202 +537:145:53:31 +538:107:174:117 +539:91:37:87 +540:181:39:179 +541:226:213:74 +542:1:172:93 +543:170:0:17 +544:86:30:9 +545:152:197:98 +546:235:44:197 +547:35:147:133 +548:64:59:87 +549:166:74:247 +550:223:127:26 +551:147:176:178 +552:165:199:86 +553:114:216:122 +554:46:136:114 +555:149:205:112 +556:125:243:16 +557:160:103:116 +558:193:232:193 +559:49:227:244 +560:147:201:155 +561:239:89:218 +562:187:66:159 +563:140:235:122 +564:52:71:118 +565:72:14:132 +566:175:161:186 +567:193:254:6 +568:125:142:138 +569:222:13:197 +570:132:255:229 +571:215:14:75 +572:73:64:199 +573:119:214:28 +574:245:231:197 +575:212:77:181 +576:188:241:97 +577:209:80:17 +578:167:66:28 +579:246:33:139 +580:119:188:174 +581:40:212:35 +582:237:56:187 +583:10:202:52 +584:96:253:166 +585:221:221:229 +586:39:52:17 +587:222:153:163 +588:20:38:255 +589:57:86:48 +590:187:19:83 +591:216:213:214 +592:244:178:235 +593:88:134:123 +594:182:94:79 +595:177:52:128 +596:210:62:119 +597:41:161:80 +598:10:187:218 +599:68:251:207 +600:63:192:138 +601:154:246:224 +602:108:107:217 +603:135:188:80 +604:123:216:127 +605:217:185:189 +606:210:244:248 
+607:164:179:247 +608:0:74:104 +609:189:116:175 +610:143:231:237 +611:41:195:68 +612:179:149:232 +613:75:23:168 +614:20:197:112 +615:99:26:135 +616:176:165:180 +617:93:162:218 +618:76:3:194 +619:210:103:77 +620:184:172:175 +621:102:115:241 +622:155:53:223 +623:189:189:219 +624:66:23:168 +625:89:56:98 +626:123:79:73 +627:237:15:223 +628:36:165:185 +629:176:123:234 +630:188:14:122 +631:126:78:47 +632:228:154:69 +633:145:205:27 +634:94:126:238 +635:217:42:177 +636:168:24:150 +637:19:152:24 +638:21:158:211 +639:199:108:99 +640:190:151:25 +641:166:141:106 +642:97:39:86 +643:168:245:124 +644:209:115:64 +645:153:174:242 +646:80:114:147 +647:202:94:183 +648:177:171:251 +649:190:2:6 +650:218:127:33 +651:220:52:184 +652:197:178:219 +653:49:219:161 +654:223:26:64 +655:138:215:144 +656:24:102:205 +657:62:206:187 +658:200:32:168 +659:42:246:99 +660:5:37:118 +661:105:54:191 +662:192:232:247 +663:156:178:50 +664:137:7:55 +665:143:109:34 +666:150:198:190 +667:56:85:187 +668:44:47:199 +669:50:145:25 +670:57:178:49 +671:233:181:132 +672:147:66:113 +673:219:246:107 +674:215:13:114 +675:95:50:255 +676:173:2:242 +677:160:54:109 +678:156:140:129 +679:106:81:208 +680:127:251:248 +681:205:218:21 +682:11:53:202 +683:106:251:51 +684:151:100:65 +685:167:171:45 +686:2:143:83 +687:235:94:129 +688:18:9:86 +689:84:162:177 +690:87:223:136 +691:86:103:217 +692:82:109:235 +693:223:112:59 +694:18:149:173 +695:201:226:228 +696:72:245:221 +697:97:179:163 +698:79:205:231 +699:16:55:185 +700:98:71:208 +701:66:190:172 +702:187:250:239 +703:181:202:231 +704:25:185:107 +705:165:12:224 +706:175:166:93 +707:59:246:154 +708:34:237:57 +709:234:73:134 +710:244:93:149 +711:112:174:222 +712:55:130:38 +713:85:5:178 +714:28:177:89 +715:231:185:7 +716:137:237:155 +717:212:254:130 +718:129:94:243 +719:141:144:57 +720:204:110:64 +721:230:16:112 +722:230:150:5 +723:91:222:12 +724:231:39:84 +725:160:196:203 +726:175:95:247 +727:50:156:80 +728:63:79:178 +729:210:123:71 +730:199:33:4 +731:205:207:247 +732:167:140:107 +733:79:61:189 +734:22:68:233 +735:216:231:24 +736:238:103:3 +737:81:43:245 +738:163:254:51 +739:38:190:24 +740:13:224:83 +741:3:133:167 +742:85:52:45 +743:7:50:236 +744:243:42:2 +745:34:96:137 +746:73:126:107 +747:191:105:130 +748:248:121:167 +749:195:240:138 +750:215:85:253 +751:27:167:70 +752:226:0:134 +753:64:118:37 +754:47:82:59 +755:39:214:200 +756:159:255:237 +757:54:206:217 +758:147:148:50 +759:3:154:131 +760:136:1:51 +761:174:160:189 +762:56:76:177 +763:219:22:193 +764:86:43:85 +765:100:41:56 +766:198:92:251 +767:253:72:163 +768:173:183:54 +769:44:174:214 +770:113:33:17 +771:13:5:69 +772:73:136:122 +773:121:133:61 +774:186:90:162 +775:92:154:7 +776:209:224:6 +777:0:199:71 +778:206:42:154 +779:94:7:239 +780:63:54:31 +781:130:157:51 +782:129:64:77 +783:66:225:50 +784:241:42:36 +785:175:46:252 +786:239:30:71 +787:165:203:218 +788:87:162:242 +789:46:43:210 +790:185:127:186 +791:103:153:137 +792:65:246:219 +793:193:255:175 +794:224:218:138 +795:166:10:100 +796:205:79:75 +797:142:159:9 +798:72:12:0 +799:209:41:20 +800:242:23:0 +801:130:242:173 +802:211:27:17 +803:220:71:238 +804:180:145:235 +805:214:228:181 +806:31:29:83 +807:154:160:59 +808:236:145:177 +809:188:21:152 +810:230:63:132 +811:48:144:154 +812:187:77:192 +813:27:33:103 +814:250:234:191 +815:174:120:138 +816:148:90:23 +817:126:149:32 +818:226:8:68 +819:192:58:12 +820:235:187:159 +821:20:117:35 +822:29:109:234 +823:138:85:110 +824:133:28:39 +825:216:116:201 +826:249:89:44 +827:154:167:43 +828:102:37:7 +829:217:185:28 +830:23:34:213 +831:255:197:246 +832:89:242:91 
+833:157:82:169 +834:134:247:81 +835:127:1:222 +836:40:166:227 +837:167:64:121 +838:129:200:154 +839:238:1:147 +840:96:39:124 +841:33:39:39 +842:207:74:196 +843:161:255:208 +844:200:236:70 +845:155:153:20 +846:190:31:46 +847:10:169:217 +848:102:251:91 +849:249:6:190 +850:196:26:78 +851:102:158:44 +852:54:247:47 +853:115:74:54 +854:187:168:31 +855:153:138:61 +856:113:94:145 +857:190:218:184 +858:192:83:49 +859:35:232:98 +860:125:217:70 +861:81:149:93 +862:176:105:85 +863:52:90:214 +864:127:179:69 +865:203:43:244 +866:70:194:236 +867:226:175:225 +868:129:116:156 +869:120:53:247 +870:219:167:212 +871:131:121:29 +872:74:143:229 +873:251:217:161 +874:196:214:246 +875:30:80:219 +876:193:63:109 +877:14:32:36 +878:74:171:119 +879:3:176:173 +880:5:121:235 +881:108:222:1 +882:81:97:129 +883:130:115:6 +884:21:2:224 +885:216:69:121 +886:144:126:180 +887:192:244:4 +888:31:168:152 +889:64:14:39 +890:113:85:55 +891:174:201:85 +892:111:150:196 +893:2:77:184 +894:142:173:93 +895:27:35:64 +896:254:200:186 +897:87:217:186 +898:207:136:254 +899:242:203:195 +900:157:166:83 +901:213:93:153 +902:42:7:92 +903:134:196:227 +904:196:182:104 +905:22:6:177 +906:0:113:182 +%% diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/subbasins_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/subbasins_wk new file mode 100644 index 0000000000000000000000000000000000000000..40b533c60c0571a2e0f0a285917a34593f42aea5 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/colr/subbasins_wk @@ -0,0 +1,909 @@ +% 1 1950 +%% +1:172:134:93 +2:172:158:165 +3:58:86:32 +4:68:135:103 +5:136:166:114 +6:159:109:68 +7:225:46:101 +8:101:9:214 +9:233:86:32 +10:191:85:71 +11:42:215:12 +12:194:240:227 +13:185:200:16 +14:39:176:138 +15:45:174:114 +16:193:70:55 +17:79:170:37 +18:188:38:162 +19:238:160:72 +20:89:215:116 +21:123:182:246 +22:102:26:71 +23:53:70:90 +24:99:99:54 +25:167:223:101 +26:238:45:105 +27:138:169:114 +28:132:252:176 +29:32:32:81 +30:129:225:213 +31:65:156:99 +32:188:115:233 +33:123:234:112 +34:201:101:138 +35:185:183:41 +36:150:243:155 +37:26:50:64 +38:171:121:165 +39:170:254:54 +40:244:185:175 +41:214:29:36 +42:170:112:237 +43:164:22:209 +44:126:227:95 +45:191:81:148 +46:222:91:4 +47:156:40:159 +48:176:109:240 +49:7:225:116 +50:154:77:51 +51:168:93:76 +52:133:67:19 +53:6:221:142 +54:160:193:216 +55:212:253:76 +56:204:120:224 +57:95:124:35 +58:10:32:118 +59:21:44:194 +60:13:157:119 +61:72:239:164 +62:191:104:98 +63:99:74:138 +64:31:53:59 +65:239:203:134 +66:150:239:14 +67:235:239:91 +68:82:76:180 +69:1:26:60 +70:19:77:252 +71:105:137:12 +72:61:47:9 +73:76:7:71 +74:197:96:73 +75:156:155:0 +76:43:62:31 +77:140:45:111 +78:65:113:219 +79:219:155:164 +80:47:229:90 +81:76:131:29 +82:198:124:49 +83:205:111:30 +84:201:179:246 +85:235:238:200 +86:117:150:205 +87:63:140:9 +88:59:137:9 +89:198:106:149 +90:51:112:72 +91:22:179:99 +92:117:252:32 +93:125:222:4 +94:37:0:251 +95:107:81:144 +96:161:190:123 +97:86:132:205 +98:208:71:77 +99:189:123:127 +100:84:61:239 +101:199:251:209 +102:212:54:166 +103:74:140:243 +104:90:86:96 +105:24:241:51 +106:76:178:247 +107:127:99:229 +108:37:44:224 +109:44:120:143 +110:203:191:236 +111:151:69:13 +112:218:26:112 +113:104:235:75 +114:253:193:184 +115:72:83:136 +116:0:208:89 +117:181:133:18 +118:224:229:131 +119:160:176:156 +120:206:131:45 +121:169:135:104 +122:166:167:122 +123:247:59:159 +124:23:68:104 +125:203:11:94 +126:50:118:123 +127:5:236:255 +128:221:122:166 
+129:252:114:110 +130:51:122:100 +131:28:214:99 +132:120:114:137 +133:249:111:106 +134:233:126:255 +135:120:193:246 +136:105:28:49 +137:4:183:151 +138:122:241:148 +139:186:105:204 +140:199:217:94 +141:171:78:223 +142:249:15:19 +143:123:96:102 +144:77:121:46 +145:169:118:43 +146:251:38:228 +147:5:132:88 +148:7:73:119 +149:242:62:215 +150:222:251:86 +151:38:37:18 +152:159:80:201 +153:211:173:71 +154:159:82:168 +155:37:191:196 +156:80:162:11 +157:62:146:159 +158:95:149:191 +159:223:87:100 +160:111:213:183 +161:43:241:151 +162:123:148:109 +163:242:124:210 +164:153:152:188 +165:34:20:117 +166:78:115:98 +167:32:229:34 +168:133:108:247 +169:222:52:29 +170:141:171:187 +171:183:170:2 +172:110:114:85 +173:21:203:75 +174:69:47:43 +175:52:41:58 +176:37:108:145 +177:91:128:233 +178:124:184:210 +179:241:127:89 +180:185:199:142 +181:44:181:131 +182:106:34:161 +183:251:167:120 +184:204:221:89 +185:16:184:223 +186:91:253:110 +187:13:59:30 +188:124:65:197 +189:225:141:177 +190:42:41:163 +191:162:205:74 +192:243:59:171 +193:49:93:116 +194:100:161:134 +195:45:234:152 +196:148:91:197 +197:208:133:93 +198:0:97:44 +199:109:180:131 +200:202:49:46 +201:114:69:113 +202:192:106:7 +203:228:33:210 +204:118:35:129 +205:117:11:191 +206:46:235:17 +207:110:220:22 +208:252:96:148 +209:200:100:176 +210:60:130:156 +211:246:191:87 +212:243:249:216 +213:240:135:129 +214:128:168:193 +215:75:242:215 +216:112:22:87 +217:45:133:219 +218:20:52:105 +219:25:172:247 +220:247:78:82 +221:193:25:130 +222:15:254:2 +223:252:140:63 +224:5:24:125 +225:89:17:49 +226:170:172:202 +227:184:155:129 +228:68:102:224 +229:141:94:142 +230:250:147:185 +231:139:234:37 +232:84:89:83 +233:138:197:64 +234:78:187:22 +235:171:202:136 +236:58:23:158 +237:216:43:39 +238:45:13:161 +239:212:181:134 +240:112:193:52 +241:62:127:172 +242:247:145:96 +243:1:192:31 +244:145:137:147 +245:201:78:63 +246:29:217:17 +247:67:194:62 +248:168:232:66 +249:244:240:232 +250:6:130:51 +251:183:13:157 +252:28:244:110 +253:234:83:252 +254:137:225:185 +255:153:206:205 +256:65:216:41 +257:240:105:248 +258:9:195:84 +259:126:12:90 +260:134:102:73 +261:230:59:116 +262:57:85:100 +263:168:66:20 +264:63:207:226 +265:244:146:54 +266:116:42:131 +267:122:162:112 +268:23:123:104 +269:74:69:111 +270:192:99:182 +271:18:240:22 +272:28:251:108 +273:8:44:10 +274:105:242:57 +275:254:255:124 +276:111:35:249 +277:69:168:40 +278:61:255:238 +279:15:210:184 +280:142:61:147 +281:51:213:177 +282:111:116:106 +283:81:90:91 +284:71:96:212 +285:104:86:13 +286:20:157:164 +287:36:209:129 +288:68:231:172 +289:64:195:248 +290:61:240:67 +291:109:184:238 +292:54:6:60 +293:103:185:122 +294:184:243:138 +295:237:122:91 +296:168:127:85 +297:123:134:254 +298:111:67:235 +299:187:165:215 +300:105:122:155 +301:214:119:131 +302:99:184:45 +303:213:200:81 +304:156:122:57 +305:114:200:246 +306:77:175:68 +307:217:247:58 +308:104:116:71 +309:238:48:168 +310:218:104:180 +311:218:224:80 +312:64:2:200 +313:182:15:227 +314:138:150:169 +315:85:144:126 +316:214:190:66 +317:71:61:159 +318:45:254:158 +319:74:207:231 +320:169:178:1 +321:151:121:91 +322:1:64:176 +323:112:27:10 +324:47:243:244 +325:156:114:14 +326:115:185:135 +327:137:77:7 +328:169:85:43 +329:236:125:69 +330:121:145:232 +331:220:205:8 +332:139:65:242 +333:138:220:79 +334:145:217:226 +335:201:123:196 +336:141:171:149 +337:201:175:155 +338:97:212:155 +339:126:37:38 +340:89:39:186 +341:81:130:43 +342:242:95:190 +343:208:167:19 +344:216:36:92 +345:71:123:41 +346:147:116:140 +347:46:171:80 +348:35:57:114 +349:146:38:160 +350:79:209:132 +351:183:5:138 +352:12:163:37 
+353:65:231:77 +354:19:190:184 +355:114:178:120 +356:79:216:175 +357:16:4:155 +358:103:244:182 +359:166:118:36 +360:96:61:221 +361:17:84:182 +362:207:161:24 +363:72:24:80 +364:218:251:43 +365:112:144:191 +366:198:145:176 +367:153:68:251 +368:137:248:127 +369:88:61:38 +370:95:111:91 +371:218:4:11 +372:29:234:140 +373:250:59:29 +374:129:49:105 +375:30:226:13 +376:116:141:204 +377:180:245:48 +378:197:154:175 +379:71:165:31 +380:139:253:34 +381:86:45:250 +382:246:225:50 +383:21:175:248 +384:9:39:19 +385:136:107:251 +386:45:120:120 +387:97:248:6 +388:111:98:167 +389:80:10:141 +390:143:239:116 +391:112:178:190 +392:232:35:233 +393:184:231:254 +394:173:255:21 +395:106:130:249 +396:177:213:0 +397:149:175:191 +398:127:172:116 +399:242:94:130 +400:46:206:240 +401:107:205:194 +402:4:138:161 +403:215:16:181 +404:145:102:250 +405:116:245:234 +406:130:41:17 +407:240:78:73 +408:47:43:147 +409:200:88:164 +410:94:147:174 +411:14:125:55 +412:105:55:12 +413:159:108:155 +414:159:250:131 +415:19:9:187 +416:60:170:199 +417:186:95:144 +418:12:120:11 +419:40:106:127 +420:110:60:26 +421:232:32:80 +422:232:247:29 +423:20:190:224 +424:93:243:203 +425:173:18:200 +426:79:55:125 +427:176:6:81 +428:109:250:46 +429:5:86:59 +430:55:246:10 +431:127:7:228 +432:22:154:230 +433:78:188:156 +434:13:50:136 +435:98:198:240 +436:145:73:63 +437:77:78:254 +438:241:147:18 +439:113:168:213 +440:37:232:46 +441:79:129:48 +442:152:236:38 +443:238:44:226 +444:26:19:237 +445:121:1:109 +446:198:233:84 +447:91:79:96 +448:65:153:62 +449:34:31:57 +450:109:203:140 +451:179:131:176 +452:38:19:66 +453:100:226:80 +454:109:86:12 +455:189:84:151 +456:219:153:253 +457:186:176:193 +458:241:213:235 +459:133:160:163 +460:106:151:113 +461:203:160:31 +462:106:60:77 +463:236:121:175 +464:223:198:93 +465:78:101:223 +466:52:115:44 +467:103:161:137 +468:248:63:153 +469:15:224:197 +470:203:187:199 +471:206:170:187 +472:115:178:24 +473:22:75:128 +474:176:49:177 +475:82:176:110 +476:250:188:130 +477:239:6:94 +478:229:120:129 +479:153:188:115 +480:181:94:115 +481:12:9:34 +482:10:124:25 +483:238:192:100 +484:115:146:92 +485:79:236:249 +486:26:90:158 +487:152:49:240 +488:127:1:253 +489:173:158:148 +490:206:100:252 +491:84:77:60 +492:4:215:133 +493:245:168:87 +494:149:72:26 +495:231:241:109 +496:36:191:80 +497:182:37:184 +498:54:89:169 +499:211:30:75 +500:163:243:67 +501:71:73:171 +502:11:237:140 +503:50:17:8 +504:51:115:205 +505:232:147:65 +506:226:238:237 +507:168:5:38 +508:100:95:133 +509:14:153:89 +510:122:117:229 +511:122:142:127 +512:50:103:99 +513:197:119:118 +514:160:153:206 +515:197:155:103 +516:51:102:164 +517:55:219:184 +518:236:78:46 +519:209:17:244 +520:99:23:71 +521:81:183:238 +522:36:113:74 +523:139:8:103 +524:150:231:38 +525:141:140:208 +526:52:233:77 +527:25:171:171 +528:130:242:187 +529:210:88:82 +530:207:239:27 +531:144:184:2 +532:110:18:120 +533:128:36:28 +534:174:118:191 +535:202:153:199 +536:131:25:202 +537:145:53:31 +538:107:174:117 +539:91:37:87 +540:181:39:179 +541:226:213:74 +542:1:172:93 +543:170:0:17 +544:86:30:9 +545:152:197:98 +546:235:44:197 +547:35:147:133 +548:64:59:87 +549:166:74:247 +550:223:127:26 +551:147:176:178 +552:165:199:86 +553:114:216:122 +554:46:136:114 +555:149:205:112 +556:125:243:16 +557:160:103:116 +558:193:232:193 +559:49:227:244 +560:147:201:155 +561:239:89:218 +562:187:66:159 +563:140:235:122 +564:52:71:118 +565:72:14:132 +566:175:161:186 +567:193:254:6 +568:125:142:138 +569:222:13:197 +570:132:255:229 +571:215:14:75 +572:73:64:199 +573:119:214:28 +574:245:231:197 +575:212:77:181 +576:188:241:97 +577:209:80:17 
+578:167:66:28 +579:246:33:139 +580:119:188:174 +581:40:212:35 +582:237:56:187 +583:10:202:52 +584:96:253:166 +585:221:221:229 +586:39:52:17 +587:222:153:163 +588:20:38:255 +589:57:86:48 +590:187:19:83 +591:216:213:214 +592:244:178:235 +593:88:134:123 +594:182:94:79 +595:177:52:128 +596:210:62:119 +597:41:161:80 +598:10:187:218 +599:68:251:207 +600:63:192:138 +601:154:246:224 +602:108:107:217 +603:135:188:80 +604:123:216:127 +605:217:185:189 +606:210:244:248 +607:164:179:247 +608:0:74:104 +609:189:116:175 +610:143:231:237 +611:41:195:68 +612:179:149:232 +613:75:23:168 +614:20:197:112 +615:99:26:135 +616:176:165:180 +617:93:162:218 +618:76:3:194 +619:210:103:77 +620:184:172:175 +621:102:115:241 +622:155:53:223 +623:189:189:219 +624:66:23:168 +625:89:56:98 +626:123:79:73 +627:237:15:223 +628:36:165:185 +629:176:123:234 +630:188:14:122 +631:126:78:47 +632:228:154:69 +633:145:205:27 +634:94:126:238 +635:217:42:177 +636:168:24:150 +637:19:152:24 +638:21:158:211 +639:199:108:99 +640:190:151:25 +641:166:141:106 +642:97:39:86 +643:168:245:124 +644:209:115:64 +645:153:174:242 +646:80:114:147 +647:202:94:183 +648:177:171:251 +649:190:2:6 +650:218:127:33 +651:220:52:184 +652:197:178:219 +653:49:219:161 +654:223:26:64 +655:138:215:144 +656:24:102:205 +657:62:206:187 +658:200:32:168 +659:42:246:99 +660:5:37:118 +661:105:54:191 +662:192:232:247 +663:156:178:50 +664:137:7:55 +665:143:109:34 +666:150:198:190 +667:56:85:187 +668:44:47:199 +669:50:145:25 +670:57:178:49 +671:233:181:132 +672:147:66:113 +673:219:246:107 +674:215:13:114 +675:95:50:255 +676:173:2:242 +677:160:54:109 +678:156:140:129 +679:106:81:208 +680:127:251:248 +681:205:218:21 +682:11:53:202 +683:106:251:51 +684:151:100:65 +685:167:171:45 +686:2:143:83 +687:235:94:129 +688:18:9:86 +689:84:162:177 +690:87:223:136 +691:86:103:217 +692:82:109:235 +693:223:112:59 +694:18:149:173 +695:201:226:228 +696:72:245:221 +697:97:179:163 +698:79:205:231 +699:16:55:185 +700:98:71:208 +701:66:190:172 +702:187:250:239 +703:181:202:231 +704:25:185:107 +705:165:12:224 +706:175:166:93 +707:59:246:154 +708:34:237:57 +709:234:73:134 +710:244:93:149 +711:112:174:222 +712:55:130:38 +713:85:5:178 +714:28:177:89 +715:231:185:7 +716:137:237:155 +717:212:254:130 +718:129:94:243 +719:141:144:57 +720:204:110:64 +721:230:16:112 +722:230:150:5 +723:91:222:12 +724:231:39:84 +725:160:196:203 +726:175:95:247 +727:50:156:80 +728:63:79:178 +729:210:123:71 +730:199:33:4 +731:205:207:247 +732:167:140:107 +733:79:61:189 +734:22:68:233 +735:216:231:24 +736:238:103:3 +737:81:43:245 +738:163:254:51 +739:38:190:24 +740:13:224:83 +741:3:133:167 +742:85:52:45 +743:7:50:236 +744:243:42:2 +745:34:96:137 +746:73:126:107 +747:191:105:130 +748:248:121:167 +749:195:240:138 +750:215:85:253 +751:27:167:70 +752:226:0:134 +753:64:118:37 +754:47:82:59 +755:39:214:200 +756:159:255:237 +757:54:206:217 +758:147:148:50 +759:3:154:131 +760:136:1:51 +761:174:160:189 +762:56:76:177 +763:219:22:193 +764:86:43:85 +765:100:41:56 +766:198:92:251 +767:253:72:163 +768:173:183:54 +769:44:174:214 +770:113:33:17 +771:13:5:69 +772:73:136:122 +773:121:133:61 +774:186:90:162 +775:92:154:7 +776:209:224:6 +777:0:199:71 +778:206:42:154 +779:94:7:239 +780:63:54:31 +781:130:157:51 +782:129:64:77 +783:66:225:50 +784:241:42:36 +785:175:46:252 +786:239:30:71 +787:165:203:218 +788:87:162:242 +789:46:43:210 +790:185:127:186 +791:103:153:137 +792:65:246:219 +793:193:255:175 +794:224:218:138 +795:166:10:100 +796:205:79:75 +797:142:159:9 +798:72:12:0 +799:209:41:20 +800:242:23:0 +801:130:242:173 +802:211:27:17 +803:220:71:238 
+804:180:145:235 +805:214:228:181 +806:31:29:83 +807:154:160:59 +808:236:145:177 +809:188:21:152 +810:230:63:132 +811:48:144:154 +812:187:77:192 +813:27:33:103 +814:250:234:191 +815:174:120:138 +816:148:90:23 +817:126:149:32 +818:226:8:68 +819:192:58:12 +820:235:187:159 +821:20:117:35 +822:29:109:234 +823:138:85:110 +824:133:28:39 +825:216:116:201 +826:249:89:44 +827:154:167:43 +828:102:37:7 +829:217:185:28 +830:23:34:213 +831:255:197:246 +832:89:242:91 +833:157:82:169 +834:134:247:81 +835:127:1:222 +836:40:166:227 +837:167:64:121 +838:129:200:154 +839:238:1:147 +840:96:39:124 +841:33:39:39 +842:207:74:196 +843:161:255:208 +844:200:236:70 +845:155:153:20 +846:190:31:46 +847:10:169:217 +848:102:251:91 +849:249:6:190 +850:196:26:78 +851:102:158:44 +852:54:247:47 +853:115:74:54 +854:187:168:31 +855:153:138:61 +856:113:94:145 +857:190:218:184 +858:192:83:49 +859:35:232:98 +860:125:217:70 +861:81:149:93 +862:176:105:85 +863:52:90:214 +864:127:179:69 +865:203:43:244 +866:70:194:236 +867:226:175:225 +868:129:116:156 +869:120:53:247 +870:219:167:212 +871:131:121:29 +872:74:143:229 +873:251:217:161 +874:196:214:246 +875:30:80:219 +876:193:63:109 +877:14:32:36 +878:74:171:119 +879:3:176:173 +880:5:121:235 +881:108:222:1 +882:81:97:129 +883:130:115:6 +884:21:2:224 +885:216:69:121 +886:144:126:180 +887:192:244:4 +888:31:168:152 +889:64:14:39 +890:113:85:55 +891:174:201:85 +892:111:150:196 +893:2:77:184 +894:142:173:93 +895:27:35:64 +896:254:200:186 +897:87:217:186 +898:207:136:254 +899:242:203:195 +900:157:166:83 +901:213:93:153 +902:42:7:92 +903:134:196:227 +904:196:182:104 +905:22:6:177 +906:0:113:182 +%% diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/fcell/accum_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/fcell/accum_wk new file mode 100644 index 0000000000000000000000000000000000000000..b31b9d3ae3bf83238d1cbd91c183bb2774a53d1e Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/fcell/accum_wk differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/fcell/dem_aspect b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/fcell/dem_aspect new file mode 100644 index 0000000000000000000000000000000000000000..ec043821f4903d53fb3d634adc9388ecb91b48a9 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/fcell/dem_aspect differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/fcell/dem_filled b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/fcell/dem_filled new file mode 100644 index 0000000000000000000000000000000000000000..8489fbf89589535cf97bcc7ae5278bacfb0ba583 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/fcell/dem_filled differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/fcell/dem_slope b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/fcell/dem_slope new file mode 100644 index 0000000000000000000000000000000000000000..25ea65b9ec964080daaa25a8adcbb78a067007e0 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/fcell/dem_slope differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/fcell/dem_wk 
b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/fcell/dem_wk new file mode 100644 index 0000000000000000000000000000000000000000..f240f294631478584b744d80bbfab5ac74ce4415 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/fcell/dem_wk differ diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/accum_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/accum_wk new file mode 100644 index 0000000000000000000000000000000000000000..90d0fcefc5a95aa3bef9581c4e418508853c9e3c --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/accum_wk @@ -0,0 +1,14 @@ +Tue May 25 16:05:14 2021 +accum_wk +PERMANENT +michael.rabotin +raster +dem_filled + +généré par r.watershed +Processing mode: SFD (D8) +Memory mode: All in RAM + +r.watershed --overwrite -s -b elevation="dem_filled" threshold=20 ac\ +cumulation="accum_wk" drainage="drain_wk" basin="subbasins_wk" strea\ +m="streams_wk" half_basin="halfbasins" convergence=5 memory=300 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/asp_rcl b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/asp_rcl new file mode 100644 index 0000000000000000000000000000000000000000..65b61ee2b3cad92cd1fc390f516838a86d7bd234 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/asp_rcl @@ -0,0 +1,17 @@ +Tue May 25 16:05:13 2021 +asp_rcl +PERMANENT +michael.rabotin +raster +raster map dem_aspect + +généré par r.recode +recode of raster map dem_aspect +0.0:45.0:1:1 +45.0:135.0:2:2 +135.0:225.0:3:3 +225.0:315.0:4:4 +315.0:360.0:1:1 + +r.recode input="dem_aspect" output="asp_rcl" rules="/home/michael.ra\ +botin/temporaires/data_exempleRGF93/OUT_FILES/reclass_rules_aspect" diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/dem_aspect b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/dem_aspect new file mode 100644 index 0000000000000000000000000000000000000000..7d6c9c5608792f1c4c05d682786bfea7fcddad72 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/dem_aspect @@ -0,0 +1,15 @@ +Tue May 25 16:05:13 2021 +dem_aspect +PERMANENT +michael.rabotin +raster +raster elevation file dem_filled + +généré par r.slope.aspect +aspect map elev = dem_filled +zfactor = 1.00 +min_slope = 0.000000 + +r.slope.aspect --overwrite elevation="dem_filled" slope="dem_slope" \ +aspect="dem_aspect" format="degrees" precision="FCELL" zscale=1.0 mi\ +n_slope=0.0 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/dem_filled b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/dem_filled new file mode 100644 index 0000000000000000000000000000000000000000..650053fb0da68622602ba783346013ce62e8cd5b --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/dem_filled @@ -0,0 +1,10 @@ +Tue May 25 16:05:13 2021 +dem_filled +PERMANENT +michael.rabotin +raster + + +généré par r.fill.dir +r.fill.dir --overwrite input="dem_filled" output="dem_filled" direct\ +ion="dir_temp" areas="unfilled_areas" format="grass" diff --git 
a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/dem_rcl b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/dem_rcl new file mode 100644 index 0000000000000000000000000000000000000000..a2e597295ce3efbdc8766488f8a1f60f2ac83d30 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/dem_rcl @@ -0,0 +1,15 @@ +Tue May 25 16:05:13 2021 +dem_rcl +PERMANENT +michael.rabotin +raster +raster map dem_filled + +généré par r.recode +recode of raster map dem_filled +200.0:300.0:1:1 +300.0:400.0:2:2 +400.0:500.0:3:3 +500.0:600.0:4:4 + +r.recode --quiet input="dem_filled" output="dem_rcl" rules="-" diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/dem_slope b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/dem_slope new file mode 100644 index 0000000000000000000000000000000000000000..f17a20ebfcef451fa0255a23c5aa9051b4525dfd --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/dem_slope @@ -0,0 +1,15 @@ +Tue May 25 16:05:13 2021 +dem_slope +PERMANENT +michael.rabotin +raster +raster elevation file dem_filled + +généré par r.slope.aspect +slope map elev = dem_filled +zfactor = 1.00 format = degrees +min_slope = 0.000000 + +r.slope.aspect --overwrite elevation="dem_filled" slope="dem_slope" \ +aspect="dem_aspect" format="degrees" precision="FCELL" zscale=1.0 mi\ +n_slope=0.0 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/dem_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/dem_wk new file mode 100644 index 0000000000000000000000000000000000000000..e728b34ee66c45692fecc9d60daa8ce4b768e4be --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/dem_wk @@ -0,0 +1,11 @@ +Tue May 25 16:05:13 2021 +dem_wk +PERMANENT +michael.rabotin +raster + + +généré par r.in.gdal +r.in.gdal --overwrite -o input="/home/michael.rabotin/temporaires/da\ +ta_exempleRGF93/OUT_FILES/step1_dem_cut.tif" output="dem_wk" memory=\ +300 offset=0 num_digits=0 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/dir_temp b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/dir_temp new file mode 100644 index 0000000000000000000000000000000000000000..a891c63ad29b8d2d79b3c80205ced59e99875984 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/dir_temp @@ -0,0 +1,10 @@ +Tue May 25 16:05:13 2021 +dir_temp +PERMANENT +michael.rabotin +raster + + +généré par r.fill.dir +r.fill.dir --overwrite input="dem_filled" output="dem_filled" direct\ +ion="dir_temp" areas="unfilled_areas" format="grass" diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/drain_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/drain_wk new file mode 100644 index 0000000000000000000000000000000000000000..782c2b39541988e64bdb3bacf7e24613dea54711 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/drain_wk @@ -0,0 +1,14 @@ +Tue May 25 16:05:14 2021 +drain_wk +PERMANENT +michael.rabotin +raster +dem_filled + +généré par r.watershed +Processing mode: SFD (D8) +Memory mode: All in RAM + 
+r.watershed --overwrite -s -b elevation="dem_filled" threshold=20 ac\ +cumulation="accum_wk" drainage="drain_wk" basin="subbasins_wk" strea\ +m="streams_wk" half_basin="halfbasins" convergence=5 memory=300 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/halfbasins b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/halfbasins new file mode 100644 index 0000000000000000000000000000000000000000..ebb443a9bd53c5fdc122deb400a47a4bfd906ca3 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/halfbasins @@ -0,0 +1,14 @@ +Tue May 25 16:05:14 2021 +halfbasins +PERMANENT +michael.rabotin +raster +dem_filled + +généré par r.watershed +Processing mode: SFD (D8) +Memory mode: All in RAM + +r.watershed --overwrite -s -b elevation="dem_filled" threshold=20 ac\ +cumulation="accum_wk" drainage="drain_wk" basin="subbasins_wk" strea\ +m="streams_wk" half_basin="halfbasins" convergence=5 memory=300 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/slp_rcl b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/slp_rcl new file mode 100644 index 0000000000000000000000000000000000000000..3619beaf0e2688646faa19adc8ffce7080c34641 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/slp_rcl @@ -0,0 +1,18 @@ +Tue May 25 16:05:13 2021 +slp_rcl +PERMANENT +michael.rabotin +raster +raster map dem_slope + +généré par r.recode +recode of raster map dem_slope +0.0:1.0:1:1 +1.0:3.0:3:3 +3.0:5.0:5:5 +5.0:10.0:10:10 +10.0:20.0:20:20 +20.0:30.0:30:30 +30.0:26.8698:27:27 + +r.recode --quiet input="dem_slope" output="slp_rcl" rules="-" diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/streams_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/streams_wk new file mode 100644 index 0000000000000000000000000000000000000000..b5589fea796db5af5585a74f482726a23a0e528f --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/streams_wk @@ -0,0 +1,14 @@ +Tue May 25 16:05:14 2021 +streams_wk +PERMANENT +michael.rabotin +raster +dem_filled + +généré par r.watershed +Processing mode: SFD (D8) +Memory mode: All in RAM + +r.watershed --overwrite -s -b elevation="dem_filled" threshold=20 ac\ +cumulation="accum_wk" drainage="drain_wk" basin="subbasins_wk" strea\ +m="streams_wk" half_basin="halfbasins" convergence=5 memory=300 diff --git a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/subbasins_wk b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/subbasins_wk new file mode 100644 index 0000000000000000000000000000000000000000..b8b76cdc54e365f5aae319270b9cc46a5a3d5c2f --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/subbasins_wk @@ -0,0 +1,14 @@ +Tue May 25 16:05:14 2021 +subbasins_wk +PERMANENT +michael.rabotin +raster +dem_filled + +généré par r.watershed +Processing mode: SFD (D8) +Memory mode: All in RAM + +r.watershed --overwrite -s -b elevation="dem_filled" threshold=20 ac\ +cumulation="accum_wk" drainage="drain_wk" basin="subbasins_wk" strea\ +m="streams_wk" half_basin="halfbasins" convergence=5 memory=300 diff --git 
a/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/unfilled_areas b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/unfilled_areas new file mode 100644 index 0000000000000000000000000000000000000000..5b75d6c2b338856fcce787b356fded3b5a7ff542 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/grass_db/grassdata/hru-delin/PERMANENT/hist/unfilled_areas @@ -0,0 +1,10 @@ +Tue May 25 16:05:13 2021 +unfilled_areas +PERMANENT +michael.rabotin +raster + + +généré par r.fill.dir +r.fill.dir --overwrite input="dem_filled" output="dem_filled" direct\ +ion="dir_temp" areas="unfilled_areas" format="grass" diff --git a/irrigation-R-codes/data_exemple_irrigation/hgeo_tille.tif b/irrigation-R-codes/data_exemple_irrigation/hgeo_tille.tif new file mode 100644 index 0000000000000000000000000000000000000000..ab8bf76ce81479dddcd5e8faa9b535e95afe5db5 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/hgeo_tille.tif differ diff --git a/irrigation-R-codes/data_exemple_irrigation/hgeo_tille.tif.aux.xml b/irrigation-R-codes/data_exemple_irrigation/hgeo_tille.tif.aux.xml new file mode 100644 index 0000000000000000000000000000000000000000..e457349baa295b87299dd8e725e7c999898b5162 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/hgeo_tille.tif.aux.xml @@ -0,0 +1,21 @@ +<PAMDataset> + <PAMRasterBand band="1"> + <Histograms> + <HistItem> + <HistMin>0.9965000000000001</HistMin> + <HistMax>8.003500000000001</HistMax> + <BucketCount>1000</BucketCount> + <IncludeOutOfRange>0</IncludeOutOfRange> + <Approximate>0</Approximate> + <HistCounts>195|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|10085|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0
|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|3</HistCounts> + </HistItem> + </Histograms> + <Metadata> + <MDI key="STATISTICS_MAXIMUM">8</MDI> + <MDI key="STATISTICS_MEAN">6.8870952056793</MDI> + <MDI key="STATISTICS_MINIMUM">1</MDI> + <MDI key="STATISTICS_STDDEV">0.81653137044805</MDI> + <MDI key="STATISTICS_VALID_PERCENT">100</MDI> + </Metadata> + </PAMRasterBand> +</PAMDataset> diff --git a/irrigation-R-codes/data_exemple_irrigation/hrudelin_config.cfg b/irrigation-R-codes/data_exemple_irrigation/hrudelin_config.cfg new file mode 100644 index 0000000000000000000000000000000000000000..410048038428e4e1e2e7c52e108305f98fc9a175 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/hrudelin_config.cfg @@ -0,0 +1,113 @@ + +# ----------- +# environment +# ----------- + +[dir_in] +dir: +[files_in] +dem: +gauges: + +[data] +hgeo: +landuse: +soil: + +[irrigation] +irrig_rast: + +[dir_out] +files: +results: +# ------------------------- +# 1st step : hru-delin_init +# ------------------------- + +[surface] +#selection: total -> full dem +# polygon -> polygon: name of the shapefile +# coords -> give the coords upper left (west and north) and lower right (east and south) +selection: +polygon: +west: +north: +east: +south: + + +[demfill] +# +# if demfill = yes : depressionless DEM will be generated +# no : no action on input DEM +# +demfill:yes + +# +# if rules_auto_* = yes : rules will be calculated by the module +# if no : fill the corresponding file (reclass_default_rules_*) +# +[reclass_dem] +rules_auto_dem:yes +step_dem: + +[reclass_slope] +rules_auto_slope:yes + +[reclass_aspect] +rules_auto_aspect:yes + +[basin_min_size] +# number of pixels +size= + +# --------------------------- +# 2nd step : hru-delin_basins +# --------------------------- +[auto_relocation] +to_do:yes +# first rule +surface_tolerance_1= +distance_tolerance_1= +# second rule +surface_tolerance_2= +distance_tolerance_2= + +# drained surface +gauge_area_col_name=S_BH + +# unit = 1 : m , = 2 : km +gauge_area_unit= + + +relocated_gauges: + +[for_watershed_id] +# column name of the gauge attribute (attribute type must be numeric) +# used for identification of watersheds +col_name= +# --------------------------- +# 3rd step : hru-delin_hrugen +# --------------------------- + +[hrus_min_surface] +# +surface= + +# +# MNT-derived layers to be integrated in the overlay operation +# +[layer_overlay] +dem:x +slope:x +aspect:x + +# -------------------------------- +# 4th step : hru-delin_parms_J2000 +# -------------------------------- +[topology] +dissolve_cycle:y + + + + diff --git a/irrigation-R-codes/data_exemple_irrigation/landuse_tille.tif b/irrigation-R-codes/data_exemple_irrigation/landuse_tille.tif new file mode 100644 index 0000000000000000000000000000000000000000..fa67bb19f7bcf9b9554c95b7e9259084e97b8225 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/landuse_tille.tif differ diff --git a/irrigation-R-codes/data_exemple_irrigation/landuse_tille.tif.aux.xml b/irrigation-R-codes/data_exemple_irrigation/landuse_tille.tif.aux.xml new file mode 100644 index 0000000000000000000000000000000000000000..341ec4b941e7cc5cc1d546f206787757786920a5 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/landuse_tille.tif.aux.xml @@ -0,0 +1,21 @@ +<PAMDataset> + <PAMRasterBand band="1"> + <Histograms> + <HistItem> + <HistMin>0.998</HistMin> + <HistMax>5.002</HistMax> + <BucketCount>1000</BucketCount> + <IncludeOutOfRange>0</IncludeOutOfRange> + 
<Approximate>0</Approximate> + <HistCounts>4|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|5019|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|109|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|4075|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|49</HistCounts> + </HistItem> + </Histograms> + <Metadata> + <MDI key="STATISTICS_MAXIMUM">5</MDI> + <MDI key="STATISTICS_MEAN">2.9147579948142</MDI> + <MDI key="STATISTICS_MINIMUM">1</MDI> + <MDI key="STATISTICS_STDDEV">0.99922961130532</MDI> + <MDI key="STATISTICS_VALID_PERCENT">100</MDI> + </Metadata> + </PAMRasterBand> +</PAMDataset> diff --git a/irrigation-R-codes/data_exemple_irrigation/mnt_tille.tif b/irrigation-R-codes/data_exemple_irrigation/mnt_tille.tif new file mode 100644 index 0000000000000000000000000000000000000000..4d4aab4f563b7989e684de8f6e8c431cd3ac2549 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/mnt_tille.tif differ diff --git a/irrigation-R-codes/data_exemple_irrigation/mnt_tille.tif.aux.xml b/irrigation-R-codes/data_exemple_irrigation/mnt_tille.tif.aux.xml new file mode 100644 index 0000000000000000000000000000000000000000..a6b06d287d0b980070cbc6a303a01402f77e7ea4 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/mnt_tille.tif.aux.xml @@ -0,0 +1,11 @@ +<PAMDataset> + <PAMRasterBand band="1"> + <Metadata> + <MDI key="STATISTICS_MAXIMUM">538.97302246094</MDI> + <MDI key="STATISTICS_MEAN">414.83007541956</MDI> + <MDI key="STATISTICS_MINIMUM">253.27299499512</MDI> + <MDI key="STATISTICS_STDDEV">53.2788953045</MDI> + <MDI key="STATISTICS_VALID_PERCENT">100</MDI> + </Metadata> + </PAMRasterBand> +</PAMDataset> diff --git a/irrigation-R-codes/data_exemple_irrigation/soil_tille.tif 
b/irrigation-R-codes/data_exemple_irrigation/soil_tille.tif new file mode 100644 index 0000000000000000000000000000000000000000..8779ddb6c7a1d99426a74470e74e68a8f3f74684 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/soil_tille.tif differ diff --git a/irrigation-R-codes/data_exemple_irrigation/soil_tille.tif.aux.xml b/irrigation-R-codes/data_exemple_irrigation/soil_tille.tif.aux.xml new file mode 100644 index 0000000000000000000000000000000000000000..ae018d45b9da34ff9e751fb5397b516c1b84d9ba --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/soil_tille.tif.aux.xml @@ -0,0 +1,21 @@ +<PAMDataset> + <PAMRasterBand band="1"> + <Histograms> + <HistItem> + <HistMin>0.999</HistMin> + <HistMax>3.001</HistMax> + <BucketCount>1000</BucketCount> + <IncludeOutOfRange>0</IncludeOutOfRange> + <Approximate>0</Approximate> + <HistCounts>7229|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|3419</HistCounts> + </HistItem> + </Histograms> + <Metadata> + <MDI key="STATISTICS_MAXIMUM">3</MDI> + <MDI key="STATISTICS_MEAN">1.6440646130729</MDI> + <MDI key="STATISTICS_MINIMUM">1</MDI> + <MDI key="STATISTICS_STDDEV">0.93451056726665</MDI> + <MDI key="STATISTICS_VALID_PERCENT">100</MDI> + </Metadata> + </PAMRasterBand> +</PAMDataset> diff --git a/irrigation-R-codes/data_exemple_irrigation/stations_tille.cpg b/irrigation-R-codes/data_exemple_irrigation/stations_tille.cpg new file mode 100644 index 0000000000000000000000000000000000000000..3ad133c048f2189041151425a73485649e6c32c0 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/stations_tille.cpg @@ -0,0 +1 
@@ +UTF-8 \ No newline at end of file diff --git a/irrigation-R-codes/data_exemple_irrigation/stations_tille.dbf b/irrigation-R-codes/data_exemple_irrigation/stations_tille.dbf new file mode 100644 index 0000000000000000000000000000000000000000..1f172354d6980e1b77fb311943ba2fdbfbc8f600 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/stations_tille.dbf differ diff --git a/irrigation-R-codes/data_exemple_irrigation/stations_tille.prj b/irrigation-R-codes/data_exemple_irrigation/stations_tille.prj new file mode 100644 index 0000000000000000000000000000000000000000..ae0206b68de2ed81139b89a08ddd36a6b0ed7e35 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/stations_tille.prj @@ -0,0 +1 @@ +PROJCS["RGF_1993_Lambert_93",GEOGCS["GCS_RGF_1993",DATUM["D_RGF_1993",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",700000.0],PARAMETER["False_Northing",6600000.0],PARAMETER["Central_Meridian",3.0],PARAMETER["Standard_Parallel_1",49.0],PARAMETER["Standard_Parallel_2",44.0],PARAMETER["Latitude_Of_Origin",46.5],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/irrigation-R-codes/data_exemple_irrigation/stations_tille.shp b/irrigation-R-codes/data_exemple_irrigation/stations_tille.shp new file mode 100644 index 0000000000000000000000000000000000000000..d567e7f86e51343b7135d36ce3f24b276ac7f815 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/stations_tille.shp differ diff --git a/irrigation-R-codes/data_exemple_irrigation/stations_tille.shx b/irrigation-R-codes/data_exemple_irrigation/stations_tille.shx new file mode 100644 index 0000000000000000000000000000000000000000..caa62774188cbdddb9bd5846f807fab7b9e87128 Binary files /dev/null and b/irrigation-R-codes/data_exemple_irrigation/stations_tille.shx differ diff --git a/irrigation-R-codes/data_exemple_irrigation/tille_allstep_config.cfg b/irrigation-R-codes/data_exemple_irrigation/tille_allstep_config.cfg new file mode 100644 index 0000000000000000000000000000000000000000..6a8ff254c6a8b5f102e63dbca486f1f0c2ccdf38 --- /dev/null +++ b/irrigation-R-codes/data_exemple_irrigation/tille_allstep_config.cfg @@ -0,0 +1,112 @@ + +# ----------- +# environment +# ----------- + +[dir_in] +dir:/home/michael.rabotin/temporaires/data_exempleRGF93 +[files_in] +dem:mnt_tille.tif +gauges:stations_tille.shp + +[data] +hgeo:hgeo_tille.tif +landuse:landuse_tille.tif +soil:soil_tille.tif + +[irrigation] +irrig_rast: + +[dir_out] +files:/home/michael.rabotin/temporaires/data_exempleRGF93/OUT_FILES +results:/home/michael.rabotin/temporaires/data_exempleRGF93/OUT_RESULTS +# ------------------------- +# 1st step : hru-delin_init +# ------------------------- + +[surface] +#selection: total -> full dem +# polygon -> polygon: name of the shapefile +# coords -> give the coords upper left (west and north) and lower right (east and south) +selection:polygon +polygon:bv_tille.shp +west: +north: +east: +south: + + +[demfill] +# +# if demfill = yes : depressionless DEM will be generated +# no : no action on input DEM +# +demfill:yes + +# +# if rules_auto_* = yes : rules will be calculated by the module +# if no : fill the corresponding file (reclass_default_rules_*) +# +[reclass_dem] +rules_auto_dem:yes +step_dem:100 + +[reclass_slope] +rules_auto_slope:yes + +[reclass_aspect] +rules_auto_aspect:yes + +[basin_min_size] +# number of pixels +size=20 + +# --------------------------- +# 2nd step : 
hru-delin_basins +# --------------------------- +[auto_relocation] +to_do:yes +# first rule +surface_tolerance_1=1 +distance_tolerance_1=1 +# second rule +surface_tolerance_2=2 +distance_tolerance_2=2 + +# drained surface +gauge_area_col_name=S_BH + +# unit = 1 : m , = 2 : km +gauge_area_unit=1 + +relocated_gauges: + +[for_watershed_id] +# column name of the gauge attribute (attribute type must be numeric) +# used for identification of watersheds +col_name=S_BH +# --------------------------- +# 3rd step : hru-delin_hrugen +# --------------------------- + +[hrus_min_surface] +# +surface=1000 + +# +# MNT-derived layers to be integrated in the overlay operation +# +[layer_overlay] +dem:x +slope:x +aspect:x + +# -------------------------------- +# 4th step : hru-delin_parms_J2000 +# -------------------------------- +[topology] +dissolve_cycle:y + + + + diff --git a/irrigation-R-codes/irrigation-R-codes.Rproj b/irrigation-R-codes/irrigation-R-codes.Rproj new file mode 100644 index 0000000000000000000000000000000000000000..8e3c2ebc99e2e337f7d69948b93529a437590b27 --- /dev/null +++ b/irrigation-R-codes/irrigation-R-codes.Rproj @@ -0,0 +1,13 @@ +Version: 1.0 + +RestoreWorkspace: Default +SaveWorkspace: Default +AlwaysSaveHistory: Default + +EnableCodeIndexing: Yes +UseSpacesForTab: Yes +NumSpacesForTab: 2 +Encoding: UTF-8 + +RnwWeave: Sweave +LaTeX: pdfLaTeX diff --git a/irrigation-R-codes/lib.zip b/irrigation-R-codes/lib.zip new file mode 100644 index 0000000000000000000000000000000000000000..b82e59d0b11cb1d13caa8e1fd5c9f14534f66fc5 Binary files /dev/null and b/irrigation-R-codes/lib.zip differ diff --git a/irrigation-R-codes/lib/rgate.R b/irrigation-R-codes/lib/rgate.R new file mode 100644 index 0000000000000000000000000000000000000000..77ec77682daf3cb0c03e6ed141b20c6aa8165b05 --- /dev/null +++ b/irrigation-R-codes/lib/rgate.R @@ -0,0 +1 @@ +source("lib/rgate/Stub.R") diff --git a/irrigation-R-codes/lib/rgate/Argument.R b/irrigation-R-codes/lib/rgate/Argument.R new file mode 100644 index 0000000000000000000000000000000000000000..54e8decabeebb887694a4f2447705d31c04f4068 --- /dev/null +++ b/irrigation-R-codes/lib/rgate/Argument.R @@ -0,0 +1,148 @@ +library(R6) + +# class Argument avec R6 +Argument <- R6Class("Argument", + list( # attributs et méthods public + name = "", + value = NULL, + + initialize = function(name) { + stopifnot(is.character(name), length(name) == 1) + + self$name <- name + } + ) +) + +# class Parameter avec R6 +Parameter <- R6Class("Parameter", inherit = Argument, +public = list( + initialize = function(name, value) { + stopifnot(is.character(value), length(value) == 1) + + self$value <- value + super$initialize(name = name) + }, + + display = function() { + json_line = toJSON(self$serialize()) + print(json_line) + return(json_line) + }, + + getArgument = function(name) { + if(self$name == name) + return(self) + return(NULL) + }, + + getValueAsNumeric = function() { + return(as.numeric(self$value)) + }, + + getValueAsList = function() { + return(as.list(fromJSON(self$value))) + }, + + serialize = function() { + return(list( + "Parameter" = list( + "name" = self$name, + "value" = self$value + ) + )) + } +)) + + + +# class Dictionary avec R6 +Dictionary <- R6Class("Dictionary", inherit = Argument, +public = list( + value = list(), + initialize = function(name) { + self$value = list() + + super$initialize(name) + }, + + display = function() { + json_line = toJSON(self$serialize()) + print(json_line) + return(json_line) + }, + + addArgument = function(argument) { + self$value = 
append(self$value, argument) + invisible(self) + }, + + addParameter = function(name, value) { + self$addArgument(Parameter$new(name, value)) + }, + + getArgument = function(name) { + if(self$name == name) + return(self) + + for(argument in self$value) { + arg = argument$getArgument(name) + if(!is.null(arg)) + return(arg) + } + + return(NULL) + }, + + getParameter = function(name) { + for(argument in self$value) { + arg = argument$getArgument(name) + if(!is.null(arg)) + if(class(arg) == "Parameter") + return(arg) + } + }, + + serialize = function() { + dico <- list( + "Dictionary" = list( + "name" = self$name + ) + ) + + value = list() + for(val in self$value) { + value = append(value, val$serialize()) + } + dico$Dictionary$value = value + + return(dico) + }, + + deserialize = function(dico) { + value_json = dico + + # Dictionary + for(i in seq_along(value_json$Dictionary$name)) { + + if(!is.null(value_json$Dictionary$name[[i]]) && !is.null(value_json$Dictionary$value[[i]])) { + dictionary = Dictionary$new(value_json$Dictionary$name[[i]]) + dictionary = dictionary$deserialize(value_json$Dictionary$value[[i]]) + + self$value = append(self$value, dictionary) + } + } + + # Paramater + for(i in seq_along(value_json$Parameter$name)) { + + if(!is.null(value_json$Parameter$name[[i]]) && !is.null(value_json$Parameter$value[[i]])) { + parameter = Parameter$new(value_json$Parameter$name[[i]], value_json$Parameter$value[[i]]) + + self$value = append(self$value, parameter) + } + } + + invisible(self) + } +)) diff --git a/irrigation-R-codes/lib/rgate/File.R b/irrigation-R-codes/lib/rgate/File.R new file mode 100644 index 0000000000000000000000000000000000000000..c91f8c0bd036438cce1c8af779c7d5dd3100b039 --- /dev/null +++ b/irrigation-R-codes/lib/rgate/File.R @@ -0,0 +1,180 @@ +library(R6) + +File <- R6Class("File", +public=list( + name = "", + path = "", + actif = TRUE, + + initialize = function(path, name) { + stopifnot(is.character(name), length(name) == 1) + stopifnot(is.character(path), length(path) == 1) + + self$actif=TRUE + + self$name = name + self$path = path + }, + + display = function() { + if(self$actif){ + json_line = toJSON(self$serialize()) + print(json_line) + return(json_line) + } + return("") + }, + + serialize = function() { + if(self$actif){ + return(list( + "File" = list( + "name" = self$name, + "path" = self$path + ) + )) + } + return(list()) + } +)) + + + +ExecFile <- R6Class("ExecFile", inherit = File, +public = list( + cmd = "", + initialize = function(path, name, cmd) { + stopifnot(is.character(cmd), length(cmd) == 1) + + self$cmd = cmd + super$initialize(path, name) + }, + + display = function() { + if(self$actif){ + json_line = toJSON(self$serialize()) + print(json_line) + return(json_line) + } + return("") + }, + + serialize = function() { + if(self$actif){ + dico = super$serialize() + + dico$ExecFile = dico$File + dico$File = NULL + + dico$ExecFile$cmd = self$cmd + + return(dico) + } + return(list()) + } +)) + + + +OutputFile <- R6Class("OutputFile", inherit = File, +public = list( + initialize = function(path, name) { + super$initialize(path, name) + }, + + display = function() { + if(self$actif){ + json_line = toJSON(self$serialize()) + print(json_line) + return(json_line) + } + return("") + }, + + serialize = function() { + if(self$actif){ + dico = super$serialize() + + dico$OutputFile = dico$File + dico$File = NULL + + return(dico) + } + return(list()) + }, + + read = function() { + if(self$actif){ + json_line = paste0(readLines(paste0(self$path, self$name))) + line = 
fromJSON(json_line) + + return(line) + } + return("") + }, + + readAsDictionary = function() { + if(self$actif){ + dictionary_dict = Dictionary$new("outputFile") + + file = self$read() + output_dico = Dictionary$new("Outputs") + for(i in seq_along(file$Outputs)) { + dico = Dictionary$new(file$Outputs$Dictionary$name[[i]]) + dico$deserialize(file$Outputs$Dictionary$value[[i]]) + + output_dico$addArgument(dico) + } + dictionary_dict$addArgument(output_dico) + + error_dico = Dictionary$new("Error") + error_dico$addParameter("code", file$Error$code) + error_dico$addParameter("description", file$Error$description) + error_dico$addParameter("traceback", file$Error$traceback) + + dictionary_dict$addArgument(error_dico) + + check_dico = Dictionary$new("Check") + for(i in seq_along(file$Check)) { + dataFile_dico = Dictionary$new(file$Check$DataFile$fileName[[i]]) + dataFile_dico$addParameter("name", file$Check$DataFile$fileName[[i]]) + + dataFile_dico$addParameter("nbMissing", file$Check$DataFile$nbMissing[[i]]) + if(!length( file$Check$DataFile$missing[[i]]) == 0) + dataFile_dico$addParameter("missing", file$Check$DataFile$missing[[i]]) + + dataFile_dico$addParameter("nbIncorrect", file$Check$DataFile$nbIncorrect[[i]]) + if(!length( file$Check$DataFile$incorrect[[i]]) == 0) + dataFile_dico$addParameter("missing", file$Check$DataFile$incorrect[[i]]) + + check_dico$addArgument(dataFile_dico) + } + + dictionary_dict$addArgument(check_dico) + return(dictionary_dict) + } + return(Dictionary$new("")) + }, + + displayContent = function() { + if(self$actif){ + json_line = prettify(toJSON(self$read())) + print(json_line) + return(json_line) + } + return("") + }, + + writeOutput = function(dictionary) { + if(self$actif){ + file = toJSON(self$read()) + + to_add = paste0('"Outputs":[', improveSerialize(toJSON(dictionary$serialize())), ',') + file = paste0(append(strsplit(file, '"Outputs":\\[')[[1]], to_add, after=1)) + + write(prettify(file), paste0(self$path, self$name)) + } + invisible(self) + } + +)) diff --git a/irrigation-R-codes/lib/rgate/Stub.R b/irrigation-R-codes/lib/rgate/Stub.R new file mode 100644 index 0000000000000000000000000000000000000000..36d9253b4f5861746a62d773bab76d8da82db33f --- /dev/null +++ b/irrigation-R-codes/lib/rgate/Stub.R @@ -0,0 +1,237 @@ +library(R6) +library(jsonlite) + +source("lib/rgate/File.R") +source("lib/rgate/Argument.R") + +improveSerialize = function(serializedLine) { + serializedLine = gsub(':\\["', ':"', serializedLine) + serializedLine = gsub('"]', '"', serializedLine) + serializedLine = gsub('"]]', '"]', serializedLine) + serializedLine = gsub('"]}', '"}', serializedLine) + serializedLine = gsub( '"],','",', serializedLine) + return(serializedLine) + } +improveDeSerialize = function(serializedLine) { + serializedLine = gsub( ':"', ':\\["', serializedLine) + serializedLine = gsub( ': "', ':\\["', serializedLine) + serializedLine = gsub( '"]', '"]]', serializedLine) + serializedLine = gsub( '"}', '"]}', serializedLine) + serializedLine = gsub( '",', '"],', serializedLine) + return(serializedLine) + } + +Stub <- R6Class("Stub", +public = list( + outputFile = NA, + dictionaries = list(), + dataFiles = list(), + actif = TRUE, + + initialize = function(outputFile = NA) { + if(is.na(outputFile)) + outputFile = OutputFile$new("","") + + self$actif=TRUE + + self$outputFile = outputFile + }, + + displayDictionaries = function() { + if(self$actif) { + dictionaries = list() + dico = list() + + for(dictionary in self$dictionaries) + dictionaries = append(dictionaries, 
dictionary$serialize()) + + dico$Dictionaries = dictionaries + json_line = toJSON(dico) + + print(json_line) + + invisible(json_line) + } + invisible("") + }, + + displayOutputFile = function() { + if(self$actif) { + json_line = toJSON(self$outputFile$serialize()) + + print(json_line) + + invisible(json_line) + } + invisible("") + }, + + displayAll = function() { + if(self$actif) { + invisible(paste0(self$displayOutputFile(), self$displayDictionaries())) + } + invisible("") + }, + + getArgument = function(name) { + if(self$actif) { + return(self$findArgumentWithName(name)) + } + return(NULL) + }, + + findArgumentWithName = function(name) { + if(self$actif) { + dico = NULL + for(dictionary in self$dictionaries) { + dico = dictionary$getArgument(name) + if(!is.null(dico)) + break + } + + return(dico) + } + return(NULL) + }, + + findDictionaryWithName = function(name) { + if(self$actif) {- + for(dico in self$dictionaries){ + if(dico$name == name) + return(dico) + } + } + return(NULL) + } +)) + + +SenderStub <- R6Class("SenderStub", inherit = Stub, +public = list( + execFile = NA, + initialize = function(execFile=NA, outputFile=NA) { + super$initialize(outputFile = outputFile) + self$execFile = execFile + }, + + run = function(gatePath, gateName) { + + print(" =============== Running gateway =============== ") + + system(paste0(gatePath , gateName , " '" , self$serialize() , "'")) + print(" =============== Gateway ending ================ ") + + invisible(self) + }, + + displayExecFile = function() { + + json_line = toJSON(self$execFile$serialize()) + + print(json_line) + return(json_line) + + return("") + }, + + displayAll = function() { + + invisible(paste0(self$displayExecFile(), super$displayAll())) + + invisible("") + }, + + + serialize = function() { + + dico = list( + "ExecFile" = self$execFile$serialize()$ExecFile, + "OutputFile" = self$outputFile$serialize()$OutputFile + ) + + dictionaries = list() + + for(dictionary in self$dictionaries) { + dictionaries = append(dictionaries, dictionary$serialize()) + } + + #dataFiles = list() + + #for(dataFile in self$dataFiles) { + # append(dataFiles, dataFile$serialize()) + #} + + dico$Dictionaries = dictionaries + #dico$DataFiles = dataFile + + return(improveSerialize(toJSON(dico))) + + return("") + } +)) + + +ReceiverStub <-R6Class("ReceiverStub", inherit = Stub, +public = list( + + initialize = function() { + super$initialize(NA) + + self$deserialize(self$readArguments()) + }, + + readArguments = function() { + if(length(commandArgs(TRUE)) > 0) + return(paste(commandArgs(TRUE),collapse = ' ')) + + self$actif = FALSE + self$outputFile$actif = FALSE + + for(dico in self$dictionaries) { + dico$actif = FALSE + } + + # for(dataFile in self$dataFiles) { + # dataFile$actif = FALSE + # } + + return(-1) + }, + + deserialize = function(json_line) { + if(self$actif) { + print(json_line) + if(json_line != -1) { + + json_line = improveDeSerialize(json_line) + data_line = fromJSON(json_line) + + self$outputFile = OutputFile$new(data_line$OutputFile$path[[1]], data_line$OutputFile$name[[1]]) + + #dataFiles_json = data_line$DataFiles + + #for(dataFile_json in dataFiles_json) { + # dataFile = DataFile(dataFile_json$DataFile$path, dataFile_json$DataFile$name) + # + # for(data_json in dataFile_json$DataFile$Content) { + # dataFile$addData(Data(data_json$Data$name, stringToType(data_json$Data$type))) + + # append(self$dataFiles, dataFile) + # } + #} + + dictionaries_json = data_line$Dictionaries + + for (i in seq_along(dictionaries_json)) { + dictionary = 
Dictionary$new(dictionaries_json[[i]]$name[[1]]) + dictionary$deserialize(dictionaries_json[[i]]$value[[1]]) + + self$dictionaries = append(self$dictionaries, dictionary) + } + } + } + + invisible(self) + } + +)) diff --git a/irrigation-R-codes/lib/utilitaire_irrigation.R b/irrigation-R-codes/lib/utilitaire_irrigation.R new file mode 100644 index 0000000000000000000000000000000000000000..bb249835e43ad61269626a7c0bec441040c0d48e --- /dev/null +++ b/irrigation-R-codes/lib/utilitaire_irrigation.R @@ -0,0 +1,455 @@ +# ============================================================================== +# By Theo L. intern at INRAE +# CREATED on May 16, 2022 +# +# +# ------------------------------------------------------------------------------ +# the objective is to regroup the essential function of the MDR_irrigated project +# in an easy to maintain and well documented file +# +# most of the functions that are in this file are comming from another R source +# file, to keep track of those the "From" boxes indicate the name of the original +# file +# ============================================================================== + +library(zoo) +library(xts) + + + +# ========================== +# ** From MDR_utilitaires ** +# ========================== + + +# -------------------------------add_param-------------------------------------- +# **** add an extra parameter to reach.par + +# add_param <- function(inputdir, oldreachfile, newreachfile, newparamName, newparamVal, newparamUnit) +# Adds a new parameter to the given reach file, with the given value and unit. +# Args: +# inputdir: The input directory +# oldreachfile: The old reach file +# newreachfile: The new reach file +# newparamName: The name of the new parameter +# newparamVal: The value of the new parameter +# newparamUnit: The unit of the new parameter + +# The point of this function is to add a new parameter to an oldreachfile and create a newreachfile with the new parameter included. +# The function takes the inputdir (input directory), oldreachfile, newreachfile, newparamName, newparamVal, and newparamUnit as arguments. +# it then starts by finding the number of lines in the oldreachfile, then reads the header line and finds the line where the ID is located. +# Then the function reads in the oldreachfile, adds the new paramName to the file, and creates a new Min, Max, and Unit file with the new parameter included. +# Finally, the function writes the newreachfile with the new parameter included. 
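+# A hypothetical call, shown only to document the expected arguments (the
+# directory, file names and parameter name/value/unit below are placeholders,
+# not project defaults), kept as a comment so sourcing the file has no side effect:
+#   add_param(inputdir     = 'parameter/',
+#             oldreachfile = 'reach.par',
+#             newreachfile = 'reach_new.par',
+#             newparamName = 'Smax',
+#             newparamVal  = 0,
+#             newparamUnit = 'Mm3')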
+# ------------------------------------------------------------------------------ +add_param <- function(inputdir, oldreachfile, newreachfile, newparamName, newparamVal, newparamUnit) { + + nbLines <- skip_lines(inputdir,oldreachfile) + headerReach <- readLines(paste0(inputdir, oldreachfile), n = nbLines) + LinesNames <- which(substr(headerReach,1,2)=="ID") + Names <- read.table(paste0(inputdir, oldreachfile), nr=1, skip=LinesNames-1) + Names <- cbind(Names,newparamName) + Min <- read.table(paste0(inputdir, oldreachfile), nr=1, skip=LinesNames) + Min <- cbind(Min,0,0) + Max <- read.table(paste0(inputdir, oldreachfile), nr=1, skip=LinesNames+1) + Max <- cbind(Max,9999999,9999999) + Unit <- read.table(paste0(inputdir, oldreachfile), nr=1, skip=LinesNames+2) + Unit <- cbind(Unit,newparamUnit) + reach <- Chargement_param(inputdir,oldreachfile) + reach <- cbind(reach,newparamVal) + + write.table (Names, paste0(inputdir, newreachfile), col.names=F, row.names=F, quote=F, sep='\t', append=F) + write.table (Min, paste0(inputdir, newreachfile), col.names=F, row.names=F, quote=F, sep='\t', append=T) + write.table (Max, paste0(inputdir, newreachfile), col.names=F, row.names=F, quote=F, sep='\t', append=T) + write.table (Unit, paste0(inputdir, newreachfile), col.names=F, row.names=F, quote=F, sep='\t', append=T) + write.table (reach, paste0(inputdir, newreachfile), col.names=F, row.names=F, quote=F, sep='\t', append=T) +} + + +# -------------------------------Chargement_param------------------------------- +# Chargement_param <- function(chemin,Name) +# Loads all parameters from a given file. +# Args: +# chemin: The path to the file +# Name: The name of the file +# Returns: +# The parameters as a data frame + +# - The code is able to identify the line with the first values and skip the initial text lines +# Caveats: it may not work for files with less than 3 lines of data. +# ------------------------------------------------------------------------------ +Chargement_param <- function(chemin, Name) { + # initialization + k <- 0 + obj <- NULL; obj2 <- NULL; obj3 <- NULL + + # loop until we find a line with 3 numeric value + while(length(na.omit(obj))==0 | length(na.omit(obj2))==0 | length(na.omit(obj3))==0) { + + obj <- as.numeric(read.table(paste0(chemin, Name), nrow=1, skip=k, colClasses="character"))[1] + obj2 <- as.numeric(read.table(paste0(chemin, Name), nrow=1, skip=k+1, colClasses="character"))[1] + obj3 <- as.numeric(read.table(paste0(chemin, Name), nrow=1, skip=k+2, colClasses="character"))[1] + k <- k+1 + } + + # get the number of line to skip to get the data + nbLines <- k - 1 + + # load the data + data <- read.table(paste0(chemin, Name), skip=nbLines) + mycolnames <- apply(read.table(paste0(chemin, Name), nrow=1)[1,], 1, as.character) + colnames(data) <- mycolnames + + return(data) +} + + +# -------------------------------write_new_paramfile---------------------------- +# write_new_paramfile=<-(oldfile, newvalues ,newfile) +# Writes the new combination of HRUs to the original parameter file. 
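+# A possible round trip with Chargement_param() defined above (paths, file and
+# column names are illustrative only; the example is left commented so that
+# sourcing this file stays side-effect free):
+#   hrus      <- Chargement_param('parameter/', 'hrus.par')
+#   hrus$area <- round(as.numeric(hrus$area))    # any modification of the values
+#   write_new_paramfile('parameter/hrus.par', hrus, 'parameter/hrus_new.par')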
+# Args: +# oldfile: The original parameter file +# newvalues: A vector of the HRUs to write to the new parameter file +# newfile: The new parameter file + +# write a modified params file using the header of the old one +# ------------------------------------------------------------------------------ +write_new_paramfile <- function(oldfile, newvalues ,newfile) { + # get the header + nb_lines <- skip_lines(oldfile) + header <- readLines(oldfile, n = nb_lines) + + # write + write.table(header, newfile, sep = '\t', col.names = F, row.names = F, quote = F) + write.table(newvalues, newfile, col.names = F, row.names = F, quote = F, append = TRUE, sep = '\t') +} + + +# -------------------------------skip_lines------------------------------------- +# skip_lines <- function(file) +# Finds the number of lines to skip before the data starts in file. +# Args: +# file: The file to be read +# Returns: +# The number of lines to skip before the data starts in file +# ------------------------------------------------------------------------------ +skip_lines <- function(file){ + k <- 0 + obj <- NULL; obj2 <- NULL; obj3 <- NULL + while (length(na.omit(obj)) == 0 | length(na.omit(obj2)) == 0 | length(na.omit(obj3)) == 0) { + + obj <- as.numeric(read.table(file, nrow = 1, skip = k, colClasses = "character"))[1] + obj2 <- as.numeric(read.table(file, nrow = 1, skip = k + 1, colClasses = "character"))[1] + obj3 <- as.numeric(read.table(file, nrow = 1, skip = k + 2, colClasses = "character"))[1] + + k <- k + 1 + } + return(k - 1) +} + + +# -------------------------------skip_lines------------------------------------- +# skip_lines <- function(chemin, Name) +# Finds the number of lines to skip in order to reach the data in a file. +# Args: +# chemin: The path to the file +# Name: The name of the file +# Returns: +# The number of lines to skip +# ------------------------------------------------------------------------------ +skip_lines <- function(chemin, Name) { + return(skip_lines(paste0(chemin, Name))) +} + + +# -------------------------------luid2cult-------------------------------------- +# luid2cult <- function(vect_luid) +# Converts a vector of J2000 culture codes to their corresponding name. +# Args: +# vect_luid: A vector of J2000 culture codes +# Returns: +# A vector containing the corresponding names + +# The point of this function is to match a vector of numbers with a vector of strings. +# ------------------------------------------------------------------------------ +luid2cult <- function(vect_luid) { + cultures <- c('Vigne', 'Mais', 'Tournesol', 'Blé dur', 'Maraichage', 'PdT', 'Vergers', 'Prairies', 'Protéagineux', 'Riz', "Jachère","Divers", "Industrielles") + numJ2000_cultures <- 19:31 + res <- apply(as.matrix(vect_luid), 2, function(X){cultures[match(X, numJ2000_cultures)]}) + return(as.vector(res)) # Not sure this is necessary... +} + +vec <- luid2cult(18:25) + + +# =================================== +# ** From readwritefunctions_J2000 ** +# =================================== + +# -------------------------------ReadLoopDaily---------------------------------- +# ReadLoopDaily <- function(folder, file,filtre) +# Reads a "daily file" and returns the data and the corresponding dates. +# Args: +# folder: The folder where the file is located +# file: The name of the file +# filtre: A logical indicating whether to filter the data or not +# Returns: +# A list containing the dates and the data + +# 1. it starts by reading the length of the first data block and the number of blocks in the file +# 2. 
then it reads the file's headers (column names) +# 3. finally it reads the data block by block, adding a day to the date at each block, until the end of the file +# ------------------------------------------------------------------------------ +ReadLoopDaily <- function(folder, file, filtre) { + # Open the file + con <- file(paste0(folder, file)) + open(con) + + # Be careful as we remain in the same connection we must count the lines from the current line read (not from the beginning of the file) + + # Read the nb of elements (HRUs or Reaches) (length of the blocks) + Lblocks <- read.table(con, nrows = 1, sep = "\t", skip = 1) + Lblocks <- Lblocks[,3] + + # Get the nb of time steps fo the simulation (nb of blocks of the file) + Nblocks <- read.table(con, nrows = 1, sep = "\t", skip = 1) + Nblocks <- Nblocks[,3] + + # Get the col names (names of the simulated variables) + if (filtre == T) {Colnames <- scan(con, what = "", nlines = 1, sep = "", skip = 3)} else {Colnames <- scan(con, what = "", nlines = 1, sep = "", skip = 2)} + #Colnames <- scan(con, what = "", nlines = 1, sep = "", skip = 2) + + # Get the start date of the simulation (we consider only the date -> daily time step) + Datestart <- read.table(con, as.is = TRUE, nrows = 1, sep = "", skip = 3) + Datestart <- Datestart[,2] + if (filtre == T) { + read.table(con, nrows = 1, sep = "\t") + count <- length(Colnames)+1 + compt <- 0 + while (count == (length(Colnames)+1)) { + obj <- read.table(con, nrows = 1, sep = "\t") + count <- dim(obj)[2] + compt <- compt + 1 + } + Lblocks <- compt-1 + con<-file(paste0(folder, file)) + open(con) + read.table(con, nrows = 1, sep = "\t", skip = 1) + read.table(con, nrows = 1, sep = "\t", skip = 1) + Colnames <- scan(con, what = "", nlines = 1, sep = "", skip = 3) + # Get the start date of the simulation (we consider only the date -> daily time step) + read.table(con, as.is = TRUE, nrows = 1, sep = "", skip = 3) + } + + # Read the data + # Initialisation of a matrix of the correct size to store the data + # nrow = nb of time steps * nb of elts + # ncol = nb of simulated variables + ncol <-length(Colnames) + Data <- matrix(nrow=Nblocks*Lblocks,ncol=ncol) + # Loop on the nb of blocks + for (i in 0:(Nblocks -1)) + { + # Read the block of data + # if i=0 (first block skip only 1 line) + if(i==0) + Datatemp <- read.table(con, nrows = Lblocks, sep = "", skip = 1, colClasses="numeric") + # else skip 3 lines + else + Datatemp <- read.table(con, nrows = Lblocks, sep = "", skip = 3, colClasses="numeric") + + # Add the values to the matrix + Data[(i*Lblocks+1):((i+1)*Lblocks),1:ncol] <- as.matrix(Datatemp) + } + + # close the file + close(con) + + # Add the colnames + colnames(Data) <- Colnames + # Create the corresponding vector of dates + dates <- as.character(seq(from = as.Date(Datestart), length.out = Nblocks, by = "day")) + + # Return the vector of dates and the data as a list + list(dates=dates, Data=Data) +} + + + +# ======================== +# ** From zoo_functions ** +# ======================== + + +# -------------------------------aggregateZoo----------------------------------- +# aggregateZoo <- function (z, timeStep, sumOrMeanFunction) +# Aggregates the given zoo object over the given time step. 
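+# A sketch of how this is typically chained with ReadLoopDaily() above (folder,
+# file and column names are placeholders; it assumes the loop file holds one
+# value per time step, so the series maps one-to-one onto the returned dates):
+#   loop  <- ReadLoopDaily('output/current/', 'TimeLoop.dat', filtre = FALSE)
+#   z     <- zoo(loop$Data[, 'catchmentSimRunoff'], as.Date(loop$dates))
+#   q_mon <- aggregateZoo(z, 'my', sum)   # monthly totals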
+# Args:
+#   z: A zoo object
+#   timeStep: The time step over which to aggregate the zoo object
+#             Possible values: ["dmy","my","y","m","sy","s"]
+#   sumOrMeanFunction: The function to apply when aggregating
+#                      Possible values: [sum, mean]
+# Returns:
+#   The aggregated zoo object
+
+# The point of this function is to aggregate the data over different time steps.
+# The supported codes are "dmy", "my", "y", "m", "sy" and "s"; for each one the
+# data are combined with either the sum or the mean.
+# ------------------------------------------------------------------------------
+aggregateZoo <- function (z, timeStep, sumOrMeanFunction) {
+  # Returns a new zoo object aggregated over the time step timeStep
+  # (["dmy","my","y","m","sy","s"]), taking the sum or the mean ([sum, mean])
+
+  if(timeStep == "dmy"){
+    return (aggregate(z, time(z) - as.numeric(time(z)) %% 1, sumOrMeanFunction, na.rm = TRUE))
+  }
+  if(timeStep == "my"){
+    return (aggregate(z, as.Date(as.yearmon(time(z))), sumOrMeanFunction, na.rm = TRUE))
+  }
+  if(timeStep == "y"){
+    return (aggregate(z, format(as.Date(index(z)), '%y'), sumOrMeanFunction, na.rm = TRUE))
+  }
+  if(timeStep == "m"){
+    return (aggregate(z, format(as.Date(index(z)), '%m'), sumOrMeanFunction, na.rm = TRUE))
+  }
+  if(timeStep == "sy"){
+    return (aggregate(z, as.Date(as.yearqtr(time(z))), sumOrMeanFunction, na.rm = TRUE))
+  }
+  if(timeStep == "s"){
+    return (aggregate(z, quarters(time(z)), sumOrMeanFunction, na.rm = TRUE))
+  }
+
+  print("Type not understood")
+}
+
+
+
+# ===================================
+# ** From functions_post_treatment **
+# ===================================
+
+
+# -------------------------------Topologie--------------------------------------
+# Topologie <- function (brin, reach)
+# Finds all reaches located upstream of a given reach in the network
+# Args:
+#   brin: The ID of the reach whose upstream topology is wanted
+#   reach: The loaded reach.par parameter table (column 1: reach ID, column 2: downstream reach ID)
+# Returns:
+#   A vector containing the ID of the given reach and of all reaches upstream of it
+
+# Starting from the chosen reach, the function collects the IDs of every reach that drains into it, directly or indirectly.
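+# A usage sketch (the reach ID is made up; `reach` is assumed to be the table
+# returned by Chargement_param() for reach.par):
+#   reach <- Chargement_param('parameter/', 'reach.par')
+#   amont <- Topologie(45, reach)   # reach 45 plus every reach upstream of it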
+# Walks upstream from the chosen reach to the head of the basin
+# ------------------------------------------------------------------------------
+Topologie <- function (brin, reach) {
+  IDs <- NULL
+  Brin0 <- brin
+
+  for (indice in 1:1000){
+    assign(paste0('Brin', indice), NULL)
+  }
+  k <- 0
+
+  while (length(get(paste0('Brin', k)))!=0){
+    for (i in seq_along(get(paste0('Brin', k)))){
+      assign(paste0('Brin', k + 1), c(get(paste0('Brin', k + 1)), reach[which(reach[, 2]== get(paste0('Brin', k))[i]), 1]))
+    }
+    k <- k+1
+  }
+  Total <- brin
+  for (l in 1:k){
+    Total <- unique(c(Total, get(paste0('Brin', l))))
+  }
+  Total
+}
+
+
+
+# ==========================
+# ** From MDR_AERMCprelev **
+# ==========================
+
+
+# -------------------------------Prelev82_1987_2007-----------------------------
+# Prelev82_1987_2007 <- function()
+# Calculates the mean annual water withdrawal of each canton over 1987-2007
+# Args:
+#   None
+# Returns:
+#   A dataframe containing the canton and the corresponding mean annual water withdrawal
+# ------------------------------------------------------------------------------
+Prelev82_1987_2007 <- function() { # m3/yr
+  Prelev <- read.table('~/Documents/MDR/irrigation/Chronique_PrelevRMC_Cantons.txt', header=T)
+
+  Prelev82 <- NULL
+  for (cant in Prelev$Canton[order(unique(Prelev$Canton))]){
+    Prelev82_ann <- Prelev[which(Prelev[, 1] == cant), 5] * 1000 #(m3)
+    Prelev_date <- as.Date(as.character(Prelev[which(Prelev[, 1] == cant), 2]), format="%Y")
+    Prelev82_ann <- xts(Prelev82_ann, Prelev_date)
+    Prelev82 <- rbind(Prelev82, mean(Prelev82_ann["1987/2007"])) # mean annual withdrawal over 1987-2007
+  }
+  Prelev82data <- cbind(Prelev$Canton[order(unique(Prelev$Canton))], Prelev82)
+  colnames(Prelev82data) <- c('canton', 'Prelev82')
+
+  return(Prelev82data)
+}
+
+
+# -------------------------------Prelev8182_1987_2007---------------------------
+# Prelev8182_1987_2007 <- function()
+# Calculates, for each canton, the mean annual withdrawal over 1987-2007, summing the two withdrawal series (the '81' and '82' columns of the canton file).
+# Args:
+#   None
+# Returns:
+#   A dataframe with the canton in the first column and the mean annual water withdrawals in the second column
+# ------------------------------------------------------------------------------
+Prelev8182_1987_2007 <- function(){ # m3/yr
+  Prelev <- read.table('~/Documents/MDR/irrigation/Chronique_PrelevRMC_Cantons.txt', header=T)
+
+  Prelev8182 <- NULL
+  for (cant in Prelev$Canton[order(unique(Prelev$Canton))]){
+    Prelev82_ann <- Prelev[which(Prelev[, 1] == cant), 5] * 1000 #(m3)
+    Prelev81_ann <- Prelev[which(Prelev[, 1] == cant), 4] * 1000 #(m3)
+    Prelev_date <- as.Date(as.character(Prelev[which(Prelev[, 1] == cant), 2]), format="%Y")
+
+    Prelev82_ann <- xts(Prelev82_ann, Prelev_date)
+    Prelev81_ann <- xts(Prelev81_ann, Prelev_date)
+
+    Prelev8182 <- rbind(Prelev8182, mean(Prelev81_ann["1987/2007"]) + mean(Prelev82_ann["1987/2007"])) # mean annual withdrawal over 1987-2007
+  }
+  Prelev8182data <- cbind(Prelev$Canton[order(unique(Prelev$Canton))], Prelev8182)
+  colnames(Prelev8182data) <- c('canton', 'Prelev8182')
+
+  return(Prelev8182data)
+}
+
+
+# -------------------------------Prelev8182_2008_2012---------------------------
+# Prelev8182_2008_2012 <- function()
+# Finds the average annual water withdrawals for all cantons over 2008-2012.
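+# The Prelev* helpers all use the same xts idiom: build an annual series per
+# canton, then average it over a date window. For an arbitrary (hypothetical)
+# window this looks like:
+#   x <- xts(Prelev[Prelev[, 1] == cant, 5] * 1000,
+#            as.Date(as.character(Prelev[Prelev[, 1] == cant, 2]), format = "%Y"))
+#   mean(x["1990/2000"])   # mean annual withdrawal of canton `cant` over 1990-2000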
+# Args: +# None +# Returns: +# A dataframe containing the canton and the corresponding average annual water withdrawals +# ------------------------------------------------------------------------------ +# m3/yr +Prelev8182_2008_2012 <- function(){ + Prelev <- read.table('~/Documents/MDR/irrigation/Chronique_PrelevRMC_Cantons.txt',header=T) # create a data.frame from the .txt file + + Prelev8182 <- NULL + for (cant in Prelev$Canton[order(unique(Prelev$Canton))]){ + Prelev82_ann <- Prelev[which(Prelev[,1] == cant),5] * 1000 #(m3) create a vector with all the annual data of the first canal for the canton cant + Prelev81_ann <- Prelev[which(Prelev[,1] == cant),4] * 1000 #(m3) same but with the second canal + Prelev_date <- as.Date(as.character(Prelev[which(Prelev[,1] == cant),2]), format="%Y") # create a vector with the date of all the data + + Prelev82_ann <- xts(Prelev82_ann, Prelev_date) # create a time series with the data of the first canal and their date + Prelev81_ann <- xts(Prelev81_ann, Prelev_date) # same with the second canal + + Prelev8182 <- rbind(Prelev8182,mean(Prelev81_ann["2008/2012"])+mean(Prelev82_ann["2008/2012"])) # add to a vector the mean of the time series between 2008 and 2012 (5 years) + } + Prelev8182data <- cbind(Prelev$Canton[order(unique(Prelev$Canton))], Prelev8182) # create a data.frame with the canton and the mean of 5 years + colnames(Prelev8182data) <- c('canton','Prelev8182') # give a name to the columns + + return(Prelev8182data) +} + diff --git a/irrigation-R-codes/lib_old/Analyse_hrus_function.r b/irrigation-R-codes/lib_old/Analyse_hrus_function.r new file mode 100644 index 0000000000000000000000000000000000000000..cc52a49b6741ae3941b19139288498804e61727f --- /dev/null +++ b/irrigation-R-codes/lib_old/Analyse_hrus_function.r @@ -0,0 +1,121 @@ +Analyse_hrus <- function (folder_parameter,Catchment,Name_subCatch,watershed,rewrite){ + +############################################################################ +# folder_parameter : folder where the parameter files are +# Catchment : Name of the catchment +# Name_subCatch : if you have sub-catchment, name of one sub-catchment, else NULL +# watershed : watershed corresponding to the sub-catchment if you have one, else NULL +# rewrite : if TRUE, the code is creating a new file +############################################################################ + +#Lecture HRU.par +hrus <- read.table(paste(folder_parameter,'hrus.par',sep=''),col.names=read.table(paste(folder_parameter,'hrus.par',sep=''),skip=1,nr=1,colClasses="character"),skip=5) +#Lecture soils.par +soils <- read.table(paste(folder_parameter,'soils.par',sep=''),col.names=read.table(paste(folder_parameter,'soils.par',sep=''),skip=1,nr=1,colClasses="character"),skip=5,comment.char='#') +#Lecture hgeo.par +hgeo <- read.table(paste(folder_parameter,'hgeo.par',sep=''),col.names=read.table(paste(folder_parameter,'hgeo.par',sep=''),skip=1,nr=1,colClasses="character"),skip=5,comment.char='#') +#Lecture landuse.par +landuse <- read.table(paste(folder_parameter,'landuse.par',sep=''),col.names=read.table(paste(folder_parameter,'landuse.par',sep=''),skip=1,nr=1,colClasses="character"),skip=5,comment.char='#') + + +#Rewrite the file if necessary +if(rewrite){write.table(NULL,paste(folder_parameter,"Resume_",Catchment,".txt",sep=""),col.names=F,row.names=F,quote=F,sep='\t',append=F)} + + +if (length(Name_subCatch) != 0){ +if (length(watershed) != 0){ +#Extraction of the hrus of the sub-catchment +hrus2 <- NULL +NbWatershed <- length(watershed) +nb <- 0 +while(nb 
!= NbWatershed){ +nb <- nb + 1 +hrus2 <- rbind(hrus2,hrus[(hrus$watershed == watershed[nb]),]) +} +} +} else {hrus2 <- hrus} + +#Volume LPS (= aircap*Area/1000 en m3) +Volume_LPS <- NULL +for (i in c(1:dim(hrus2)[1])){ +Volume_LPS <- c(Volume_LPS,soils$aircap[hrus2$soilID [i] == soils$SID]*as.numeric(hrus2$area[i])/1000) +} + +#We remove the 'fc_sum' column +if(length(which(colnames(soils) == 'fc_sum'))==1){soils <- soils[,-which(colnames(soils) == 'fc_sum')]} +#Reorganisation of fc values +indice_fc <- which(substr(colnames(soils),1,2)=="fc") +fc_value <- as.numeric(substr(colnames(soils)[which(substr(colnames(soils),1,2)=="fc")],4,6)) +fc_number <- cbind(fc_value,indice_fc) + +#Volume MPS (= sum(fc_i for i<rootdepth)*Area/1000 in m3) +Volume_MPS <- NULL +for (i in c(1:dim(hrus2)[1])){ +rootdepth <- round(as.numeric(landuse$rootDepth[hrus2$landuseID [i] == landuse$LID]),0) +Volume_MPS <- c(Volume_MPS,ifelse(rootdepth==0,0,sum(soils[hrus2$soilID [i] == soils$SID,fc_number[fc_number[,1] <= rootdepth,2]]) *as.numeric(hrus2$area[i])/1000)) +} + +#Volume RG1 (=maxRG1 * Area / 1000) +Volume_RG1 <- NULL +for (i in c(1:dim(hrus2)[1])){ +Volume_RG1 <- c(Volume_RG1,hgeo$RG1_max[hrus2$hgeoID [i] == hgeo$GID] *as.numeric(hrus2$area[i])/1000) +} + +#Interception (= LAI * alpha * Area / 1000) depending on the period +#For the model with snow, the value calculated by the code is slightly under-estimed +alpha = 1 + + +Volume_IntD1 <- NULL +Volume_IntD2 <- NULL +Volume_IntD3 <- NULL +Volume_IntD4 <- NULL +for (i in c(1:dim(hrus2)[1])){ +Volume_IntD1 <- c(Volume_IntD1,landuse$LAI_d1[hrus2$landuseID [i] == landuse$LID] * alpha * as.numeric(hrus2$area[i])/1000) +Volume_IntD2 <- c(Volume_IntD2,landuse$LAI_d2[hrus2$landuseID [i] == landuse$LID] * alpha *as.numeric(hrus2$area[i])/1000) +Volume_IntD3 <- c(Volume_IntD3,landuse$LAI_d3[hrus2$landuseID [i] == landuse$LID] * alpha *as.numeric(hrus2$area[i])/1000) +Volume_IntD4 <- c(Volume_IntD4,landuse$LAI_d4[hrus$landuseID [i] == landuse$LID] * alpha *as.numeric(hrus2$area[i])/1000) +} + +#Total storage volume on the catchment for each period +Vtot_d1 <- sum(Volume_MPS + Volume_LPS + Volume_RG1 + Volume_IntD1) +Vtot_d2 <- sum(Volume_MPS + Volume_LPS + Volume_RG1 + Volume_IntD2) +Vtot_d3 <- sum(Volume_MPS + Volume_LPS + Volume_RG1 + Volume_IntD3) +Vtot_d4 <- sum(Volume_MPS + Volume_LPS + Volume_RG1 + Volume_IntD4) + +#Contribution of LPS +PercLPS_d1 <- sum(Volume_LPS) / Vtot_d1 * 100 +PercLPS_d2 <- sum(Volume_LPS) / Vtot_d2 * 100 +PercLPS_d3 <- sum(Volume_LPS) / Vtot_d3 * 100 +PercLPS_d4 <- sum(Volume_LPS) / Vtot_d4 * 100 + +#Contribution of MPS +PercMPS_d1 <- sum(Volume_MPS) / Vtot_d1 * 100 +PercMPS_d2 <- sum(Volume_MPS) / Vtot_d2 * 100 +PercMPS_d3 <- sum(Volume_MPS) / Vtot_d3 * 100 +PercMPS_d4 <- sum(Volume_MPS) / Vtot_d4 * 100 + +#Contribution of RG1 +PercRG1_d1 <- sum(Volume_RG1) / Vtot_d1 * 100 +PercRG1_d2 <- sum(Volume_RG1) / Vtot_d2 * 100 +PercRG1_d3 <- sum(Volume_RG1) / Vtot_d3 * 100 +PercRG1_d4 <- sum(Volume_RG1) / Vtot_d4 * 100 + +#Contribution of Interception storage +PercIntc1_d1 <- sum(Volume_IntD1) / Vtot_d1 * 100 +PercIntc1_d2 <- sum(Volume_IntD2) / Vtot_d2 * 100 +PercIntc1_d3 <- sum(Volume_IntD3) / Vtot_d3 * 100 +PercIntc1_d4 <- sum(Volume_IntD4) / Vtot_d4 * 100 + +#Write the name of the sub-catchment 
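+# (a separator plus the sub-catchment or catchment name is appended to Resume_<Catchment>.txt;
+#  the table written just after holds, for each vegetation period d1-d4, the total storage
+#  volume Vtot and the % contribution of LPS, MPS, RG1 and interception)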
+write.table(rbind("---------------------------------------------------------------------",ifelse(length(Name_subCatch)!=0,Name_subCatch,Catchment),""),paste(folder_parameter,"Resume_",Catchment,".txt",sep=""),col.names=F,row.names=F,quote=F,sep='\t',append=T) + +#Write the final table with all results +Resume <- cbind(c("d1","d2","d3","d4"),c(format(Vtot_d1,scientific=TRUE,digits=3),format(Vtot_d2,scientific=TRUE,digits=3),format(Vtot_d3,scientific=TRUE,digits=3),format(Vtot_d4,scientific=TRUE,digits=3)),rbind(round(c(PercLPS_d1,PercMPS_d1,PercRG1_d1,PercIntc1_d1),2),round(c(PercLPS_d2,PercMPS_d2,PercRG1_d2,PercIntc1_d2),2),round(c(PercLPS_d3,PercMPS_d3,PercRG1_d3,PercIntc1_d3),2),round(c(PercLPS_d4,PercMPS_d4,PercRG1_d4,PercIntc1_d4),2))) +colnames(Resume) <- c("Period","Vtot(m3)","%LPS","%MPS","%RG1","%intercept") +write.table(Resume,paste(folder_parameter,"Resume_",Catchment,".txt",sep=""),col.names=T,row.names=F,quote=F,sep='\t',append=T) + +# Add the catchment area in the file +Area_Catchment <- sum(as.numeric(hrus2$area)) +write.table(paste("Catchment area : ", round(Area_Catchment/1000000,0)," km2",sep=""),paste(folder_parameter,"Resume_",Catchment,".txt",sep=""),col.names=F,row.names=F,quote=F,sep='\t',append=T) +} \ No newline at end of file diff --git a/irrigation-R-codes/lib_old/Functions_post_treatment.r b/irrigation-R-codes/lib_old/Functions_post_treatment.r new file mode 100644 index 0000000000000000000000000000000000000000..d90d1906aa2ba54143ad3e8f376ac6accc21395e --- /dev/null +++ b/irrigation-R-codes/lib_old/Functions_post_treatment.r @@ -0,0 +1,2033 @@ +############# Creation du fichier hrus.par pour prise en compte relief +##################################################################################### +#Inputs : +# - hrus : le fichier parametre hrus.par charge +# - seuil : valeur de la pente a partir de laquelle on va modifier +# - chemin_hrus : chemin dans lequel se trouve le fichier hrus.par initial +# +# Fonction : +# - On verifie qu'on a bien des hrus pour laquelle la pente est superieure au seuil fixe +# - On s'assure qu'on n'a pas deja augmente les parametres (hgeo <= 8) +# - On ajoute le jeu montagne (hgeo + 8 , landuse + 8, soil + 4) +# - On ecrit le nouveau fichier hrus.par dans le dossier Modif +# +# Output : +# - le fichier hrus.par actualise +##################################################################################### +Relief = function (hrus,seuil,chemin_hrus) { +dir.create(paste(chemin_hrus,'Modif/',sep='')) +cheminHRUmodif = paste(chemin_hrus,'Modif/',sep='') +nbLines = Lignes_saut(chemin_hrus,'hrus.par') +headerHRU = readLines(paste(chemin_hrus,'hrus.par',sep=''), n = nbLines) +LinesNames = which(substr(headerHRU,1,2)=="ID") +Names = read.table(paste(chemin_hrus,'hrus.par',sep=''),nr=1,skip=LinesNames-1) +indice = which((hrus[,which(Names == "slope")] > seuil) & (hrus[,which(Names=="hgeoID")] <= 8)) +if (length(indice !=0)){ +hrus[indice,which(Names=="hgeoID")] = hrus[indice,which(Names=="hgeoID")] + 8 +hrus[indice,which(Names=="landuseID")] = hrus[indice,which(Names=="landuseID")] + 8 +hrus[indice,which(Names=="soilID")] = hrus[indice,which(Names=="soilID")] + 4 +} else { +print(paste('Pas de modifications : pas de pentes superieures a ',seuil,' deg ou indices deja modifies!', sep=''));flush.console() +} +write.table(headerHRU,paste(cheminHRUmodif,'hrus.par',sep=''),sep='\t',col.names=F,row.names=F,quote=F) +write.table(hrus,paste(cheminHRUmodif,'hrus.par',sep=''),col.names=F,row.names=F,quote=F,append=T,sep='\t') +hrus +} + + 
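+# Usage sketch for Relief() (path and slope threshold are hypothetical; the call
+# is kept as a comment because this file is meant to be sourced as a library):
+#   hrus <- Chargement_param('parameter/', 'hrus.par')   # loader defined further down
+#   hrus <- Relief(hrus, seuil = 25, chemin_hrus = 'parameter/')
+#   # -> the updated hrus.par is written into 'parameter/Modif/'
+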
+#Correction des pentes des brins inferieures a seuil_min +##################################################################################### +#Inputs : +# - reach : le fichier parametre reach.par charge +# - seuil_min : valeur seuil de la pente - toute pente inferieure au seuil sera augmentee +# - chemin_reach : chemin dans lequel se trouve le fichier reach.par initial +# +# Fonction : +# - On verifie qu'on a bien des brins pour laquelle la pente est inferieure au seuil fixe +# - On augmente la valeur des pentes des brins reperes +# - On ecrit le nouveau fichier reach.par dans le dossier Modif +# +# Output : +# - le fichier reach.par actualise +##################################################################################### +CorrectionPenteminReach = function (reach,seuil_min,chemin_reach){ +dir.create(paste(chemin_reach,'Modif/',sep='')) +cheminReachmodif = paste(chemin_reach,'Modif/',sep='') +nbLines = Lignes_saut(chemin_reach,'reach.par') +headerReach = readLines(paste(chemin_reach,'reach.par',sep=''), n = nbLines) +LinesNames = which(substr(headerReach,1,2)=="ID") +Names = read.table(paste(chemin_reach,'reach.par',sep=''),nr=1,skip=LinesNames-1) +indice = which(as.numeric(as.character(reach[,which(Names == "slope")])) < seuil_min) +if(length(indice)!=0){ +reach[indice,which(Names == "slope")] <- seuil_min} +else{ +print(paste('Toutes les pentes des brins sont superieures a ',seuil_min,' %!', sep=''));flush.console() +} + +reach = reach[order(reach[,1]),] +write.table(headerReach,paste(cheminReachmodif,'reach.par',sep=''),sep='\t',col.names=F,row.names=F,quote=F) +write.table(reach,paste(cheminReachmodif,'reach.par',sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=T) +reach +} + + +# Correction largeurs et rugosite (en remplacement des valeurs 30 et 1 m pour la rugosite et la largeur des brins en sortie de HRU-delin et GRASS-HRU) +##################################################################################### +#Inputs : +# - reach : le fichier parametre reach.par charge +# - SurfaceDrainee : un tbleau de 2 colonnes (1ere colonne : n?brin, 2eme colonne : surface drainee a l'exutoire du brin +# - chemin_reach : chemin dans lequel se trouve le fichier reach.par initial +# +# Fonction : +# - En fonction de l'aire drainee par le brin, on affecte une largeur et une rugosite a ce dernier +# - On ecrit le nouveau fichier reach.par dans le dossier Modif +# +# Output : +# - le fichier reach.par actualise +##################################################################################### +CorrectionLargRug = function (reach,SurfaceDrainee,chemin_reach){ +dir.create(paste(chemin_reach,'Modif/',sep='')) +cheminReachmodif = paste(chemin_reach,'Modif/',sep='') +nbLines = Lignes_saut(chemin_reach,'reach.par') +headerReach = readLines(paste(chemin_reach,'reach.par',sep=''), n = nbLines) +LinesNames = which(substr(headerReach,1,2)=="ID") +Names = read.table(paste(chemin_reach,'reach.par',sep=''),nr=1,skip=LinesNames-1) +indID = which(Names == "ID") +indRug = which(Names == "rough") +indLarg = which(Names == "width") + +brin0 <- SurfaceDrainee[which(SurfaceDrainee[,2] < 100 ),1] +for (brin in brin0){ +reach[which(reach[,indID] == brin),indRug] = 15 +reach[which(reach[,indID] == brin),indLarg] = 10 +} + +brin0 <- SurfaceDrainee[which(SurfaceDrainee[,2] >= 100 & SurfaceDrainee[,2] < 400),1] +for (brin in brin0){ +reach[which(reach[,indID] == brin),indRug] = 30 +reach[which(reach[,indID] == brin),indLarg] = 15 +} + +brin0 <- SurfaceDrainee[which(SurfaceDrainee[,2] >= 400 & 
SurfaceDrainee[,2] < 1350),1] +for (brin in brin0){ +reach[which(reach[,indID] == brin),indRug] = 40 +reach[which(reach[,indID] == brin),indLarg] = 30 +} + +brin0 <- SurfaceDrainee[which(SurfaceDrainee[,2] >= 1350 & SurfaceDrainee[,2] < 7000),1] +for (brin in brin0){ +reach[which(reach[,indID] == brin),indRug] = 50 +reach[which(reach[,indID] == brin),indLarg] = 50 +} + +brin0 <- SurfaceDrainee[which(SurfaceDrainee[,2] >= 7000 & SurfaceDrainee[,2] < 20500),1] +for (brin in brin0){ +reach[which(reach[,indID] == brin),indRug] = 60 +reach[which(reach[,indID] == brin),indLarg] = 100 +} + +brin0 <- SurfaceDrainee[which(SurfaceDrainee[,2] >= 20500),1] +for (brin in brin0){ +reach[which(reach[,indID] == brin),indRug] = 70 +reach[which(reach[,indID] == brin),indLarg] = 200 +} + +reach = reach[order(reach[,1]),] +write.table(headerReach,paste(cheminReachmodif,'reach.par',sep=''),sep='\t',col.names=F,row.names=F,quote=F) +write.table(reach,paste(cheminReachmodif,'reach.par',sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=T) +reach +} + + +#Correction des pentes des HRUs inferieures a seuil_min +##################################################################################### +#Inputs : +# - hrus : le fichier parametre hrus.par charge +# - seuil_min : valeur seuil de la pente - toute pente inferieure au seuil sera augmentee +# - chemin_hrus : chemin dans lequel se trouve le fichier hrus.par initial +# +# Fonction : +# - On verifie qu'on a bien des HRUs pour laquelle la pente est inferieure au seuil fixe +# - On augmente la valeur des pentes des hrus reperees +# - On ecrit le nouveau fichier hrus.par dans le dossier Modif +# +# Output : +# - le fichier hrus.par actualise +##################################################################################### +CorrectionPenteminHRU = function (hrus,seuil_min,chemin_hrus){ +dir.create(paste(chemin_hrus,'Modif/',sep='')) +cheminHRUmodif = paste(chemin_hrus,'Modif/',sep='') +nbLines = Lignes_saut(chemin_hrus,'hrus.par') +headerHRU = readLines(paste(chemin_hrus,'hrus.par',sep=''), n = nbLines) +LinesNames = which(substr(headerHRU,1,2)=="ID") +Names = read.table(paste(chemin_hrus,'hrus.par',sep=''),nr=1,skip=LinesNames-1) +indice = which(as.numeric(as.character(hrus[,which(Names == "slope")])) < seuil_min) +if(length(indice)!=0){ +hrus[indice,which(Names == "slope")] <- seuil_min} +else{ +print(paste('Toutes les pentes des HRUs sont superieures a ',seuil_min,' deg!', sep=''));flush.console() +} +write.table(headerHRU,paste(cheminHRUmodif,'hrus.par',sep=''),sep='\t',col.names=F,row.names=F,quote=F) +write.table(hrus,paste(cheminHRUmodif,'hrus.par',sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=T) +hrus +} + + +#Chargement des fichiers parametres +##################################################################################### +#Inputs : +# - chemin : le dossier dans lequel se situe le fichier desire +# - Name : le nom du fichier desire +# +# Fonction : +# - Charger le fichier parametre hrus.par ou reach.par +# +# Output : +# - le fichier parametre desire +# Astuce : +# - Le code est capable d'identifier la ligne avec les premieres valeurs et de sauter les lignes de texte initiales +##################################################################################### +Chargement_param = function(chemin,Name){ # NTK ???? 
(aussi dans utilitaires :3) +k=0 +obj = NULL +obj2 = NULL +obj3 = NULL +while(length(na.omit(obj))==0 | length(na.omit(obj2))==0 | length(na.omit(obj3))==0){ +obj = as.numeric(read.table(paste(chemin,Name,sep=''),nrow=1,skip=k,colClasses="character"))[1] +obj2 = as.numeric(read.table(paste(chemin,Name,sep=''),nrow=1,skip=k+1,colClasses="character"))[1] +obj3 = as.numeric(read.table(paste(chemin,Name,sep=''),nrow=1,skip=k+2,colClasses="character"))[1] +k=k+1} +nbLines = k - 1 +read.table(paste(chemin,Name,sep=''),skip=nbLines) +} + +#Fonction pour calculer le nombre de lignes a sauter en lecture du fichier paramatre +##################################################################################### +#Inputs : +# - chemin : le dossier dans lequel se situe le fichier desire +# - Name : le nom du fichier desire +# +# Fonction : +# - Identifier le nombre de lignes a sauter en lecture +# Output : +# - le nombre de lignes a ignorer +##################################################################################### +Lignes_saut = function(chemin,Name){ #NTK +k=0 +obj = NULL +obj2 = NULL +obj3 = NULL +while(length(na.omit(obj))==0 | length(na.omit(obj2))==0 | length(na.omit(obj3))==0){ +obj = as.numeric(read.table(paste(chemin,Name,sep=''),nrow=1,skip=k,colClasses="character"))[1] +obj2 = as.numeric(read.table(paste(chemin,Name,sep=''),nrow=1,skip=k+1,colClasses="character"))[1] +obj3 = as.numeric(read.table(paste(chemin,Name,sep=''),nrow=1,skip=k+2,colClasses="character"))[1] +k=k+1} +nbLines = k - 1 +nbLines +} + + +# Ajout des informations sur les barrages dans le fichier reach.par +##################################################################################### +#Inputs : +# - chemin_reach : le dossier dans lequel se situe le fichier reach.par original +# - reach : le fichier parametre reach.par charge +# - liste_barrage : la liste des barrages (issu du fichier csv) - necessite d'avoir une colonne avec le nom du barrage (intitulee 'Nom') la capacite max (en Mm3), le volume initial (en Mm3) et le brin sur lequel s'applique le barrage +# - Bar_colSmax : colonne comportant les capacites maximales des barrages +# - Bar_colV0 : colonne comportant les volumes initiaux des barrages (au 01/01/1985) +# - Bar_colBrinSortie : colonne comportant le n? 
du brin de sortie du barrage (brin directement a l'aval du barrage) +# +# Fonction : +# - Une fois la liste de barrages chargee, on affecte a chaque brin present dans la colonne BrinSortie la capacite max et le volume initial du barrage correspondant +# +#Output : +# - le fichier reach.par actualise +##################################################################################### +Ajout_barrage_reach = function (chemin_reach,reach,liste_barrage,Bar_colSmax,Bar_colV0,Bar_colBrinSortie){ +cheminReachmodif = paste(chemin_reach,'Modif/',sep='') +nbLines = Lignes_saut(chemin_reach,'reach.par') +headerReach = readLines(paste(chemin_reach,'reach.par',sep=''), n = nbLines) +LinesNames = which(substr(headerReach,1,2)=="ID") +Names = read.table(paste(chemin_reach,'reach.par',sep=''),nr=1,skip=LinesNames-1) +Names = cbind(Names,'Smax','V0') +Min = read.table(paste(chemin_reach,'reach.par',sep=''),nr=1,skip=LinesNames) +Min = cbind(Min,0,0) +Max = read.table(paste(chemin_reach,'reach.par',sep=''),nr=1,skip=LinesNames+1) +Max = cbind(Max,9999999,9999999) +Unit = read.table(paste(chemin_reach,'reach.par',sep=''),nr=1,skip=LinesNames+2) +Unit = cbind(Unit,'Mm3','Mm3') +reach = cbind(reach,0,0) +for (i in c(1:dim(liste_barrage)[1])){ +X = which(reach[,1]==liste_barrage[i,Bar_colBrinSortie]) +if (length(X) != 0){ +reach[X,(dim(reach)[2]-1)] = liste_barrage[i,Bar_colSmax] +reach[X,(dim(reach)[2])] = liste_barrage[i,Bar_colV0] +} else { +print(paste("attention, pas de brin correspondant au brin mentionne pour le barrage ", as.character(liste_barrage[i,'Nom']),sep=""));flush.console()} +} +write.table (Names,paste(cheminReachmodif,'reach.par',sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=F) +write.table (Min,paste(cheminReachmodif,'reach.par',sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=T) +write.table (Max,paste(cheminReachmodif,'reach.par',sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=T) +write.table (Unit,paste(cheminReachmodif,'reach.par',sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=T) +write.table (reach,paste(cheminReachmodif,'reach.par',sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=T) +reach +} + + +# Modification des paramatres dans hrus.par pour la Saone et la Dombes +##################################################################################### +#Inputs : +# - hrus : le fichier parametre hrus.par charge +# - chemin_hrus : le dossier dans lequel se situe le fichier reach.par original +# - reach : le fichier parametre reach.par charge +# - BrinSaoneAval : brin correspondant a l'exutoire de la Saone jusqu'ou nous souhaitons apporter la modification sur les parametres sols, geologie et landuse +# - Dombes_Chalaronne : brin exutoire de la Chalaronne, a sa confluence avec la Saone +# - Dombes_Veyle : brin exutoire de la Veyle, a sa confluence avec la Saone +# +# Fonction : +# - Identifier les hrus ayant les parametres a modifier (2 et 10 pour le landuseSaone, 4 et 12 pour le landuseDombes, 1 et 5 pour le soilSaone) et effectuer la modification +# +# Output : +# - le fichier hrus.par actualise +##################################################################################### +Traitement_Saone = function(hrus,chemin_hrus,reach,BrinSaoneAval,Dombes_Chalaronne,Dombes_Veyle){ +cheminHRUmodif = paste(chemin_hrus,'Modif/',sep='') +nbLines = Lignes_saut(cheminHRUmodif,'hrus.par') +headerHRU = readLines(paste(cheminHRUmodif,'hrus.par',sep=''), n = nbLines) +LinesNames = which(substr(headerHRU,1,2)=="ID") +Names = 
read.table(paste(cheminHRUmodif,'hrus.par',sep=''),nr=1,skip=LinesNames-1) + +indID = which(Names=="ID") +indGeol = which(Names=="hgeoID") +indLand = which(Names=="landuseID") +indSoil = which(Names=="soilID") +indSub = which(Names=="subbasin") +#GeolSaoneMod = hrus[which(hrus[,indGeol] == 8 | hrus[,indGeol] == 16),indID] +LanduseSaoneMod = hrus[which(hrus[,indLand] == 2 | hrus[,indLand] == 10),indID] +LanduseDombesMod = hrus[which(hrus[,indLand] == 4 | hrus[,indLand] == 12),indID] +SoilSaoneMod = hrus[which(hrus[,indSoil] == 1 | hrus[,indSoil] == 5),indID] +brins_saone = Topologie(BrinSaoneAval,reach) +brins_chala = Topologie(Dombes_Chalaronne,reach) +brins_veyle = Topologie(Dombes_Veyle,reach) +ind = NULL +Total_hru_Saone = NULL +for (k in brins_saone){ +Total_hru_Saone = c (Total_hru_Saone,hrus[hrus[,indSub] == k,indID])} + +Total_hru_Chala = NULL +for (k in brins_chala){ +Total_hru_Chala = c (Total_hru_Chala,hrus[hrus[,indSub] == k,indID])} + +Total_hru_Veyle = NULL +for (k in brins_veyle){ +Total_hru_Veyle = c (Total_hru_Veyle,hrus[hrus[,indSub] == k,indID])} + +for (k in Total_hru_Saone){ +#if(length(which(k == GeolSaoneMod)) != 0) {hrus[which(k == hrus[,indID]),indGeol] <- 17} +#if(length(which(k == SoilSaoneMod)) != 0) {hrus[which(k == hrus[,indID]),indSoil] <- 9} +if(length(which(k == LanduseSaoneMod)) != 0) {hrus[which(k == hrus[,indID]),indLand] <- 17} +} + +for (k in Total_hru_Chala){ +if(length(which(k == SoilSaoneMod)) != 0) {hrus[which(k == hrus[,indID]),indSoil] <- 10} +if(length(which(k == LanduseDombesMod)) != 0) {hrus[which(k == hrus[,indID]),indLand] <- 18} +} + +for (k in Total_hru_Veyle){ +if(length(which(k == SoilSaoneMod)) != 0) {hrus[which(k == hrus[,indID]),indSoil] <- 10} +if(length(which(k == LanduseDombesMod)) != 0) {hrus[which(k == hrus[,indID]),indLand] <- 18} +} +write.table(headerHRU,paste(cheminHRUmodif,'hrus.par',sep=''),sep='\t',col.names=F,row.names=F,quote=F,append=F) +write.table(hrus,paste(cheminHRUmodif,'hrus.par',sep=''),col.names=F,row.names=F,quote=F,append=T,sep='\t') +hrus +} + + +# Mise en place de la parametrisation pour l'irrigation +##################################################################################### +#Inputs : +# - hrus : le fichier parametre hrus.par charge +# - chemin_hrus : le dossier dans lequel se situe le fichier reach.par original +# - HRUs_shp : le fichier cartographique des HRUs charge (produit de HRU-delin ou GRASS-HRU +# - Cantons_shp : le fichier cartographique des cantons irrigues charge +# - ColCultDom : Colonne dans le fichier des cantons irrigues donnant la culture dominante (attention, il est necessaire que les noms des cultures soient identiques a celles du tableau irrigation_table.csv pour la suite des traitements) +# +# Fonction : +# - Correction eventuelle de la colonne irrigated (sortie de HRU-delin) en cas de 1 et de landuse different des landuses agricoles +# - Identification des HRUs agricoles et irriguees +# - Pour chaque HRU irriguee, on croise avec les cantons irrigues, et on repere le canton le plus represente dans l'intersection +# - On enregistre n?HRU irriguee, Culture dominante et Canton correspondant dans HRUs_culture_test.csv +# +# Output : +# - le fichier hrus.par actualise +##################################################################################### +CorrectionParamIrrig = function (hrus,chemin_hrus,HRUs_shp,Cantons_shp,ColCultDom){ +dir.create(paste(chemin_hrus,'Modif/',sep='')) +cheminHRUmodif = paste(chemin_hrus,'Modif/',sep='') +nbLines = Lignes_saut(chemin_hrus,'hrus.par') 
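+# The .par files handled here start with free-text lines, then a line beginning
+# with "ID" that holds the column names, followed by the min / max / unit rows
+# and finally the data block; Lignes_saut() returns how many of these header
+# lines must be skipped before the first data row, and the header read below is
+# only used to locate the column-name line.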
+headerHRU = readLines(paste(chemin_hrus,'hrus.par',sep=''), n = nbLines)
+LinesNames = which(substr(headerHRU,1,2)=="ID")
+Names = read.table(paste(chemin_hrus,'hrus.par',sep=''),nr=1,skip=LinesNames-1)
+indLand = which(Names == 'landuseID')
+indIrr = which(Names == 'irrigated')
+if(length(indIrr) != 0){
+indice = which(hrus[,indLand] != 4 & hrus[,indIrr] == 1 & hrus[,indLand] < 19)
+if(length(indice) !=0){
+hrus[indice,indIrr] = 0
+}else {print("pas de hru ayant irrigated = 1 et landuse != 4 dans votre fichier hrus.par");flush.console()}
+hrus_irrig = hrus[which(hrus[,indIrr] == 1),1]
+write.table(t(c('HRU','Culture','Canton')),'C:/Users/tilmant/Desktop/Final/BILAN/Irrigation/HRUs_culture_test.csv',sep=';',quote=F,append=F,col.names=F,row.names=F)
+for (k in c(1:length(hrus_irrig))){
+BV2 = HRUs_shp[which(HRUs_shp$cat == hrus_irrig[k]),]
+proj4string(BV2)<- proj4string(Cantons_shp)
+BILAN=NULL
+inters<- list()
+states<- list()
+pos<- which(!is.na(over(Cantons_shp,BV2)))
+for (j in 1 : length(pos)) {
+states[[j]]<- Cantons_shp[pos[j], 1]
+inters[[j]]<- gIntersection(states[[j]],BV2)
+if (class (inters[[j]]) == 'SpatialCollections'){inters[[j]]<- gIntersection(states[[j]],BV2)@polyobj}
+BILAN = rbind(BILAN,c(Cantons_shp[pos[j],][[1]],area(inters[[j]])))
+}
+Cant <- BILAN[which(BILAN[,2] == max(BILAN[,2])),1]
+write.table(t(c(hrus_irrig[k],as.character(Cantons_shp[[ColCultDom]][which(Cantons_shp@data [,1] == Cant)]),Cant)),'C:/Users/tilmant/Desktop/Final/BILAN/Irrigation/HRUs_culture_test.csv',sep=';',quote=F,append=T,col.names=F,row.names=F)
+}
+}else {
+print("pas de colonne 'irrigated' dans votre fichier hrus.par");flush.console()
+}
+write.table(headerHRU,paste(cheminHRUmodif,'hrus.par',sep=''),sep='\t',col.names=F,row.names=F,quote=F,append=F)
+write.table(hrus,paste(cheminHRUmodif,'hrus.par',sep=''),col.names=F,row.names=F,quote=F,append=T,sep='\t')
+hrus
+}
+
+# Loading of GIS layers
+#####################################################################################
+# Inputs :
+# - chemin : full path of the file to read
+#
+# Function :
+# - Identify the layer name from the path
+# - Read the GIS file
+#
+# Output :
+# - the requested layer
+#####################################################################################
+Chargement_couche = function(chemin){
+indice = NULL
+for (i in c(1:nchar(chemin))){
+Test = substr(chemin,i,i)
+if(Test =="/"){indice = c(indice,i)}
+}
+readOGR (dsn=substr(chemin,1,indice[length(indice)]-1), layer=substr(chemin,indice[length(indice)]+1,nchar(chemin)-4))
+}
+
+# Function to rebuild the network topology upstream of a given reach
+# Starting from the given reach, it walks the reach network upstream and returns the vector of all reaches that drain into it.
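+# Minimal illustration (not part of the original workflow) of the upstream walk
+# performed by Topologie(); toy_reach is a hypothetical table where column 1 is
+# the reach ID and column 2 the ID of the downstream reach (0 = basin outlet).
+if (FALSE) {
+toy_reach <- data.frame(ID = c(1, 2, 3, 4), to_reach = c(0, 1, 1, 2))
+Topologie(1, toy_reach)   # returns 1 2 3 4 : reach 1 plus everything upstream of it
+Topologie(2, toy_reach)   # returns 2 4
+}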
+##################################################################################### +#Inputs : +# - brin : brin pour lequel on veut la topologie +# - reach : le fichier parametre reach.par charge +# +# Fonction : +# - Remontee depuis le brin choisi jusqu'a l'amont du bassin +# +# Output : +# - la liste des brins en amont du brin choisi +##################################################################################### +Topologie = function (brin,reach) { # NTK +IDs <- NULL +Brin0 <- brin +for (indice in c(1:1000)){ +assign(paste('Brin',indice,sep=''),NULL)} +k =0 +while (length(get(paste('Brin',k,sep='')))!=0){ +for (i in c(1:length(get(paste('Brin',k,sep=''))))){ +assign(paste('Brin',k+1,sep=''),c(get(paste('Brin',k+1,sep='')),reach[which(reach[,2]== get(paste('Brin',k,sep=''))[i]),1])) +} +k = k+1 +} +Total <- brin +for (l in c(1:k)){ +Total <- unique(c(Total,get(paste('Brin',l,sep='')))) +} +Total +} + +#Modification des sous-bassins pour les fichiers de HRU-delin (pour avoir brin exutoire = sous-bassin +##################################################################################### +#Inputs : +# - chemin_hrus : chemin dans lequel se trouve le fichier hrus.par initial +# - hrus : le fichier parametre hrus.par charge +# +# Fonction : +# - Recherche des hrus connectees aux brins +# - Remontee topologique des HRUs +# - Remplacement du code subbasin du fichier hrus.par pour coller a la nouvelle numerotation des brins (etape indispensable pour le fonctionnement du module d'irrigation) +# +# Output : +# - le fichier hrus.par actualise +##################################################################################### +Correction_subbasin = function (chemin_hrus,hrus){ +cheminHRUmodif = paste(chemin_hrus,'Modif/',sep='') +nbLines = Lignes_saut(cheminHRUmodif,'hrus.par') +headerHRU = readLines(paste(cheminHRUmodif,'hrus.par',sep=''), n = nbLines) +LinesNames = which(substr(headerHRU,1,2)=="ID") +Names = read.table(paste(cheminHRUmodif,'hrus.par',sep=''),nr=1,skip=LinesNames-1) +indToReach = which(Names=="to_reach") +indToPoly = which(Names=="to_poly") +indSub = which(Names=="subbasin") +indID = which(Names=="ID") +HRUScon = hrus[which(hrus[,indToReach] !=0),indID] +for (k in HRUScon){ +total = k +hrus_tot = NULL +hrus_tot = c(hrus_tot,hrus[which(hrus[,indToPoly]==k),indID]) +if(length(hrus_tot) != 0){ +newhrus2 = hrus_tot +total = c(total,newhrus2) +while(length(newhrus2) !=0){ +newhrus2 = NULL +for (i in hrus_tot){ +newhrus = NULL +newhrus = hrus[which(hrus[,indToPoly]==i),indID] +newhrus2 = c(newhrus2,newhrus) +} +hrus_tot = newhrus2 +total = c(total,newhrus2) +}} +for(m in total){hrus[which(hrus[,indID] ==m),indSub] <- hrus[which(hrus[,indID] ==k),indToReach]} +} +write.table(headerHRU,paste(cheminHRUmodif,'hrus.par',sep=''),sep='\t',col.names=F,row.names=F,quote=F,append=F) +write.table(hrus,paste(cheminHRUmodif,'hrus.par',sep=''),col.names=F,row.names=F,quote=F,append=T,sep='\t') +hrus +} + + + +#S'assurer qu'il y a bien un brin affecte a chaque station (necessaire pour la Topologie) +##################################################################################### +#Inputs : +# - Correspondance : le fichier charge des correspondances brins/stations +# - chemin_hrus : chemin dans lequel se trouve le fichier hrus.par initial +# - hrus : le fichier parametre hrus.par charge +# - Corres_ID_Stations : l'objet faisant le lien entre ID et code station issu de la couche SIG des stations +# +# Fonction : +# - Verification que tous les watersheds presents dans hrus.par 
correspondent a une station presente dans le fichier de correspondance (necessaire pour pouvoir calculer l'aire des bassins) +# +# Output : +# - Message d'erreur en cas de manque - station a ajouter a la main dans le fichier csv de correspondance +##################################################################################### +verification_correspondance = function (Correspondance,chemin_hrus,hrus,Corres_ID_Stations){ +nbLines = Lignes_saut(chemin_hrus,'hrus.par') +headerHRU = readLines(paste(chemin_hrus,'hrus.par',sep=''), n = nbLines) +LinesNames = which(substr(headerHRU,1,2)=="ID") +Names = read.table(paste(chemin_hrus,'hrus.par',sep=''),nr=1,skip=LinesNames-1) +ID = as.numeric(Corres_ID_Stations[,1]) +ID_hrus = unique(hrus[,which(Names=="watershed")]) +indice = NULL +for (m in ID_hrus){ +indice = c(indice,which(Corres_ID_Stations[,1] == m)) +} +Stations_necessaires = Corres_ID_Stations[indice,2] +Bilan = NULL +for (k in Stations_necessaires){ +if(length(which(as.character(Correspondance[,1]) == k)) == 0){Bilan = paste(Bilan,' - ',k,sep='')} +} +if(length(Bilan)!=0){ +print(paste('Attention, il manque les stations ',Bilan,' dans le fichier Correspondance. Merci de les ajouter',sep=''));flush.console() +stop("Stations manquantes")} +} + + + + +#Ajout des parametres d'irrigation +##################################################################################### +#Inputs : +# - hrus : le fichier parametre hrus.par charge +# - chemin_hrus : chemin dans lequel se trouve le fichier hrus.par initial +# - irrigation_table : issu du fichier csv, relation landuseID,culture,type d'irrigation +# - hrus_irrig : la liste des hrus irriguees avec leurs cultures dominantes (issue du croisement des sig hrus/cantons irrigues +# +# Fonction : +# - Ajout de la colonne irrig_type (1 : Asp, 2 : GaG, 3 : Grav) +# +# Output : +# - le fichier hrus.par actualise +##################################################################################### +Irrigation_param = function (hrus,chemin_hrus,irrigation_table,hrus_irrig){ +cheminHRUmodif = paste(chemin_hrus,'Modif/',sep='') +nbLines = Lignes_saut(cheminHRUmodif,'hrus.par') +headerHRU = readLines(paste(cheminHRUmodif,'hrus.par',sep=''), n = nbLines) +LinesNames = which(substr(headerHRU,1,2)=="ID") +Names = read.table(paste(cheminHRUmodif,'hrus.par',sep=''),nr=1,skip=LinesNames-1) +Names = cbind(Names,'irrigated','irrig_type') +Min = read.table(paste(cheminHRUmodif,'hrus.par',sep=''),nr=1,skip=LinesNames) +Min = cbind(Min,0,0) +Max = read.table(paste(cheminHRUmodif,'hrus.par',sep=''),nr=1,skip=LinesNames+1) +Max = cbind(Max,999,999) +Unit = read.table(paste(cheminHRUmodif,'hrus.par',sep=''),nr=1,skip=LinesNames+2) +Unit = cbind(Unit,'n/a','n/a') + +indLanduse = which(Names == "landuseID") +hrus <- cbind(hrus,0,0) +for (hru in hrus_irrig[,1]){ +hrus[which(hrus[,1] == hru),(dim(hrus)[2]-1)] <- 1 +indCult = irrigation_table[which(as.character(irrigation_table[,2]) == as.character(hrus_irrig[which(hrus_irrig[,1]== hru),2])),1] +irrig_type = as.character(irrigation_table[which(as.character(irrigation_table[,2]) == as.character(hrus_irrig[which(hrus_irrig[,1]== hru),2])),3]) +if(irrig_type == "GaG"){indIrr = 2} else {if(irrig_type == "Asp"){indIrr = 1} else { if(irrig_type == "Grav"){indIrr = 3} else {print("type inconnu");flush.console()}}} +hrus[which(hrus[,1] == hru),indLanduse] = indCult +hrus[which(hrus[,1] == hru),dim(hrus)[2]] <- indIrr +} +write.table (Names,paste(cheminHRUmodif,'hrus.par',sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=F) 
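+# The min / max / unit header rows extended with the two new columns are
+# appended next, followed by the updated data block; only these four header
+# rows are rewritten, the free-text preamble of the original hrus.par is not
+# carried over into the Modif/ copy.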
+write.table (Min,paste(cheminHRUmodif,'hrus.par',sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=T) +write.table (Max,paste(cheminHRUmodif,'hrus.par',sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=T) +write.table (Unit,paste(cheminHRUmodif,'hrus.par',sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=T) +write.table (hrus,paste(cheminHRUmodif,'hrus.par',sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=T) +hrus +} + + + + +#Regionalisation du TA +##################################################################################### +#Inputs : +# - chemin_reach : chemin dans lequel se trouve le fichier reach.par initial +# - reach : le fichier parametre reach.par charge +# - SurfaceDrainee : surface drainee pour chaque brin (objet issu de la fonction ou d'un fichier csv) +# - BrinSaoneAval : brin a partir duquel nous allons fixer les TA pour la Saone +# - BrinLemanAval : brin a partir duquel nous allons fixer tous les TA a 1 (sortie du Leman) +# +# Fonction : +# - Identifier les brins par surface drainee (4 classes : <=100, 100 > S >= 300 , 300 > S >= 25000 , S > 25000) +# - Recuperation des brins amont de la Saone +# - Recuperation des brins amont du Rhone (sortie du Leman) +# - Regionalisation du TA selon la surface drainee et la position geographique (Saone, Leman, reste BV) +# +# Output : +# - le fichier reach.par actualise +##################################################################################### +TA = function(chemin_reach,reach,SurfaceDrainee,BrinSaoneAval,BrinLemanAval){ +cheminReachmodif = paste(chemin_reach,'Modif/',sep='') +nbLines = Lignes_saut(cheminReachmodif,'reach.par') +headerReach = readLines(paste(cheminReachmodif,'reach.par',sep=''), n = nbLines) +LinesNames = which(substr(headerReach,1,2)=="ID") +Names = read.table(paste(cheminReachmodif,'reach.par',sep=''),nr=1,skip=LinesNames-1) +Names = cbind(Names,'TA') +Min = read.table(paste(cheminReachmodif,'reach.par',sep=''),nr=1,skip=LinesNames) +Min = cbind(Min,0) +Max = read.table(paste(cheminReachmodif,'reach.par',sep=''),nr=1,skip=LinesNames+1) +Max = cbind(Max,99) +Unit = read.table(paste(cheminReachmodif,'reach.par',sep=''),nr=1,skip=LinesNames+2) +Unit = cbind(Unit,'n/a') +reach = cbind(reach,0.5) +brin0 <- SurfaceDrainee[which(SurfaceDrainee[,2] <= 100),1] +brin100 <- SurfaceDrainee[which(SurfaceDrainee[,2] > 100 & SurfaceDrainee[,2] <= 300),1] +brin300 <- SurfaceDrainee[which(SurfaceDrainee[,2] > 300 & SurfaceDrainee[,2] <= 25000),1] +brin25000 <- SurfaceDrainee[which(SurfaceDrainee[,2] > 25000),1] +reaches_saone <- Topologie (BrinSaoneAval,reach) +reaches_leman <- Topologie (BrinLemanAval,reach) + +for (reach2 in brin0){ +reach[which(reach[,1]==reach2),dim(reach)[2]] = 0.5 +if (length(which(reaches_saone == reach2)) != 0){reach[which(reach[,1]==reach2),dim(reach)[2]] = 1} +if (length(which(reaches_leman == reach2)) != 0){reach[which(reach[,1]==reach2),dim(reach)[2]] = 1} +} + +for (reach2 in brin100){ +reach[which(reach[,1]==reach2),dim(reach)[2]] = 1.5 +if (length(which(reaches_saone == reach2)) != 0){reach[which(reach[,1]==reach2),dim(reach)[2]] = 2} +if (length(which(reaches_leman == reach2)) != 0){reach[which(reach[,1]==reach2),dim(reach)[2]] = 1} +} +for (reach2 in brin300){ +reach[which(reach[,1]==reach2),dim(reach)[2]] = 2 +if (length(which(reaches_saone == reach2)) != 0){reach[which(reach[,1]==reach2),dim(reach)[2]] = 2} +if (length(which(reaches_leman == reach2)) != 0){reach[which(reach[,1]==reach2),dim(reach)[2]] = 1} +} +for (reach2 in brin25000){ 
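+# largest class (more than 25000 km2 drained): TA is set to 10; reaches upstream
+# of the Saone outlet keep 10 and reaches upstream of Lake Geneva are forced to 1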
+reach[which(reach[,1]==reach2),dim(reach)[2]] = 10 +if (length(which(reaches_saone == reach2))!= 0){reach[which(reach[,1]==reach2),dim(reach)[2]] = 10} +if (length(which(reaches_leman == reach2))!= 0){reach[which(reach[,1]==reach2),dim(reach)[2]] = 1} +} +write.table (Names,paste(cheminReachmodif,'reach.par',sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=F) +write.table (Min,paste(cheminReachmodif,'reach.par',sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=T) +write.table (Max,paste(cheminReachmodif,'reach.par',sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=T) +write.table (Unit,paste(cheminReachmodif,'reach.par',sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=T) +write.table (reach,paste(cheminReachmodif,'reach.par',sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=T) +reach +} + + +#Calcul de l'aire drainee a chaque brin +##################################################################################### +#Inputs : +# - reach : le fichier parametre reach.par charge +# - hrus : le fichier parametre hrus.par charge +# - chemin_hrus : chemin dans lequel se trouve le fichier hrus.par initial +# +# Fonction : +# - Recuperation des brins de reach +# - Si on est au dernier brin (exutoire du BV => to_reach = 0), on somme la surface des HRUs ayant ce brin pour subbasin) +# - Pour un brin donne, on remonte vers ses "to_reach" : +# - si le brin to_reach n'a jamais etudie, on calcule la somme des HRUs qui sont connectees a ce brin +# - si le brin to_reach a deja ete etudie, on passe au suivant +# - Une fois que tous les brins amont sont etudies, on rassemble tous les brins amont +# - On somme toutes les aires calculees pour les differents brins et on convertit la somme en km2 +# - On stocke l'aire drainee et le brin correspondant dans un fichier csv +# +# Output : +# - un objet SurfaceDrainee avec col 1 = n? 
brin et col 2 = surface drainee en km2 +# - le fichier Aire_drainee_brins.csv' +##################################################################################### +CalculAireDraineeBrin = function (reach,hrus,chemin_hrus, hrufilename){ +nbLines = Lignes_saut(chemin_hrus,hrufilename) +headerHRU = readLines(paste(chemin_hrus,hrufilename,sep=''), n = nbLines) +LinesNames = which(substr(headerHRU,1,2)=="ID") +Names = read.table(paste(chemin_hrus,hrufilename,sep=''),nr=1,skip=LinesNames-1) +Total_surf <- NULL +indSurf = which(Names=="area") +indID = which(Names=="ID") +Aire_brins = matrix(c(0,0),1,2) +for (brin_choisi in reach[,which(Names == "ID")]){ +if(reach[brin_choisi== reach[,1],2] == 0) {Aire_brins = rbind (Aire_brins,c(brin_choisi,sum(hrus[hrus[,which(Names=="subbasin")] == brin_choisi,indSurf])))} +serie <- NULL +serie2 <- NULL +serie3 <- NULL +Co = 1 +X0 <- reach[which(reach[,2] == brin_choisi),1] +if (length(X0) != 0){ +Prec = X0 +while(length(Prec)!=0){ +for (k in c(1:length(Prec))){ +newbrins = reach[reach[,2] == Prec[k],1] +serie <- c(serie,newbrins) +Aire = 0 +for (n in c(1:length(newbrins))){ +if (length(newbrins) !=0 & length(which(newbrins[n]==Aire_brins[,1])) ==0){ +Aire = sum(hrus[hrus[,which(Names=="subbasin")] == newbrins[n],indSurf]) +Aire_brins = rbind(Aire_brins,c(newbrins[n],Aire)) +} +} +} +assign(paste("X",Co,sep=""),serie) +serie <- NULL +Co = Co + 1 +Prec = get(paste("X",Co-1,sep="")) +} +for (m in c(0:(Co-1))){ +serie2 <- c(serie2,get(paste("X",m,sep=""))) +assign(paste("X",m,sep=""),NULL)} +} +serie3 = unique(c(brin_choisi,serie2)) +serie3 = serie3[length(serie3):1] +Temp = NULL +for (n in c(1:length(serie3))){ +Aire = Aire_brins[which(Aire_brins[,1]==serie3[n]),2] +Temp = c(Temp,Aire) +} +Total_surf <- rbind(Total_surf,c(brin_choisi, sum(Temp)/1000000)) +print(brin_choisi);flush.console() +} +colnames(Total_surf) <- c('Brin','Surface drainee(en km2)') +write.table(Total_surf,paste(chemin_hrus,'Aire_drainee_brins.csv',sep=''),sep=';',dec='.',quote=F,row.names=F,col.names=T) +Total_surf +} + + +######################################################################################## +######################################################################################## +######################################################################################## +###### Code pour ecrire le modele J2000-Rhone a partir des differentes entrees ######### +######################################################################################## +######################################################################################## + +############################################################################################## +############################################################################################## +###### Partie du code qui ajoute les boucles Init et FilterInit ############################## +############################################################################################## +# Parametres : +# - Nom_Boucle : code de la station etudiee +# - Nom_Modele : Nom du modele (le nom qui sera ecrit quand vous lancerez le modele avec JAMS (attention, ce n'est pas le nom du fichier de sortie!) 
+# - output_file : Fichier de sortie +# - Dossier_Temp : Dossier ou sera enregistre le modele final (ainsi que des boucles intermediaires) +# - Water2 : la liste des stations en amont de la station etudiee +# - Corres_ID_Stations : 2 colonnes : 1)ID , 2)code des stations +# +############################################################################################## +Boucle_Init = function (Nom_Boucle,Nom_Modele,output_file,Dossier_Temp,Water2,Corres_ID_Stations){ +write.table(paste(' <contextcomponent class="jams.components.conditional.FilteredSpatialContext" enabled="true" name="',Nom_Boucle,'Init" version="1.1_0"> + <var name="attributeName" value="watershed"/>',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) + +write.table(t(Water2),paste(Dossier_Temp,'Boucle_temp.txt',sep=''),col.names=F,row.names=F,quote=F,append=F,sep=';') +liste_bv <- read.table(paste(Dossier_Temp,'Boucle_temp.txt',sep=''),sep='\t') +file.remove(paste(Dossier_Temp,'Boucle_temp.txt',sep='')) + +write.table(paste(' <var name="attributeValues" value="',liste_bv[1,1],'"/> + <var attribute="hrus" context="',Nom_Modele,'" name="entities"/> + <component class="org.unijena.j2k.aggregate.SumAggregator" enabled="true" name="WatershedAreaCalculator" version="1.0_0"> + <var attribute="area" context="',Nom_Boucle,'Init" name="value"/> + <var attribute="',Nom_Boucle,'area" context="',Nom_Modele,'" name="sum"/> + </component> + </contextcomponent> + + <contextcomponent class="jams.components.conditional.FilteredSpatialContext" enabled="true" name="',Nom_Boucle,'FilterInit" version="1.1_0"> + <var name="attributeName" value="watershed"/> + <var name="attributeValues" value="',liste_bv[1,1],'"/> + <var attribute="hrus" context="',Nom_Modele,'" name="entities"/> + <component class="org.unijena.j2k.CalcAreaWeight" enabled="true" name="CalcAreaWeight" version="1.0_0"> + <var attribute="area" context="',Nom_Boucle,'FilterInit" name="entityArea"/> + <var attribute="',Nom_Boucle,'areaweight" context="',Nom_Boucle,'FilterInit" name="areaWeight"/> + <var attribute="',Nom_Boucle,'area" context="',Nom_Modele,'" name="catchmentArea"/> + </component> + </contextcomponent>',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) +} + + + + +############################################################################################## +############################################################################################## +###### Recuperation des watersheds dependant d'une station donnee ############################ +############################################################################################## +# Parametres : +# - Nom_Boucle : Code de la station etudiee +# - Correspondance : Correspondance entre code station et brin ou obtenir le debit +# - reach : le fichier parametre reach.par charge +# - Corres_ID_Stations : 2 colonnes : 1)ID , 2)code des stations +# +############################################################################################## +Water = function(Nom_Boucle,Correspondance,reach,Corres_ID_Stations){ +Brins = Correspondance[which(Correspondance[,1]== Nom_Boucle),2] +Watersheds = NULL +for (brin in Brins) { +IDs <- NULL +Brin0 <- brin +for (indice in c(1:1000)){ +assign(paste('Brin',indice,sep=''),NULL)} +k =0 +while (length(get(paste('Brin',k,sep='')))!=0){ +for (i in c(1:length(get(paste('Brin',k,sep=''))))){ +assign(paste('Brin',k+1,sep=''),c(get(paste('Brin',k+1,sep='')),reach[which(reach[,2]== get(paste('Brin',k,sep=''))[i]),1])) +} +k = k+1 +} +Total <- NULL +for (l in c(1:k)){ 
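+# concatenate the reaches collected at every traversal depth into the full set
+# of reaches located upstream of this gauging station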
+Total <- c(Total,get(paste('Brin',l,sep=''))) +} +Watershed <- Nom_Boucle +if(length(Total) !=0){ +for (m in Total){ +if(length(which(m == Correspondance[,2])) != 0){ +Watershed = c(Watershed,as.character(Correspondance [which(m == Correspondance[,2]),1])) +} +} +Watershed = unique(Watershed) +Watershed = Watershed[order(Watershed)] +for (w in Watershed){ +IDs = c(IDs,Corres_ID_Stations[which(Corres_ID_Stations[,2] == w),1]) +} +Watersheds = c(Watersheds,IDs) +} else { +Watersheds = c(Watersheds,Corres_ID_Stations[which(Corres_ID_Stations[,2] == Nom_Boucle),1]) +} +} +Watersheds = unique(Watersheds) +return(Watersheds) +} + + + +############################################################################################## +############################################################################################## +###### Partie du code qui ajoute les boucles pour les moyennes sur les bassins aux stations ## +############################################################################################## +# Parametres : +# - Nom_Boucle : code de la station etudiee +# - Nom_Modele : Nom du modele (le nom qui sera ecrit quand vous lancerez le modele avec JAMS (attention, ce n'est pas le nom du fichier de sortie!) +# - output_file : Fichier de sortie +# - Dossier_Temp : Dossier ou sera enregistre le modele final (ainsi que des boucles intermediaires) +# - Water2 : la liste des stations en amont de la station etudiee +# - Corres_ID_Stations : 2 colonnes : 1)ID , 2)code des stations +# - SortiesL : Sorties converties en mm (division par la surface) +# - Sorties : Sorties directes du modele (en mm ou sans unite) +# +############################################################################################## +Boucle_Agreg = function(Nom_Boucle,Nom_Modele,output_file,Dossier_Temp,Water2,Corres_ID_Stations,SortiesL,Sorties){ +write.table(paste(' <contextcomponent class="jams.components.conditional.FilteredSpatialContext" enabled="true" name="',Nom_Boucle,'Loop" version="1.1_0"> + <var name="attributeName" value="watershed"/> ',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) +write.table(t(Water2),paste(Dossier_Temp,'Boucle_temp.txt',sep=''),col.names=F,row.names=F,quote=F,append=F,sep=';') +liste_bv <- read.table(paste(Dossier_Temp,'Boucle_temp.txt',sep=''),sep='\t') +file.remove(paste(Dossier_Temp,'Boucle_temp.txt',sep='')) +write.table(t(Sorties),paste(Dossier_Temp,'Boucle_temp3.txt',sep=''),col.names=F,row.names=F,quote=F,append=F,sep=';') +Outputs <- read.table(paste(Dossier_Temp,'Boucle_temp3.txt',sep=''),sep='\t') +file.remove(paste(Dossier_Temp,'Boucle_temp3.txt',sep='')) +write.table(t(SortiesL),paste(Dossier_Temp,'Boucle_temp4.txt',sep=''),col.names=F,row.names=F,quote=F,append=F,sep=';') +OutputsL <- read.table(paste(Dossier_Temp,'Boucle_temp4.txt',sep=''),sep='\t') +file.remove(paste(Dossier_Temp,'Boucle_temp4.txt',sep='')) +Sorties2 <- NULL +for (k in Sorties){ +Sorties2 <- c(Sorties2,paste(Nom_Boucle,k,sep='')) +} +write.table(t(Sorties2),paste(Dossier_Temp,'Boucle_temp5.txt',sep=''),col.names=F,row.names=F,quote=F,append=F,sep=';') +Outputs2 <- read.table(paste(Dossier_Temp,'Boucle_temp5.txt',sep=''),sep='\t') +file.remove(paste(Dossier_Temp,'Boucle_temp5.txt',sep='')) +SortiesL2 <- NULL +for (k in SortiesL){ +SortiesL2 <- c(SortiesL2,paste(Nom_Boucle,k,sep='')) +} +write.table(t(SortiesL2),paste(Dossier_Temp,'Boucle_temp6.txt',sep=''),col.names=F,row.names=F,quote=F,append=F,sep=';') +OutputsL2 <- read.table(paste(Dossier_Temp,'Boucle_temp6.txt',sep=''),sep='\t') 
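+# Each vector above is written to a temporary file with ';' as separator and
+# read back with sep='\t', so it comes back as a one-cell table whose first
+# cell holds the whole list joined by ';'; that joined string is what gets
+# inserted into the XML written below (e.g. the attributeValues list of
+# watershed IDs).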
+file.remove(paste(Dossier_Temp,'Boucle_temp6.txt',sep='')) +write.table(paste(' <var name="attributeValues" value="',liste_bv[1,1],'"/> + <var attribute="hrus" context="',Nom_Modele,'" name="entities"/> + <component class="org.unijena.j2k.aggregate.WeightedSumAggregator" enabled="true" name="WeightedSumAggregator_2" version="1.0_0"> + <var attribute="',Nom_Boucle,'area" context="',Nom_Modele,'" name="weight"/> + <var attribute="',OutputsL[1,1],'" context="',Nom_Boucle,'Loop" name="value"/> + <var attribute="',OutputsL2[1,1],'" context="TimeLoop" name="sum"/> + </component> + <component class="org.unijena.j2k.aggregate.WeightedSumAggregator" enabled="true" name="WeightedSumAggregator_3" version="1.0_0"> + <var attribute="',Nom_Boucle,'areaweight" context="',Nom_Boucle,'Loop" name="weight"/> + <var attribute="',Outputs[1,1],'" context="',Nom_Boucle,'Loop" name="value"/> + <var attribute="',Outputs2[1,1],'" context="TimeLoop" name="sum"/> + </component> + </contextcomponent>',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) +} + +# Boucle pour ecrire les composantes du debit dans le TimeLoop (XML) +############################################################################################## +############################################################################################## +###### Ajout des composantes du debit au TimeLoop ############################################ +############################################################################################## +# Parametres : +# - Num_Brin : Brin correspondant a la station etudiee +# - output_file : Fichier de sortie +# - variable : Nom des variables ecrites dans le TimeLoop +# +############################################################################################## +Boucle_output <- function (Num_Brin,output_file,variable){ +write.table(paste(' <attribute id="',variable,'_',Num_Brin,'"/>',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) +} + + +# Boucle pour ecrire les variables de bassins dans le TimeLoop (XML) +############################################################################################## +############################################################################################## +###### Ajout des variables de bassins au TimeLoop ############################################ +############################################################################################## +# Parametres : +# - Nom_Boucle : Code de la station etudiee +# - output_file : Fichier de sortie +# - Dossier_Temp : Dossier ou sera enregistre le modele final (ainsi que des boucles intermediaires) +# - SortiesL : Sorties converties en mm (division par la surface) +# - Sorties : Sorties directes du modele (en mm ou sans unite) +# +############################################################################################## +Boucle_sorties <- function(Nom_Boucle,output_file,Dossier_Temp,SortiesL,Sorties){ +write.table(t(Sorties),paste(Dossier_Temp,'Boucle_temp3.txt',sep=''),col.names=F,row.names=F,quote=F,append=F,sep=';') +Outputs <- read.table(paste(Dossier_Temp,'Boucle_temp3.txt',sep='')) +file.remove(paste(Dossier_Temp,'Boucle_temp3.txt',sep='')) +write.table(t(SortiesL),paste(Dossier_Temp,'Boucle_temp4.txt',sep=''),col.names=F,row.names=F,quote=F,append=F,sep=';') +OutputsL <- read.table(paste(Dossier_Temp,'Boucle_temp4.txt',sep='')) +file.remove(paste(Dossier_Temp,'Boucle_temp4.txt',sep='')) +Sorties2 <- NULL +for (k in Sorties){ +Sorties2 <- c(Sorties2,paste(Nom_Boucle,k,sep='')) +} +SortiesL2 <- NULL 
+for (k in SortiesL){ +SortiesL2 <- c(SortiesL2,paste(Nom_Boucle,k,sep='')) +} +for (z in c(Sorties2,SortiesL2)){ +write.table(paste(' <attribute id="',z,'"/>',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) +} +} + + +############################################################################################## +############################################################################################## +###### Boucle pour ecrire les composantes du debit dans TimeLoop depuis ReachLoop(.dat) ###### +############################################################################################## +# Parametres : +# - variable : Nom des variables ecrites dans le TimeLoop +# - Correspondance : Correspondance entre code station et brin ou obtenir le debit +# - Stations : liste des stations retenues +# - liste_br : liste des brins correspondant aux stations(suivant l'ordre des stations) +# - output_file : Fichier de sortie +# - indice_var : indice dans la boucle sur les variables +# +############################################################################################## +SwitchContext <- function(variable,Correspondance,Stations,liste_br,output_file,indice_var){ +write.table(paste(' <contextcomponent class="jams.components.conditional.SwitchContext" enabled="true" name="',variable,'" version="1.0_1"> + <var name="values" value="',liste_br[1,1],'"/> + <var attribute="ID" context="ReachLoop" name="attribute"/>',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) +for (i in Stations){ +Nom_Boucle = i +k=0 +while (k !=length(which(Correspondance[,1]==Nom_Boucle))){ +Num_Brin <- as.character(Correspondance[which(Correspondance[,1]==Nom_Boucle),2][k+1]) +write.table(paste(' <component class="org.unijena.j2k.aggregate.WeightedSumAggregator" enabled="true" name="',Nom_Boucle,'_',(10*(indice_var-1))+(k+1),'" version="1.0_0"> + <var attribute="',variable,'" context="ReachLoop" name="value"/> + <var attribute="',variable,'_',Num_Brin,'" context="TimeLoop" name="sum"/> + </component>',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) +k = k+1 +} +} +write.table(' </contextcomponent>',output_file,col.names=F,row.names=F,quote=F,append=T) +} + + + +############################################################################################## +############################################################################################## +###### Mise en place du catchmentReseter (remettre les variables a 0 dans J2000) ############# +############################################################################################## +# Parametres : +# - Model_Outputs : Autres variables pour lesquelles on calcule la moyenne sur tout le bassin (dont l'unite est differente du litre) +# - Model_OutputsL : Autres variables pour lesquelles on calcule la moyenne sur tout le bassin (dont l'unite est le litre) +# - Dossier_Temp : Dossier ou sera enregistre le modele final (ainsi que des boucles intermediaires) +# - Barrage : si 'oui', on ajoute les variables issues du module de barrages +# +############################################################################################### +catchResetfunction <- function (Model_Outputs,Model_OutputsL,Dossier_Temp,Barrage){ +Sorties2 <- NULL +for (i in Stations){ +Nom_Boucle = i +for (k in Sorties){ +Sorties2 <- c(Sorties2,paste(Nom_Boucle,k,sep='')) +} +} +SortiesL2 <- NULL +for (i in Stations){ +Nom_Boucle = i +for (k in SortiesL){ +SortiesL2 <- c(SortiesL2,paste(Nom_Boucle,k,sep='')) +} +} +Flow <- NULL +for (i in Stations){ +Nom_Boucle 
= i +k=0 +while (k !=length(which(Correspondance[,1]==Nom_Boucle))){ +Num_Brin <- as.character(Correspondance[which(Correspondance[,1]==Nom_Boucle),2][k+1]) +for (variable in variables){ +Flow <- c(Flow,paste(variable,'_',Num_Brin,sep='')) +} +k=k+1 +} +} +Barrages_var = NULL +if (Barrage == 'oui'){ +for (k in as.character(liste_deriv[,Der_colNom])){ +Barrages_var = c(Barrages_var, paste(k,'FO',sep='')) +Barrages_var = c(Barrages_var, paste(k,'_tvRD1',sep=''),paste(k,'_tvRD2',sep=''),paste(k,'_tvRG1',sep=''),paste(k,'_tvRG2',sep='')) +} +for (k in as.character(liste_barrage[,Bar_colNom])){ +Barrages_var = c(Barrages_var, paste(k,'_actFO',sep='')) +Barrages_var = c(Barrages_var, paste(k,'_Storage',sep='')) +} +} +write.table(t(c(Model_Outputs,Model_OutputsL,Sorties2,SortiesL2,Flow,Barrages_var)),paste(Dossier_Temp,'Boucle_temp6.txt',sep=''),col.names=F,row.names=F,quote=F,append=F,sep=';') +Catch_Reset <- read.table(paste(Dossier_Temp,'Boucle_temp6.txt',sep=''),sep='\t') +file.remove(paste(Dossier_Temp,'Boucle_temp6.txt',sep='')) +Catch_Reset +} + +############################################################################################## +############################################################################################## +###### Partie du code qui est responsable de l'application de l'irrigation ################### +############################################################################################## +# Parametres : +# - output_file : Fichier de sortie +# +############################################################################################## +IrrigationModule1 = function (output_file){ +write.table('<component class="irrigation.IrrigationInit" enabled="true" name="IrrigationInit" version="1.0_0"> + <var attribute="irrigationStart" context="HRULoop" name="start"/> + <var attribute="irrigationEnd" context="HRULoop" name="end"/> + <var attribute="time" context="J2K_rhone" name="time"/> + <var attribute="irrigationActive" context="HRULoop" name="irrigated"/> + </component> + <contextcomponent class="jams.components.conditional.SwitchContext" enabled="true" name="IrrigationContext_1" version="1.0_1"> + <var name="values" value="0;1"/> + <var attribute="irrigationActive" context="HRULoop" name="attribute"/> + <component class="irrigation.VariableInit" enabled="true" name="VariableInit_1" version="1.0_0"> + <var attribute="irrigationDemand" context="HRULoop" name="irrigationDemand"/> + <var attribute="irrigationTotal" context="HRULoop" name="irrigationTotal"/> + </component> + <contextcomponent class="jams.components.conditional.SwitchContext" enabled="true" name="IrrigationApplication" version="1.0_1"> + <var name="values" value="1;2;3"/> + <var attribute="irrig_type" context="HRULoop" name="attribute"/> + <component class="irrigation.IrrigationApplicationAspersion" enabled="true" name="IrrigationApplicationAspersion" version="1.0_0"> + <var attribute="area" context="HRULoop" name="area"/> + <var attribute="precip" context="HRULoop" name="precip"/> + <var attribute="irrigationWater" context="HRULoop" name="irrigationWater"/> + <var attribute="irrigationTotal" context="HRULoop" name="irrigationTotal"/> + </component> + <component class="irrigation.IrrigationApplicationDrip" enabled="true" name="IrrigationApplicationDrip" version="1.0_0"> + <var attribute="actMPS" context="HRULoop" name="actMPS"/> + <var attribute="maxMPS" context="HRULoop" name="maxMPS"/> + <var attribute="irrigationWater" context="HRULoop" name="irrigationWater"/> + <var attribute="irrigationTotal" 
context="HRULoop" name="irrigationTotal"/> + </component> + <component class="irrigation.IrrigationApplicationSurface" enabled="true" name="IrrigationApplicationSurface" version="1.0_0"> + <var attribute="netRain" context="HRULoop" name="netRain"/> + <var attribute="irrigationWater" context="HRULoop" name="irrigationWater"/> + <var attribute="irrigationTotal" context="HRULoop" name="irrigationTotal"/> + </component> + </contextcomponent> + </contextcomponent>',output_file,col.names=F,row.names=F,quote=F,append=T) +} + + + +############################################################################################## +############################################################################################## +###### Partie du code qui est responsable du calcul de la demande en irrigation ############## +############################################################################################## +# Parametres : +# - output_file : Fichier de sortie +# +# Options : +# Le code cree 3 modules (module initial developpe par Sven Kralisch, un module avec calcul de la dose par potET - actET et un module avec possibilite de choisir la dose max ainsi que le taux de remplissage des reservoirs MPS et LPS +# +############################################################################################## +IrrigationModule2 = function (output_file){ +write.table(' <contextcomponent class="jams.components.conditional.SwitchContext" enabled="true" name="IrrigationContext_2" version="1.0_1"> + <var name="values" value="1"/> + <var attribute="irrigationActive" context="HRULoop" name="attribute"/> + <contextcomponent class="jams.components.conditional.SwitchContext" enabled="true" name="IrrigationDemandContext" version="1.0_1"> + <var name="values" value="1;2;3"/> + <var attribute="irrig_type" context="HRULoop" name="attribute"/> + <component class="irrigation.IrrigationDemand" enabled="false" name="IrrigationDemand_Asp" version="1.0_0"> + <var attribute="satMPS" context="HRULoop" name="satMPS"/> + <var attribute="maxMPS" context="HRULoop" name="maxMPS"/> + <var attribute="satLPS" context="HRULoop" name="satLPS"/> + <var attribute="reaches" context="J2K_rhone" name="reaches"/> + <var attribute="actET" context="HRULoop" name="actET"/> + <var attribute="maxLPS" context="HRULoop" name="maxLPS"/> + <var name="irrigationDemandCorrectionMPS" value="1.0"/> + <var name="irrigationDemandCorrectionLPS" value="0.0"/> + <var name="etDeficit" value="0.5"/> + <var attribute="subbasin" context="HRULoop" name="subBasin"/> + <var attribute="hrus" context="J2K_rhone" name="hrus"/> + <var attribute="irrigationDemand" context="HRULoop" name="irrigationDemand"/> + <var attribute="potET" context="HRULoop" name="potET"/> + </component> + <component class="irrigation.IrrigationDemand_maxDose_MPS" enabled="true" name="IrrigationDemand_maxDose_MPS_Asp" version="1.0_0"> + <var attribute="area" context="HRULoop" name="area"/> + <var attribute="satMPS" context="HRULoop" name="satMPS"/> + <var attribute="maxMPS" context="HRULoop" name="maxMPS"/> + <var name="efficiency" value="0.7"/> + <var attribute="satLPS" context="HRULoop" name="satLPS"/> + <var attribute="reaches" context="J2K_rhone" name="reaches"/> + <var attribute="actET" context="HRULoop" name="actET"/> + <var attribute="maxLPS" context="HRULoop" name="maxLPS"/> + <var name="satMPSexp" value="0.25"/> + <var name="irrigationDemandCorrectionMPS" value="1.0"/> + <var name="irrigationDemandCorrectionLPS" value="0.0"/> + <var name="etDeficit" value="0.5"/> + <var 
attribute="subbasin" context="HRULoop" name="subBasin"/> + <var name="satLPSexp" value="0.9"/> + <var attribute="hrus" context="J2K_rhone" name="hrus"/> + <var attribute="waterRequirements" context="HRULoop" name="waterRequirements"/> + <var attribute="irrigationDemand" context="HRULoop" name="irrigationDemand"/> + <var attribute="potET" context="HRULoop" name="potET"/> + </component> + <component class="irrigation.IrrigationDemand_potET" enabled="false" name="IrrigationDemand_Asp_potET" version="1.0_0"> + <var attribute="area" context="HRULoop" name="area"/> + <var attribute="subbasin" context="HRULoop" name="subBasin"/> + <var attribute="reaches" context="J2K_rhone" name="reaches"/> + <var attribute="actET" context="HRULoop" name="actET"/> + <var name="irrigationDemandCorrectionET" value="0.9"/> + <var attribute="hrus" context="J2K_rhone" name="hrus"/> + <var attribute="irrigationDemand" context="HRULoop" name="irrigationDemand"/> + <var attribute="potET" context="HRULoop" name="potET"/> + <var name="maxDosis" value="0"/> + <var name="etDeficit" value="0.5"/> + </component> + <component class="irrigation.IrrigationDemand" enabled="false" name="IrrigationDemand_Drip" version="1.0_0"> + <var attribute="satMPS" context="HRULoop" name="satMPS"/> + <var attribute="maxMPS" context="HRULoop" name="maxMPS"/> + <var attribute="satLPS" context="HRULoop" name="satLPS"/> + <var attribute="reaches" context="J2K_rhone" name="reaches"/> + <var attribute="actET" context="HRULoop" name="actET"/> + <var attribute="maxLPS" context="HRULoop" name="maxLPS"/> + <var name="irrigationDemandCorrectionMPS" value="1.0"/> + <var name="irrigationDemandCorrectionLPS" value="0.0"/> + <var name="etDeficit" value="0.5"/> + <var attribute="subbasin" context="HRULoop" name="subBasin"/> + <var attribute="hrus" context="J2K_rhone" name="hrus"/> + <var attribute="irrigationDemand" context="HRULoop" name="irrigationDemand"/> + <var attribute="potET" context="HRULoop" name="potET"/> + </component> + <component class="irrigation.IrrigationDemand_maxDose_MPS" enabled="true" name="IrrigationDemand_maxDose_MPS_Drip" version="1.0_0"> + <var attribute="area" context="HRULoop" name="area"/> + <var attribute="satMPS" context="HRULoop" name="satMPS"/> + <var attribute="maxMPS" context="HRULoop" name="maxMPS"/> + <var name="efficiency" value="0.9"/> + <var attribute="satLPS" context="HRULoop" name="satLPS"/> + <var attribute="reaches" context="J2K_rhone" name="reaches"/> + <var attribute="actET" context="HRULoop" name="actET"/> + <var attribute="maxLPS" context="HRULoop" name="maxLPS"/> + <var name="satMPSexp" value="0.25"/> + <var name="irrigationDemandCorrectionMPS" value="1.0"/> + <var name="irrigationDemandCorrectionLPS" value="0.0"/> + <var name="etDeficit" value="0.5"/> + <var attribute="subbasin" context="HRULoop" name="subBasin"/> + <var name="satLPSexp" value="0.9"/> + <var attribute="hrus" context="J2K_rhone" name="hrus"/> + <var attribute="waterRequirements" context="HRULoop" name="waterRequirements"/> + <var attribute="irrigationDemand" context="HRULoop" name="irrigationDemand"/> + <var attribute="potET" context="HRULoop" name="potET"/> + </component> + <component class="irrigation.IrrigationDemand_potET" enabled="false" name="IrrigationDemand_Drip_potET" version="1.0_0"> + <var attribute="area" context="HRULoop" name="area"/> + <var attribute="subbasin" context="HRULoop" name="subBasin"/> + <var attribute="reaches" context="J2K_rhone" name="reaches"/> + <var attribute="actET" context="HRULoop" name="actET"/> + <var 
name="irrigationDemandCorrectionET" value="0.9"/> + <var attribute="hrus" context="J2K_rhone" name="hrus"/> + <var attribute="irrigationDemand" context="HRULoop" name="irrigationDemand"/> + <var attribute="potET" context="HRULoop" name="potET"/> + <var name="maxDosis" value="0"/> + <var name="etDeficit" value="0.5"/> + </component> + <component class="irrigation.IrrigationDemand" enabled="false" name="IrrigationDemand_Surf" version="1.0_0"> + <var attribute="satMPS" context="HRULoop" name="satMPS"/> + <var attribute="maxMPS" context="HRULoop" name="maxMPS"/> + <var attribute="satLPS" context="HRULoop" name="satLPS"/> + <var attribute="reaches" context="J2K_rhone" name="reaches"/> + <var attribute="actET" context="HRULoop" name="actET"/> + <var attribute="maxLPS" context="HRULoop" name="maxLPS"/> + <var name="irrigationDemandCorrectionMPS" value="1.0"/> + <var name="irrigationDemandCorrectionLPS" value="1.0"/> + <var name="etDeficit" value="0.5"/> + <var attribute="subbasin" context="HRULoop" name="subBasin"/> + <var attribute="hrus" context="J2K_rhone" name="hrus"/> + <var attribute="irrigationDemand" context="HRULoop" name="irrigationDemand"/> + <var attribute="potET" context="HRULoop" name="potET"/> + </component> + <component class="irrigation.IrrigationDemand_maxDose_MPS" enabled="true" name="IrrigationDemand_maxDose_MPS_Surf" version="1.0_0"> + <var attribute="area" context="HRULoop" name="area"/> + <var attribute="satMPS" context="HRULoop" name="satMPS"/> + <var attribute="maxMPS" context="HRULoop" name="maxMPS"/> + <var name="efficiency" value="0.5"/> + <var attribute="satLPS" context="HRULoop" name="satLPS"/> + <var attribute="reaches" context="J2K_rhone" name="reaches"/> + <var attribute="actET" context="HRULoop" name="actET"/> + <var attribute="maxLPS" context="HRULoop" name="maxLPS"/> + <var name="satMPSexp" value="0.25"/> + <var name="irrigationDemandCorrectionMPS" value="1.0"/> + <var name="irrigationDemandCorrectionLPS" value="0.0"/> + <var name="etDeficit" value="0.5"/> + <var attribute="subbasin" context="HRULoop" name="subBasin"/> + <var name="satLPSexp" value="0.9"/> + <var attribute="hrus" context="J2K_rhone" name="hrus"/> + <var attribute="waterRequirements" context="HRULoop" name="waterRequirements"/> + <var attribute="irrigationDemand" context="HRULoop" name="irrigationDemand"/> + <var attribute="potET" context="HRULoop" name="potET"/> + </component> + <component class="irrigation.IrrigationDemand_potET" enabled="false" name="IrrigationDemand_Surf_potET" version="1.0_0"> + <var attribute="area" context="HRULoop" name="area"/> + <var attribute="subbasin" context="HRULoop" name="subBasin"/> + <var attribute="reaches" context="J2K_rhone" name="reaches"/> + <var attribute="actET" context="HRULoop" name="actET"/> + <var name="irrigationDemandCorrectionET" value="0.9"/> + <var attribute="hrus" context="J2K_rhone" name="hrus"/> + <var attribute="irrigationDemand" context="HRULoop" name="irrigationDemand"/> + <var attribute="potET" context="HRULoop" name="potET"/> + <var name="maxDosis" value="0"/> + <var name="etDeficit" value="0.5"/> + </component> + </contextcomponent> + </contextcomponent>',output_file,col.names=F,row.names=F,quote=F,append=T) +} + +############################################################################################## +############################################################################################## +###### Partie du code qui est responsable du prelevement de l'irrigation dans la riviere ##### 
+############################################################################################## +# Parametres : +# - output_file : Fichier de sortie +# +############################################################################################## +IrrigationModule3 = function (output_file){ +write.table(' <component class="irrigation.IrrigationWaterTransfer_act" enabled="true" name="IrrigationWaterTransfer_act" version="1.0_0"> + <var attribute="inRD1" context="ReachLoop" name="inRD1"/> + <var attribute="inRD2" context="ReachLoop" name="inRD2"/> + <var attribute="actRD2" context="ReachLoop" name="actRD2"/> + <var attribute="inRG2" context="ReachLoop" name="inRG2"/> + <var attribute="actRD1" context="ReachLoop" name="actRD1"/> + <var attribute="totalTransfer" context="ReachLoop" name="totalTransfer"/> + <var attribute="reaches" context="J2K_rhone" name="reaches"/> + <var attribute="actRG2" context="ReachLoop" name="actRG2"/> + <var attribute="actRG1" context="ReachLoop" name="actRG1"/> + <var attribute="inRG1" context="ReachLoop" name="inRG1"/> + <var name="actPrel" value="0.5"/> + <var attribute="totalDemand" context="ReachLoop" name="totalDemand"/> + <var attribute="totalInput" context="ReachLoop" name="totalInput"/> + </component>',output_file,col.names=F,row.names=F,quote=F,append=T) +} + + +############################################################################################## +############################################################################################## +###### Fonction permettant de passer la FO de ReachLoop vers TimeLoop ######################## +############################################################################################## +# Parametres : +# - output_file : Fichier de sortie +# - liste_deriv : liste des derivations (avec minimum brin prelevement et brin de sortie) +# - Der_colNom : Indice de la colonne ayant le nom des derivations +# - Der_colBrinEntree : Indice de la colonne ayant le brin d'entree des derivations +# +############################################################################################## +Derivation1 = function (output_file,liste_deriv,Der_colNom,Der_colBrinEntree){ +write.table(' <contextcomponent class="jams.components.conditional.SwitchContext" enabled="true" name="FO_Trans" version="1.0_1">',output_file,col.names=F,row.names=F,quote=F,append=T) +write.table(t(liste_deriv[,Der_colBrinEntree]),paste(Dossier_Temp,'Boucle_temp.txt',sep=''),col.names=F,row.names=F,quote=F,append=F,sep=';') +liste_dv <- read.table(paste(Dossier_Temp,'Boucle_temp.txt',sep='')) +file.remove(paste(Dossier_Temp,'Boucle_temp.txt',sep='')) +write.table(t(liste_deriv[,Der_colBrinEntree]),paste(Dossier_Temp,'Boucle_temp.txt',sep=''),col.names=F,row.names=F,quote=F,append=F,sep=';') +write.table(paste(' <var name="values" value="',as.character(liste_dv[1,1]),'"/> + <var attribute="ID" context="ReachLoop" name="attribute"/>',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) +for (k in as.character(liste_deriv[,Der_colNom])){ +write.table(paste(' <component class="org.unijena.j2k.aggregate.WeightedSumAggregator" enabled="true" name="',k,'" version="1.0_0"> + <var attribute="',k,'FO" context="TimeLoop" name="sum"/> + <var attribute="FO" context="ReachLoop" name="value"/> + </component>',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) + } +write.table(' </contextcomponent>',output_file,col.names=F,row.names=F,quote=F,append=T) +} + 
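+# Illustrative sketch (not part of the original workflow) of how the derivation
+# helpers above and below could be called; the file name 'derivations.csv' and
+# the column indices are placeholder assumptions, and output_file / Dossier_Temp
+# are assumed to be defined earlier in the script, as in the rest of the code.
+if (FALSE) {
+liste_deriv <- read.table('derivations.csv',sep=';',header=T)
+Der_colNom <- 1          # column holding the derivation name
+Der_colBrinEntree <- 2   # column holding the reach where water is withdrawn
+Der_colBrinSortie <- 3   # column holding the reach where water is released
+Derivation1(output_file,liste_deriv,Der_colNom,Der_colBrinEntree)
+Derivation2(output_file,liste_deriv,Der_colNom,Der_colBrinEntree,Der_colBrinSortie)
+}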
+############################################################################################## +############################################################################################## +###### Fonction permettant de transferer le volume d'un point a un autre ##################### +############################################################################################## +# Parametres : +# - output_file : Fichier de sortie +# - liste_deriv : liste des derivations (avec minimum brin prelevement et brin de sortie) +# - Der_colNom : Indice de la colonne ayant le nom des derivations +# - Der_colBrinEntree : Indice de la colonne ayant le brin d'entree des derivations +# - Der_colBrinSortie : Indice de la colonne ayant le brin de sortie des derivations +# +############################################################################################## +Derivation2 = function (output_file,liste_deriv,Der_colNom,Der_colBrinEntree,Der_colBrinSortie){ +for (k in c(1:length(as.character(liste_deriv[,Der_colNom])))){ + +write.table(paste(' <component class="org.unijena.j2k.routing.Reach2ReachTransfer" enabled="true" name="',as.character(liste_deriv[k,Der_colNom]),'" version="1.0_0"> + <var name="targetNames" value="inRD1;inRD2;inRG1;inRG2"/> + <var name="sourceNames" value="actRD1;actRD2;actRG1;actRG2"/> + <var attribute="reaches" context="J2K_rhone" name="reaches"/> + <var name="targetReachID" value="',liste_deriv[k,Der_colBrinSortie],'"/> + <var attribute="',as.character(liste_deriv[k,Der_colNom]),'FO" context="TimeLoop" name="upperBound"/> + <var attribute="',as.character(liste_deriv[k,Der_colNom]),'_tvRD1;',as.character(liste_deriv[k,Der_colNom]),'_tvRD2;',as.character(liste_deriv[k,Der_colNom]),'_tvRG1;',as.character(liste_deriv[k,Der_colNom]),'_tvRG2" context="TimeLoop" name="volumes"/> + <var name="lowerBound" value="0"/> + <var name="sourceReachID" value="',liste_deriv[k,Der_colBrinEntree],'"/> + <var name="fraction" value="1"/> + </component>',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) + } + +} + +############################################################################################## +############################################################################################## +###### Partie du code qui ajoute les FO reelles au TimeLoop ################################## +############################################################################################## +# Parametres : +# - output_file : Fichier de sortie +# - liste_barrage : liste des ouvrages (avec minimum Smax(enMm3) la capacite max de l'ouvrage,V0(enMm3) le volume en janvier et un brin sortie) +# - Bar_colNom : Indice de la colonne ayant le nom des barrages +# - Bar_colBrinSortie : Indice de la colonne ayant le brin de sortie des barrages +# +############################################################################################## +Barrage1 = function(output_file,liste_barrage,Bar_colNom,Bar_colBrinSortie){ + +write.table(t(liste_barrage[,Bar_colBrinSortie]),paste(Dossier_Temp,'Boucle_temp.txt',sep=''),col.names=F,row.names=F,quote=F,append=F,sep=';') +liste_bar <- read.table(paste(Dossier_Temp,'Boucle_temp.txt',sep='')) +file.remove(paste(Dossier_Temp,'Boucle_temp.txt',sep='')) + +write.table(paste(' <contextcomponent class="jams.components.conditional.SwitchContext" enabled="false" name="FO_final" version="1.0_1"> + <var name="values" value="',as.character(liste_bar[1,1]),'"/> + <var attribute="ID" context="ReachLoop" 
name="attribute"/>',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) + +for (k in as.character(liste_barrage[,Bar_colNom])){ + +write.table(paste(' <component class="org.unijena.j2k.aggregate.WeightedSumAggregator" enabled="true" name="',k,'" version="1.0_0"> + <var attribute="actFO" context="ReachLoop" name="value"/> + <var attribute="',k,'_actFO" context="TimeLoop" name="sum"/> + </component>',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) + } + +write.table(' </contextcomponent>',output_file,col.names=F,row.names=F,quote=F,append=T) + +} + +############################################################################################## +############################################################################################## +###### Partie du code qui ajoute les stocks dans les barrages au TimeLoop #################### +############################################################################################## +# Parametres : +# - output_file : Fichier de sortie +# - liste_barrage : liste des ouvrages (avec minimum Smax(enMm3) la capacite max de l'ouvrage,V0(enMm3) le volume en janvier et un brin sortie) +# - Bar_colNom : Indice de la colonne ayant le nom des barrages +# - Bar_colBrinSortie : Indice de la colonne ayant le brin de sortie des barrages +# +############################################################################################## +Storage = function (output_file,liste_barrage,Bar_colNom,Bar_colBrinSortie){ +write.table(t(liste_barrage[,Bar_colBrinSortie]),paste(Dossier_Temp,'Boucle_temp.txt',sep=''),col.names=F,row.names=F,quote=F,append=F,sep=';') +liste_bar <- read.table(paste(Dossier_Temp,'Boucle_temp.txt',sep='')) +file.remove(paste(Dossier_Temp,'Boucle_temp.txt',sep='')) + +write.table(paste(' <contextcomponent class="jams.components.conditional.SwitchContext" enabled="false" name="Storage_final" version="1.0_1"> + <var name="values" value="',as.character(liste_bar[1,1]),'"/> + <var attribute="ID" context="ReachLoop" name="attribute"/>',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) + + +for (k in as.character(liste_barrage[,Bar_colNom])){ +write.table(paste(' <component class="org.unijena.j2k.aggregate.WeightedSumAggregator" enabled="true" name="',k,'" version="1.0_0"> + <var attribute="Storage" context="ReachLoop" name="value"/> + <var attribute="',k,'_Storage" context="TimeLoop" name="sum"/> + </component>',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) + } +write.table(' </contextcomponent>',output_file,col.names=F,row.names=F,quote=F,append=T) +} + + + +############################################################################################## +############################################################################################## +###### Code qui ecrit le fichier final du modele J2000-Rhone ################################# +############################################################################################## +# Parametres : +# - Auteur : Auteur du modele +# - Stations : liste des stations retenues +# - Nom_Modele : Nom du modele (le nom qui sera ecrit quand vous lancerez le modele avec JAMS (attention, ce n'est pas le nom du fichier de sortie!) 
+# - Sorties : Sorties directes du modele (en mm ou sans unite) +# - Dossier_Temp : Dossier ou sera enregistre le modele final (ainsi que des boucles intermediaires) +# - SortiesL : Sorties converties en mm (division par la surface) +# - output_file : Fichier de sortie +# - variables : Nom des composantes du debit ecrites dans le TimeLoop +# - Model_OutputsL : Autres variables pour lesquelles on calcule la moyenne sur tout le bassin (dont l'unite est le litre) +# - Model_Outputs : Autres variables pour lesquelles on calcule la moyenne sur tout le bassin (dont l'unite est differente du litre) +# - Barrage : si 'oui', on ajoute les modules relatifs aux barrages +# - Irrigation : si 'oui', on ajoute les modules relatifs a l'irrigation +# +############################################################################################### +EcrituremodeleFunction <- function (Auteur,Stations,Nom_Modele,Sorties,Dossier_Temp,SortiesL,output_file,variables, Model_OutputsL,Model_Outputs,Barrage,Irrigation){ +output_file = paste(Dossier_Temp,output_file,sep='') +#Liste des objets a mettre dans le catchmentReseter +Catch_Reset <- catchResetfunction (Model_Outputs,Model_OutputsL,Dossier_Temp,Barrage) +#Ecriture du modele +write.table(paste('<?xml version="1.0" encoding="UTF-8" standalone="no"?> +<model author="',Auteur,'" date="2013-04-03 00:00" helpbaseurl="http://jams.uni-jena.de/jamswiki" name="',Nom_Modele,'" version="3.0"> + <description><![CDATA[The standard J2K hydrological model]]></description> + <var name="workspaceDirectory" value=""/> + <launcher> + <group name="Main"/> + <group name="Plots & Maps"> + <subgroup name="Plots"/> + <subgroup name="Maps"/> + </group> + <group name="Initialising"> + <property attribute="FCAdaptation" component="InitSoilWater" description="Multiplier for field capacity" name="Multiplier for field capacity" range="0.0;5.0" type="Double"/> + <property attribute="ACAdaptation" component="InitSoilWater" description="Multiplier for air capacity" name="Multiplier for air capacity" range="0.0;5.0" type="Double"/> + <property attribute="initRG1" component="InitGroundWater" description="Initial storage of RG1 relative to maximum storage" name="InitGroundWater.initRG1" range="0.0;1.0" type="Double"/> + <property attribute="initRG2" component="InitGroundWater" description="Initial storage of RG2 relative to maximum storage" name="InitGroundWater.initRG2" range="0.0;1.0" type="Double"/> + </group> + <group name="Interception"/> + <group name="Snow"/> + <group name="SoilWater"/> + <group name="Groundwater"/> + <group name="ReachRouting"> + <property attribute="flowRouteTA" component="J2KProcessReachRouting" description="Flood routing coefficient" name="flowRouteTA" range="0.0;100.0" type="Double"/> + </group> + </launcher> + <datastores> + <outputdatastore context="TimeLoop" enabled="true" name="TimeLoop"> + <trace>',sep=''),output_file,col.names=F,row.names=F,quote=F,append=F) +# Application de la boucle pour sortir les composantes du debit dans TimeLoop ### +for (i in Stations){ +Nom_Boucle = i +Boucle_sorties(Nom_Boucle,output_file,Dossier_Temp,SortiesL,Sorties) +k=0 +while (k !=length(which(Correspondance[,1]==Nom_Boucle))){ +Num_Brin <- as.character(Correspondance[which(Correspondance[,1]==Nom_Boucle),2][k+1]) +for (variable in variables){ +Boucle_output(Num_Brin,output_file,variable) +} +k = k+1 +} +} +# Reprise du code +# Ecriture du HRULoop et du ReachLoop +write.table(paste(' </trace> + </outputdatastore> + <outputdatastore context="ReachLoop" enabled="false" 
name="ReachLoop"> + <trace> + <attribute id="reachOutRD1"/> + <attribute id="reachOutRD2"/> + <attribute id="reachOutRG1"/> + <attribute id="simRunoff"/> + </trace> + </outputdatastore> + <outputdatastore context="HRULoop" enabled="false" name="HRULoop"> + <trace/> + </outputdatastore> + </datastores> + <preprocessors> + <metaprocessor class="jams.components.concurrency.ConcurrentContextProcessor" context="HRULoop" enabled="true"> + <property name="partitioner_class" value="jams.components.concurrency.EntityPartitioner"/> + <property name="exclude_component" value="J2KProcessRouting,HRU2ReachRouting;SpatialWeightedSumAggregator1;SpatialWeightedSumAggregator2"/> + <property name="scale_factor" value="1"/> + </metaprocessor> + </preprocessors> +# Parametres globaux du modele (snow_trs, snow_trans, date notamment) + <attributelists/> + <attribute class="jams.data.Attribute$Double" name="snow_trs" value="2"/> + <attribute class="jams.data.Attribute$TimeInterval" name="timeInterval" value="1985-01-01 07:30 2012-12-31 07:30 6 1"/> + <attribute class="jams.data.Attribute$Double" name="snow_trans" value="3"/> + <attribute class="jams.data.Attribute$Integer" name="data_caching" value="2"/> +#Lecture des fichiers parametres + <contextcomponent class="jams.model.JAMSContext" enabled="true" name="ParameterInput" version="1.0_0"> + <component class="org.unijena.j2k.io.StandardEntityReader" enabled="true" name="EntityReader" version="1.2"> + <var name="reachFileName" value="parameter/reach.par"/> + <var name="hruFileName" value="parameter/hrus.par"/> + <var attribute="hrus" context="',Nom_Modele,'" name="hrus"/> + <var attribute="reaches" context="',Nom_Modele,'" name="reaches"/> + </component> + <component class="org.unijena.j2k.io.StandardLUReader" enabled="true" name="LUReader" version="1.1_0"> + <var name="luFileName" value="parameter/landuse.par"/> + <var attribute="hrus" context="',Nom_Modele,'" name="hrus"/> + </component> + <component class="org.unijena.j2k.io.StandardSoilParaReader" enabled="true" name="STReader" version="1.0_0"> + <var name="stFileName" value="parameter/soils.par"/> + <var attribute="hrus" context="',Nom_Modele,'" name="hrus"/> + </component> + <component class="org.unijena.j2k.io.StandardGroundwaterParaReader" enabled="true" name="GWReader" version="1.1_0"> + <var name="gwFileName" value="parameter/hgeo.par"/> + <var attribute="hrus" context="',Nom_Modele,'" name="hrus"/> + </component> + </contextcomponent> +# Initialisation (taille des reservoirs, aire du bassin, coeff cult, LAI...) 
+ <contextcomponent class="jams.model.JAMSContext" enabled="true" name="Initialization" version="1.0_0"> + <contextcomponent class="jams.model.JAMSSpatialContext" enabled="true" name="CatchmentInit" version="1.0_0"> + <var attribute="hrus" context="',Nom_Modele,'" name="entities"/> + <component class="org.unijena.j2k.aggregate.SumAggregator" enabled="true" name="AreaCalculator" version="1.0_0"> + <var attribute="area" context="CatchmentInit" name="value"/> + <var attribute="area" context="',Nom_Modele,'" name="sum"/> + </component> + </contextcomponent> + <contextcomponent class="jams.model.JAMSSpatialContext" enabled="true" name="HRUInit" version="1.0_0"> + <var attribute="hrus" context="',Nom_Modele,'" name="entities"/> + <component class="org.unijena.j2k.CalcAreaWeight" enabled="true" name="AreaWeight" version="1.0_0"> + <var attribute="area" context="HRUInit" name="entityArea"/> + <var attribute="areaweight" context="HRUInit" name="areaWeight"/> + <var attribute="area" context="',Nom_Modele,'" name="catchmentArea"/> + </component> + <component class="tools.CalcLanduseStateVars_cropcoeff_LAI" enabled="true" name="CalcLanduseStateVars_cropcoeff_LAI" version="1.0_1"> + <var attribute="cropcoeffArray" context="HRUInit" name="cropcoeffArray"/> + <var attribute="LAIArray" context="HRUInit" name="LAIArray"/> + <var attribute="hrus" context="',Nom_Modele,'" name="entities"/> + </component> + <component class="org.unijena.j2k.soilWater.InitJ2KProcessLumpedSoilWaterStates" enabled="true" name="InitSoilWater" version="1.0_0"> + <var name="ACAdaptation" value="1.0"/> + <var attribute="actLPS" context="HRUInit" name="actLPS"/> + <var name="FCAdaptation" value="1.0"/> + <var attribute="maxMPS" context="HRUInit" name="maxMPS"/> + <var attribute="hrus" context="',Nom_Modele,'" name="entities"/> + <var attribute="rootDepth" context="HRUInit" name="rootDepth"/> + <var attribute="maxLPS" context="HRUInit" name="maxLPS"/> + <var attribute="area" context="HRUInit" name="area"/> + <var attribute="satSoil" context="HRUInit" name="satSoil"/> + <var attribute="actMPS" context="HRUInit" name="actMPS"/> + <var attribute="satMPS" context="HRUInit" name="satMPS"/> + <var attribute="satLPS" context="HRUInit" name="satLPS"/> + </component> + <component class="org.unijena.j2k.groundwater.InitJ2KProcessGroundwater" enabled="true" name="InitGroundWater" version="1.0_0"> + <var attribute="actRG2" context="HRUInit" name="actRG2"/> + <var attribute="actRG1" context="HRUInit" name="actRG1"/> + <var attribute="maxRG2" context="HRUInit" name="maxRG2"/> + <var attribute="maxRG1" context="HRUInit" name="maxRG1"/> + <var name="initRG1" value="0.0"/> + <var name="initRG2" value="0.0"/> + <var attribute="hrus" context="',Nom_Modele,'" name="entities"/> + </component> + </contextcomponent>',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) + +#### Boucle d'initialisation des stations ####### +for (i in Stations){ +Nom_Boucle = i +Water2 = Water(Nom_Boucle,Correspondance,reach,Corres_ID_Stations) +Boucle_Init(Nom_Boucle,Nom_Modele,output_file,Dossier_Temp,Water2,Corres_ID_Stations) +} + +#Boucle temporaire pour la creation des objets pour les moyennes de bassin +write.table(t(Model_OutputsL),paste(Dossier_Temp,'Boucle_temp6.txt',sep=''),col.names=F,row.names=F,quote=F,append=F,sep=';') +Model_Outputs2L <- read.table(paste(Dossier_Temp,'Boucle_temp6.txt',sep='')) +file.remove(paste(Dossier_Temp,'Boucle_temp6.txt',sep='')) 
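+# Same temporary-file round-trip for the other basin-average variables (Model_Outputs,
+# units other than litres); the resulting ';'-separated string is reused below in the
+# SpatialWeightedSumAggregator components.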
+write.table(t(Model_Outputs),paste(Dossier_Temp,'Boucle_temp6.txt',sep=''),col.names=F,row.names=F,quote=F,append=F,sep=';') +Model_Outputs2 <- read.table(paste(Dossier_Temp,'Boucle_temp6.txt',sep='')) +file.remove(paste(Dossier_Temp,'Boucle_temp6.txt',sep='')) + +#Reprise de l'ecriture +# Ecriture du catchmentResetter +write.table(paste(' </contextcomponent> + <contextcomponent class="jams.model.JAMSTemporalContext" enabled="true" name="TimeLoop" version="1.0_1"> + <var attribute="timeInterval" context="',Nom_Modele,'" name="timeInterval"/> + <var attribute="time" context="',Nom_Modele,'" name="current"/> + <component class="jams.components.gui.JAMSExecInfo" enabled="true" name="ExecutionInfo" version="1.1_0"/> + <component class="org.unijena.j2k.DoubleSetter" enabled="true" name="CatchmentResetter" version="1.0_0"> + <var name="value" value="0"/> + <var attribute="',Catch_Reset[1,1],'" context="TimeLoop" name="attributes"/> + </component> + <contextcomponent class="jams.model.JAMSContext" enabled="true" name="TSInput" version="1.0_0">',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) + +# Si on a active l'option barrages, on ajoute la lecture du fichier FO.dat +if (Barrage == 'oui'){ +write.table(paste(' <component class="management.TSDataStoreReader_ID" enabled="true" name="FODataReader" version="1.0_0"> + <var name="id" value="FO"/> + <var attribute="dataArrayFO" context="J2K_rhone" name="dataArray"/> + <var attribute="timeInterval" context="J2K_rhone" name="timeInterval"/> + <var attribute="namesFO" context="J2K_rhone" name="names"/> + <var attribute="dataSetNameFO" context="J2K_rhone" name="dataSetName"/> + </component>',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) + } + +#Lecture des fichiers climatiques +write.table(paste(' <component class="jams.components.io.TSDataStoreReader" enabled="true" name="TmeanDataReader" version="1.2"> + <var name="id" value="tmean"/> + <var attribute="timeInterval" context="',Nom_Modele,'" name="timeInterval"/> + <var attribute="xCoordTmean" context="',Nom_Modele,'" name="xCoord"/> + <var attribute="dataArrayTmean" context="',Nom_Modele,'" name="dataArray"/> + <var attribute="regCoeffTmean" context="',Nom_Modele,'" name="regCoeff"/> + <var attribute="elevationTmean" context="',Nom_Modele,'" name="elevation"/> + <var attribute="dataSetNameTmean" context="',Nom_Modele,'" name="dataSetName"/> + <var attribute="yCoordTmean" context="',Nom_Modele,'" name="yCoord"/> + </component> + <component class="jams.components.io.TSDataStoreReader" enabled="true" name="PrecipDataReader" version="1.2"> + <var name="id" value="rain_hybride"/> + <var attribute="timeInterval" context="',Nom_Modele,'" name="timeInterval"/> + <var attribute="xCoordPrecip" context="',Nom_Modele,'" name="xCoord"/> + <var attribute="dataArrayPrecip" context="',Nom_Modele,'" name="dataArray"/> + <var attribute="regCoeffPrecip" context="',Nom_Modele,'" name="regCoeff"/> + <var attribute="elevationPrecip" context="',Nom_Modele,'" name="elevation"/> + <var attribute="dataSetNamePrecip" context="',Nom_Modele,'" name="dataSetName"/> + <var attribute="yCoordPrecip" context="',Nom_Modele,'" name="yCoord"/> + </component> + <component class="jams.components.io.TSDataStoreReader" enabled="true" name="RefETReader" version="1.2"> + <var name="id" value="refet"/> + <var attribute="timeInterval" context="',Nom_Modele,'" name="timeInterval"/> + <var attribute="xCoordRefET" context="',Nom_Modele,'" name="xCoord"/> + <var attribute="dataArrayRefET" context="',Nom_Modele,'" 
name="dataArray"/> + <var attribute="regCoeffRefET" context="',Nom_Modele,'" name="regCoeff"/> + <var attribute="elevationRefET" context="',Nom_Modele,'" name="elevation"/> + <var attribute="dataSetNameRefET" context="',Nom_Modele,'" name="dataSetName"/> + <var attribute="yCoordRefET" context="',Nom_Modele,'" name="yCoord"/> + </component> + </contextcomponent> + +#Regionalisation des LAI, coeff cultural et fichiers climatiques + <contextcomponent class="jams.model.JAMSSpatialContext" enabled="true" name="HRULoop" version="1.0_0"> + <var attribute="hrus" context="',Nom_Modele,'" name="entities"/> + <contextcomponent class="jams.model.JAMSContext" enabled="true" name="Regionalization" version="1.0_0"> +#LAI + <component class="tools.J2KArrayGrabber_cropcoeff_LAI" enabled="true" name="J2KArrayGrabber_cropcoeff_LAI" version="1.0_0"> + <var attribute="actcropcoeff" context="HRULoop" name="actCropCoeff"/> + <var attribute="time" context="',Nom_Modele,'" name="time"/> + <var attribute="actSlAsCf" context="HRULoop" name="actSlAsCf"/> + <var attribute="cropcoeffArray" context="HRULoop" name="cropcoeffArray"/> + <var attribute="actLAI" context="HRULoop" name="actLAI"/> + <var attribute="LAIArray" context="HRULoop" name="LAIArray"/> + <var name="tempRes" value="d"/> + </component> +#Tmean + <component class="org.unijena.j2000g.lowmem.Regionalisation" enabled="true" name="TmeanLmRegionaliser" version="1.0_0"> + <var attribute="dataArrayTmean" context="J2K_rhone" name="dataArray"/> + <var attribute="regCoeffTmean" context="J2K_rhone" name="regCoeff"/> + <var attribute="tmean" context="HRULoop" name="dataValue"/> + <var attribute="y" context="HRULoop" name="entityY"/> + <var attribute="x" context="HRULoop" name="entityX"/> + <var name="rsqThreshold" value="0"/> + <var attribute="xCoordTmean" context="J2K_rhone" name="statX"/> + <var attribute="tmeanWeights" context="HRULoop" name="statWeights"/> + <var attribute="yCoordTmean" context="J2K_rhone" name="statY"/> + <var attribute="elevationTmean" context="J2K_rhone" name="statElevation"/> + <var name="pidw" value="2"/> + <var attribute="tmeanOrder" context="HRULoop" name="statOrder"/> + <var attribute="elevation" context="HRULoop" name="entityElevation"/> + <var name="nidw" value="4"/> + <var name="elevationCorrection" value="false"/> + </component> +#Precip + <component class="org.unijena.j2000g.lowmem.Regionalisation" enabled="true" name="PrecipLmRegionaliser" version="1.0_0"> + <var attribute="dataArrayPrecip" context="J2K_rhone" name="dataArray"/> + <var attribute="regCoeffPrecip" context="J2K_rhone" name="regCoeff"/> + <var attribute="precip" context="HRULoop" name="dataValue"/> + <var attribute="y" context="HRULoop" name="entityY"/> + <var attribute="x" context="HRULoop" name="entityX"/> + <var name="rsqThreshold" value="0"/> + <var attribute="xCoordPrecip" context="J2K_rhone" name="statX"/> + <var attribute="precipWeights" context="HRULoop" name="statWeights"/> + <var attribute="yCoordPrecip" context="J2K_rhone" name="statY"/> + <var attribute="elevationPrecip" context="J2K_rhone" name="statElevation"/> + <var name="pidw" value="2"/> + <var attribute="precipOrder" context="HRULoop" name="statOrder"/> + <var name="fixedMinimum" value="0"/> + <var attribute="elevation" context="HRULoop" name="entityElevation"/> + <var name="nidw" value="4"/> + <var name="elevationCorrection" value="false"/> + </component> +#Ref ET + <component class="org.unijena.j2000g.lowmem.Regionalisation" enabled="true" name="RefETLmRegionaliser" version="1.0_0"> + <var 
attribute="dataArrayRefET" context="J2K_rhone" name="dataArray"/> + <var attribute="regCoeffRefET" context="J2K_rhone" name="regCoeff"/> + <var attribute="refET" context="HRULoop" name="dataValue"/> + <var attribute="y" context="HRULoop" name="entityY"/> + <var attribute="x" context="HRULoop" name="entityX"/> + <var name="rsqThreshold" value="0"/> + <var attribute="xCoordRefET" context="J2K_rhone" name="statX"/> + <var attribute="refetWeights" context="HRULoop" name="statWeights"/> + <var attribute="yCoordRefET" context="J2K_rhone" name="statY"/> + <var attribute="elevationRefET" context="J2K_rhone" name="statElevation"/> + <var name="pidw" value="2"/> + <var attribute="refetOrder" context="HRULoop" name="statOrder"/> + <var name="fixedMinimum" value="0"/> + <var attribute="elevation" context="HRULoop" name="entityElevation"/> + <var name="nidw" value="4"/> + <var name="elevationCorrection" value="false"/> + </component> + </contextcomponent>',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) + +# Ajout des premiers modules d'irrigation(application de l'irrigation) +if (Irrigation == 'oui'){ +IrrigationModule1(output_file) +} + +#Reprise de l'ecriture du modele +#Recuperation du crop coeff +write.table(paste(' <component class="crop.CropCoefficient" enabled="true" name="CropCoefficient_1" version="0.1_0"> + <var attribute="potET" context="HRULoop" name="PotET"/> + <var attribute="refET" context="HRULoop" name="RefET"/> + <var attribute="actcropcoeff" context="HRULoop" name="CropCoeff"/> + </component> +# Calcul de la partition pluie neige + <component class="snow.CalcRainSnowParts_IRSTEA" enabled="true" name="CalcRainSnowParts_IRSTEA" version="1.0_0"> + <var attribute="area" context="HRULoop" name="area"/> + <var attribute="snow_trs" context="J2K_rhone" name="snow_trs"/> + <var attribute="rain" context="HRULoop" name="rain"/> + <var attribute="precip" context="HRULoop" name="precip"/> + <var attribute="snow" context="HRULoop" name="snow"/> + <var attribute="tmean" context="HRULoop" name="tmean"/> + <var attribute="snow_trans" context="J2K_rhone" name="snow_trans"/> + </component> + # Interception + <component class="interception.J2KProcessInterception_conv_potET" enabled="true" name="J2KProcessInterception3_1" version="1.0_0"> + <var attribute="snow_trs" context="',Nom_Modele,'" name="snow_trs"/> + <var attribute="netSnow" context="HRULoop" name="netSnow"/> + <var name="a_snow" value="1.5"/> + <var attribute="intercStorage" context="HRULoop" name="intercStorage"/> + <var attribute="throughfall" context="HRULoop" name="throughfall"/> + <var attribute="netRain" context="HRULoop" name="netRain"/> + <var attribute="snow_trans" context="',Nom_Modele,'" name="snow_trans"/> + <var name="a_rain" value="1"/> + <var attribute="area" context="HRULoop" name="area"/> + <var attribute="actET" context="HRULoop" name="actET"/> + <var attribute="snow" context="HRULoop" name="snow"/> + <var attribute="actLAI" context="HRULoop" name="actLAI"/> + <var attribute="potET" context="HRULoop" name="potET"/> + <var attribute="tmean" context="HRULoop" name="tmean"/> + <var attribute="rain" context="HRULoop" name="rain"/> + <var attribute="interception" context="HRULoop" name="interception"/> + </component> +# Module de neige + <component class="snow.J2KProcessSnow_IRSTEA" enabled="true" name="J2KProcessSnow_IRSTEA" version="1.0_0"> + <var attribute="totDens" context="HRULoop" name="totDens"/> + <var attribute="netSnow" context="HRULoop" name="netSnow"/> + <var name="ccf_factor" value="0.0012"/> + <var 
name="t_factor" value="1.84"/> + <var attribute="snowCover" context="HRULoop" name="snowCover"/> + <var name="r_factor" value="0.110"/> + <var attribute="tmean" context="HRULoop" name="meanTemp"/> + <var attribute="netRain" context="HRULoop" name="netRain"/> + <var attribute="drySWE" context="HRULoop" name="drySWE"/> + <var attribute="snowColdContent" context="HRULoop" name="snowColdContent"/> + <var name="baseTemp" value="0.1"/> + <var attribute="time" context="J2K_rhone" name="time"/> + <var attribute="dryDens" context="HRULoop" name="dryDens"/> + <var attribute="area" context="HRULoop" name="area"/> + <var attribute="snowAge" context="HRULoop" name="snowAge"/> + <var attribute="actSlAsCf" context="HRULoop" name="actSlAsCf"/> + <var name="snowCritDens" value="0.7"/> + <var attribute="snowTotSWE" context="HRULoop" name="snowTotSWE"/> + <var name="active" value="true"/> + <var attribute="snowMelt" context="HRULoop" name="snowMelt"/> + <var attribute="snowDepth" context="HRULoop" name="snowDepth"/> + <var name="g_factor" value="1.739"/> + </component> +# Calcul de l ecoulement de l eau dans le sol + <component class="soil.J2KProcessLumpedSoilWater_Tom" enabled="true" name="J2KProcessLumpedSoilWater_Tom" version="1.1_0"> + <var attribute="inRD1" context="HRULoop" name="inRD1"/> + <var attribute="netRain" context="HRULoop" name="netRain"/> + <var attribute="inRD2" context="HRULoop" name="inRD2"/> + <var name="soilImpGT50" value="0.45"/> + <var name="soilLatVertLPS" value="1"/> + <var name="soilImpGT90" value="0.05"/> + <var attribute="actMPS2" context="HRULoop" name="actMPS2"/> + <var attribute="infAfterMPS" context="HRULoop" name="infAfterMPS"/> + <var name="soilConcRD2" value="1"/> + <var name="soilImpGT10" value="0.85"/> + <var name="soilDistMPSLPS" value="0"/> + <var attribute="netSnow" context="HRULoop" name="netSnow"/> + <var attribute="actMPS" context="HRULoop" name="actMPS"/> + <var attribute="potET" context="HRULoop" name="potET"/> + <var name="soilImpGT0" value="0.95"/> + <var name="soilPolRed" value="0"/> + <var attribute="area" context="HRULoop" name="area"/> + <var attribute="satMPS" context="HRULoop" name="satMPS"/> + <var attribute="outRD1" context="HRULoop" name="outRD1"/> + <var attribute="maxMPS" context="HRULoop" name="maxMPS"/> + <var attribute="outRD2" context="HRULoop" name="outRD2"/> + <var attribute="MaxInfSnow" context="HRULoop" name="soilMaxInfSnow"/> + <var name="soilImpGT60" value="0.35"/> + <var name="soilDiffMPSLPS" value="5"/> + <var attribute="slope" context="HRULoop" name="slope"/> + <var attribute="infiltration" context="HRULoop" name="infiltration"/> + <var name="soilImpGT20" value="0.75"/> + <var attribute="MaxInfSummer" context="HRULoop" name="soilMaxInfSummer"/> + <var name="soilOutLPS" value="5"/> + <var attribute="maxInf2" context="HRULoop" name="maxInf2"/> + <var name="soilImpGT30" value="0.65"/> + <var name="soilMaxPerc" value="20"/> + <var attribute="MobileWater2" context="HRULoop" name="MobileWater2"/> + <var attribute="actET" context="HRULoop" name="actET"/> + <var name="soilImpGT70" value="0.25"/> + <var attribute="infAfterSealedGrade" context="HRULoop" name="infAfterSealedGrade"/> + <var name="soilLinRed" value="0.9"/> + <var attribute="snowDepth" context="HRULoop" name="snowDepth"/> + <var attribute="actLPS" context="HRULoop" name="actLPS"/> + <var attribute="percolation" context="HRULoop" name="percolation"/> + <var attribute="actDPS" context="HRULoop" name="actDPS"/> + <var name="soilMaxDPS" value="0"/> + <var attribute="sealedGrade" 
context="HRULoop" name="sealedGrade"/> + <var name="soilImpGT40" value="0.55"/> + <var attribute="satLPS" context="HRULoop" name="satLPS"/> + <var attribute="maxLPS" context="HRULoop" name="maxLPS"/> + <var name="soilImpGT80" value="0.15"/> + <var attribute="DeltaETP" context="HRULoop" name="DeltaETP"/> + <var attribute="genRD1" context="HRULoop" name="genRD1"/> + <var attribute="actETintc" context="HRULoop" name="actETintc"/> + <var attribute="snowMelt" context="HRULoop" name="snowMelt"/> + <var name="soilConcRD1" value="1"/> + <var attribute="genRD2" context="HRULoop" name="genRD2"/> + <var attribute="ReductionFactor" context="HRULoop" name="ReductionFactor"/> + <var attribute="satSoil" context="HRULoop" name="satSoil"/> + <var attribute="deltaMPS2" context="HRULoop" name="deltaMPS2"/> + <var attribute="time" context="J2K_rhone" name="time"/> + <var attribute="MaxInfWinter" context="HRULoop" name="soilMaxInfWinter"/> + <var attribute="interflow" context="HRULoop" name="interflow"/> + </component> +# Calcul de l ecoulement souterrain + <component class="soil.J2KProcessGroundwater_satRG1" enabled="true" name="J2KProcessGroundwater_satRG1" version="1.0_2"> + <var attribute="maxMPS" context="HRULoop" name="maxSoilStorage"/> + <var attribute="actMPS" context="HRULoop" name="actSoilStorage"/> + <var attribute="outRD2" context="HRULoop" name="gwExcess"/> + <var name="gwRG2Fact" value="1.0"/> + <var attribute="outRG1" context="HRULoop" name="outRG1"/> + <var attribute="outRG2" context="HRULoop" name="outRG2"/> + <var attribute="maxRG2" context="HRULoop" name="maxRG2"/> + <var attribute="maxRG1" context="HRULoop" name="maxRG1"/> + <var name="gwRG1RG2dist" value="0"/> + <var attribute="inRG1" context="HRULoop" name="inRG1"/> + <var attribute="slope" context="HRULoop" name="slope"/> + <var attribute="actRG2" context="HRULoop" name="actRG2"/> + <var attribute="actRG1" context="HRULoop" name="actRG1"/> + <var attribute="satRG2" context="HRULoop" name="satRG2"/> + <var attribute="satRG1" context="HRULoop" name="satRG1"/> + <var attribute="percolation" context="HRULoop" name="percolation"/> + <var attribute="genRG1" context="HRULoop" name="genRG1"/> + <var name="gwCapRise" value="0"/> + <var name="gwRG1Fact" value="1.0"/> + <var attribute="genRG2" context="HRULoop" name="genRG2"/> + <var attribute="RG2_k" context="HRULoop" name="kRG2"/> + <var attribute="RG1_k" context="HRULoop" name="kRG1"/> + <var attribute="inRG2" context="HRULoop" name="inRG2"/> + </component> +# TemporalSumAggregator (utilite a prouver...) 
+ <component class="jams.components.aggregate.TemporalSumAggregator" enabled="true" name="TemporalSumAggregator1" version="1.0_0"> + <var attribute="time" context="',Nom_Modele,'" name="time"/> + <var attribute="area" context="HRULoop" name="weight"/> + <var attribute="outRD1;outRD2;outRG1;outRG2" context="HRULoop" name="value"/> + <var attribute="outRD1_avg;outRD2_avg;outRG1_avg;outRG2_avg" context="HRULoop" name="sum"/> + <var attribute="timeInterval" context="',Nom_Modele,'" name="aggregationTimeInterval"/> + <var name="average" value="true"/> + </component> +# Routage de HRU a HRU + <component class="jams.components.datatransfer.DoubleTransfer" enabled="true" name="HRU2HRURouting" version="1.0_0"> + <var attribute="outRD1;outRD2;outRG1;outRG2" context="HRULoop" name="values"/> + <var attribute="to_poly" context="HRULoop" name="target"/> + <var name="inNames" value="inRD1;inRD2;inRG1;inRG2"/> + </component> +#Routage de HRU a brin + <component class="jams.components.datatransfer.DoubleTransfer" enabled="true" name="HRU2ReachRouting" version="1.0_0"> + <var attribute="outRD1;outRD2;outRG1;outRG2" context="HRULoop" name="values"/> + <var attribute="to_reach" context="HRULoop" name="target"/> + <var name="inNames" value="inRD1;inRD2;inRG1;inRG2"/> + </component> +# Moyenne sur le bassin complet pour les variables non converties + <component class="org.unijena.j2k.aggregate.WeightedSumAggregator" enabled="true" name="SpatialWeightedSumAggregator1" version="1.0_0"> + <var attribute="areaweight" context="HRULoop" name="weight"/> + <var attribute="',Model_Outputs2[1,1],'" context="HRULoop" name="value"/> + <var attribute="',Model_Outputs2[1,1],'" context="TimeLoop" name="sum"/> + </component> +# Moyenne sur le bassin complet pour les variables a convertir + <component class="org.unijena.j2k.aggregate.WeightedSumAggregator" enabled="true" name="SpatialWeightedSumAggregator2" version="1.0_0"> + <var attribute="area" context="',Nom_Modele,'" name="weight"/> + <var attribute="',Model_Outputs2L[1,1],'" context="HRULoop" name="value"/> + <var attribute="',Model_Outputs2L[1,1],'" context="TimeLoop" name="sum"/> + </component>',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) +#Ajout des modules d'irrigation pour le calcul de la demande +if (Irrigation == 'oui'){ +IrrigationModule2 (output_file) +} + +#Reprise de l'ecriture +#Boucle sur les stations pour les moyennes de bassins +write.table(' </contextcomponent>',output_file,col.names=F,row.names=F,quote=F,append=T) + +for (i in Stations){ +Nom_Boucle = i +Water2 = Water(Nom_Boucle,Correspondance,reach,Corres_ID_Stations) +Boucle_Agreg(Nom_Boucle,Nom_Modele,output_file,Dossier_Temp,Water2,Corres_ID_Stations,SortiesL,Sorties) +} +#Ecriture du ReachLoop +write.table(paste(' <contextcomponent class="jams.model.JAMSSpatialContext" enabled="true" name="ReachLoop" version="1.0_0"> + <var attribute="reaches" context="',Nom_Modele,'" name="entities"/>',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) +#Ajout du module d'irrigation permettant de calculer la quantite d'eau disponible pour l'irrigation +if (Irrigation == 'oui'){ +IrrigationModule3 (output_file) +} +# Ajout de la regionalisation des barrages (necessaire pour extraire la valeur journaliere de la FO +if (Barrage == 'oui'){ +write.table(paste(' <component class="management.Regionalisation_Dam" enabled="true" name="Regionalisation_Dam" version="1.0_0"> + <var attribute="dataArrayFO" context="J2K_rhone" name="dataArray"/> + <var attribute="Smax" context="ReachLoop" 
name="Smax"/> + <var attribute="namesFO" context="J2K_rhone" name="names"/> + <var attribute="ID" context="ReachLoop" name="ID"/> + <var attribute="reaches" context="J2K_rhone" name="entities"/> + <var attribute="FO" context="ReachLoop" name="dataValue"/> + </component> +# Module pour l application de la fonction objectif (on stocke ou on relache) + <component class="management.DamDevice" enabled="true" name="DamDevice" version="3.0_0"> + <var attribute="Storage" context="ReachLoop" name="Storage"/> + <var attribute="inRG1" context="ReachLoop" name="inRG1"/> + <var attribute="time" context="J2K_rhone" name="time"/> + <var attribute="inRD2" context="ReachLoop" name="inRD2"/> + <var attribute="FO" context="ReachLoop" name="FO"/> + <var attribute="inRD2" context="ReachLoop" name="inRD1"/> + <var attribute="Smax" context="ReachLoop" name="Smax"/> + <var attribute="V0" context="ReachLoop" name="V0"/> + <var attribute="actFO" context="ReachLoop" name="FO_fin"/> + <var attribute="inRG2" context="ReachLoop" name="inRG2"/> + </component>',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) + } +# Module de routage brin a brin +write.table(paste(' <component class="org.unijena.j2k.routing.J2KProcessReachRouting" enabled="true" name="J2KProcessReachRouting" version="1.0_1"> + <var attribute="inRD2" context="ReachLoop" name="inRD2"/> + <var attribute="inRD1" context="ReachLoop" name="inRD1"/> + <var attribute="catchmentRD1" context="TimeLoop" name="catchmentRD1"/> + <var attribute="catchmentRD2" context="TimeLoop" name="catchmentRD2"/> + <var attribute="catchmentSimRunoff" context="TimeLoop" name="catchmentSimRunoff"/> + <var attribute="reachOutRD2" context="ReachLoop" name="outRD2"/> + <var attribute="reachOutRD1" context="ReachLoop" name="outRD1"/> + <var attribute="slope" context="ReachLoop" name="slope"/> + <var attribute="actRG2" context="ReachLoop" name="actRG2"/> + <var attribute="actRG1" context="ReachLoop" name="actRG1"/> + <var attribute="length" context="ReachLoop" name="length"/> + <var attribute="TA" context="ReachLoop" name="flowRouteTA"/> + <var name="slopeAsProportion" value="false"/> + <var attribute="simRunoff" context="ReachLoop" name="simRunoff"/> + <var attribute="reachOutRG1" context="ReachLoop" name="outRG1"/> + <var attribute="width" context="ReachLoop" name="width"/> + <var attribute="reachOutRG2" context="ReachLoop" name="outRG2"/> + <var attribute="catchmentRG2" context="TimeLoop" name="catchmentRG2"/> + <var attribute="rough" context="ReachLoop" name="roughness"/> + <var attribute="channelStorage" context="ReachLoop" name="channelStorage"/> + <var attribute="reaches" context="J2K_rhone" name="entities"/> + <var attribute="catchmentRG1" context="TimeLoop" name="catchmentRG1"/> + <var attribute="inRG1" context="ReachLoop" name="inRG1"/> + <var attribute="actRD2" context="ReachLoop" name="actRD2"/> + <var attribute="actRD1" context="ReachLoop" name="actRD1"/> + <var name="tempRes" value="d"/> + <var attribute="inRG2" context="ReachLoop" name="inRG2"/> + </component> +# Agregation stock en riviere + <component class="org.unijena.j2k.aggregate.SumAggregator" enabled="true" name="ChannelStorageAggregator" version="1.0_0"> + <var attribute="channelStorage" context="ReachLoop" name="value"/> + <var attribute="channelStorage" context="TimeLoop" name="sum"/> + </component>',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) + +# Ajout de la boucle pour passer les composantes du debit depuis le ReachLoop vers le TimeLoop +liste_brins = NULL +for (i in Stations){ 
+liste_brins <- c(liste_brins,as.character(Correspondance[which (Correspondance[,1] == i),2])) +} +write.table(t(liste_brins),paste(Dossier_Temp,'Boucle_temp2.txt',sep=''),col.names=F,row.names=F,quote=F,append=F,sep=';') +liste_br <- read.table(paste(Dossier_Temp,'Boucle_temp2.txt',sep='')) +file.remove(paste(Dossier_Temp,'Boucle_temp2.txt',sep='')) +for (variable in variables){ +indice_var = as.numeric(which(variable==variables)) +SwitchContext(variable,Correspondance,Stations,liste_br,output_file,indice_var) +} + +# Ajout des derivations (identification de la FO des derivations) et sortie des variables FO reelle et stockage dans le barrage +if (Barrage =='oui'){ +Derivation1 (output_file,liste_deriv,Der_colNom,Der_colBrinEntree) +Barrage1 (output_file,liste_barrage,Bar_colNom,Bar_colBrinSortie) +Storage (output_file,liste_barrage,Bar_colNom,Bar_colBrinSortie) +} + +#Module pour la conversion des composantes du debit (de L/j a L/s) +write.table(paste(' </contextcomponent> + <component class="jams.components.tools.JAMSUnitConverter" enabled="true" name="SimRunoffConverter" version="1.0_0"> + <var name="outUnit" value="L/s"/> + <var attribute="catchmentSimRunoff_qm" context="TimeLoop" name="outValue"/> + <var name="inUnit" value="L/day"/> + <var attribute="catchmentSimRunoff" context="TimeLoop" name="inValue"/> + </component> + <component class="jams.components.tools.JAMSUnitConverter" enabled="true" name="SimRD2Converter" version="1.0_0"> + <var name="outUnit" value="L/s"/> + <var attribute="catchmentRD2_qm" context="TimeLoop" name="outValue"/> + <var name="inUnit" value="L/day"/> + <var attribute="catchmentRD2" context="TimeLoop" name="inValue"/> + </component> + <component class="jams.components.tools.JAMSUnitConverter" enabled="true" name="SimRD1Converter" version="1.0_0"> + <var name="outUnit" value="L/s"/> + <var attribute="catchmentRD1_qm" context="TimeLoop" name="outValue"/> + <var name="inUnit" value="L/day"/> + <var attribute="catchmentRD1" context="TimeLoop" name="inValue"/> + </component> + <component class="jams.components.tools.JAMSUnitConverter" enabled="true" name="SimRG1Converter" version="1.0_0"> + <var name="outUnit" value="L/s"/> + <var attribute="catchmentRG1_qm" context="TimeLoop" name="outValue"/> + <var name="inUnit" value="L/day"/> + <var attribute="catchmentRG1" context="TimeLoop" name="inValue"/> + </component> + <component class="jams.components.tools.JAMSUnitConverter" enabled="true" name="SimRG2Converter" version="1.0_0"> + <var name="outUnit" value="L/s"/> + <var attribute="catchmentRG2_qm" context="TimeLoop" name="outValue"/> + <var name="inUnit" value="L/day"/> + <var attribute="catchmentRG2" context="TimeLoop" name="inValue"/> + </component>',sep=''),output_file,col.names=F,row.names=F,quote=F,append=T) + +#Application des FO pour les derivations +if(Barrage == 'oui'){ +Derivation2 (output_file,liste_deriv,Der_colNom,Der_colBrinEntree,Der_colBrinSortie) +} +#Fin du fichier +write.table(' </contextcomponent> +</model>',output_file,col.names=F,row.names=F,quote=F,append=T) +} \ No newline at end of file diff --git a/irrigation-R-codes/lib_old/J2000_postprocessing_functions.R b/irrigation-R-codes/lib_old/J2000_postprocessing_functions.R new file mode 100644 index 0000000000000000000000000000000000000000..8178efc529b67466a2f93868b28b1a73737806e6 --- /dev/null +++ b/irrigation-R-codes/lib_old/J2000_postprocessing_functions.R @@ -0,0 +1,311 @@ +### J2000 post-processing functions for simulation results analysis +### Base = simulations with daily time step (but 
could maybe work for other time steps, not tested yet)
+
+####### INTERANNUAL MONTHLY AVERAGE ANALYSIS #########
+
+### Function J2000MonthStats
+### function that calculates monthly statistics for J2000 outputs
+## (discharge, runoff contribution, mean saturation levels, evapotranspiration, runoff coeff)
+## inputs:
+## - multivariate zoo object from ReadTimeLoop function (daily time step, runoff in m3/s, rain and evap in mm)
+## - size of catchment in m2 (for calculation of runoff coefficient)
+
+J2000MonthStats <- function(Data, size)
+{
+# Extract discharge data from Data object (total runoff + runoff contributions + obsrunoff)
+Q <- Data[,4:11]
+# Extract rain and evapotranspiration data (Potential ET and Actual ET)
+RET <- Data[,1:3]
+
+# Calculate monthly mean values for each discharge column and saturation levels
+# (grouping by calendar month gives the interannual average for each month)
+Qmonth <-aggregate(Q,format(as.Date(index(Q)), '%m'),FUN=mean)
+
+# Calculate cumulative values for rain and evapotranspiration for each month of each year
+temp <-aggregate(RET,as.yearmon(index(RET)),FUN=sum)
+# Then calculate interannual average for each month
+RETmonth <-aggregate(temp,format(as.Date(index(temp)), '%m'),FUN=mean)
+
+# Calculate runoff coefficient for each month of each year (daily time step only!!)
+temp2 <-aggregate(Q$catchmentSimRunoff_qm*86400,as.yearmon(index(Q)),FUN=sum)/(temp$precip*size/1000)
+# Calculate average monthly stat
+RCmonth <- aggregate(temp2,format(as.Date(index(temp2)), '%m'),FUN=mean)
+# Calculate average soil saturation
+Soilsat <- aggregate((Q$satLPS+Q$satMPS)/2, format(as.Date(index(Q)), '%m'),FUN=mean)
+
+# Merge and return zoo object
+Month <- merge(Qmonth,RETmonth,RCmonth,Soilsat)
+Month
+}
+
+### Function J2000ReachMonthStats
+### function that calculates base monthly statistics for J2000 outputs
+## (discharge, runoff contributions)
+## input:
+## - multivariate zoo object from ReadReachExtraction function (daily time step, runoff in m3/s)
+
+J2000ReachMonthStats <- function(Data)
+{
+  # Extract total runoff
+  Q <- Data[,1]
+  # Extract runoff contributions
+  Contribs <- Data[,3:6]
+
+  # Calculate monthly mean values for each discharge column
+  # (grouping by calendar month gives the interannual average for each month)
+  Qmonth <-aggregate(Q,format(as.Date(index(Q)), '%m'),FUN=mean)
+  # same for contributions
+  Contribsmonth <-aggregate(Contribs,format(as.Date(index(Contribs)), '%m'),FUN=mean)
+
+  # Merge and return zoo object
+  Month <- merge(Qmonth,Contribsmonth)
+  Month
+}
+
+### Function J2000MonthMax
+### function that selects the max discharge of each month of each year and then calculates the average interannual value + max interannual value
+## inputs:
+## - univariate zoo object of simulatedRunoff (daily time step, runoff in m3/s)
+J2000MonthMax<- function(Data)
+{
+  # Select the max discharge for each month of each year
+  temp <-aggregate(Data,as.yearmon(index(Data)),FUN=max)
+  # Calculate interannual mean of each month
+  MonthmaxMean <-aggregate(temp,format(as.Date(index(temp)), '%m'),FUN=mean)
+  # Calculate max interannual value of each month
+  MonthmaxMax <-aggregate(temp,format(as.Date(index(temp)), '%m'),FUN=max)
+  # Merge and return zoo object
+  Month <- merge(MonthmaxMean,MonthmaxMax)
+  Month
+}
+
+### Function J2000MonthThresh
+### function that selects discharge data above a certain threshold
+## counts length of events (number of days) and volume (m3) and computes monthly stats on them
+## inputs:
+## - univariate zoo object of simulatedRunoff from ReadTimeLoop function (daily time step, runoff in m3/s)
+## - value of discharge threshold in m3/s
+J2000MonthThresh <- function(Data, thresh)
+{
+# Select events above threshold
+Events <- subset(Data, Data >= thresh)
+
+# Calculate number of days above threshold for each month
+temp <-aggregate(Events,as.yearmon(index(Events)),FUN=length)
+# Calculate mean number of days per month
+Nbdays <-aggregate(temp,format(as.Date(index(temp)), '%m'),FUN=mean)
+
+# Calculate mean discharge above threshold
+Q<- aggregate(Events,format(as.Date(index(Events)), '%m'),FUN=mean)
+
+# Calculate flow volume above threshold for each month
+temp <-aggregate(Events*86400,as.yearmon(index(Events)),FUN=sum)
+# Then the interannual mean for each calendar month
+Vol <-aggregate(temp,format(as.Date(index(temp)), '%m'),FUN=mean)
+
+# Merge and return zoo object
+Month <- merge(Nbdays, Q, Vol)
+#Qmonth <-aggregate(na.omit(Q),chron(as.character(as.Date(as.yearmon(index(na.omit(Q))))),format=c(dates="y-m-d")),FUN=mean)
+Month
+}
+
+### Function J2000MonthUnderThresh
+### function that selects and analyses monthly discharge data below a certain threshold
+## counts length of events (number of days), mean discharge under thresh (m3/s) and volume (m3)
+## inputs:
+## - univariate zoo object of simulatedRunoff (daily time step, runoff in m3/s)
+## - value of discharge threshold in m3/s
+J2000MonthUnderThresh <- function(Data, thresh)
+{
+# Select events below threshold
+Events <- subset(Data, Data < thresh)
+
+# Calculate number of days under threshold for each month
+temp <-aggregate(Events,as.yearmon(index(Events)),FUN=length)
+# Then the interannual mean for each calendar month
+Nbdays <-aggregate(temp,format(as.Date(index(temp)), '%m'),FUN=mean)
+
+# Calculate mean discharge under threshold
+Q<- aggregate(Events,format(as.Date(index(Events)), '%m'),FUN=mean)
+
+# Calculate flow volume under threshold for each month
+temp <-aggregate(Events*86400,as.yearmon(index(Events)),FUN=sum)
+# Then the interannual mean for each calendar month
+Vol <-aggregate(temp,format(as.Date(index(temp)), '%m'),FUN=mean)
+
+# Merge and return zoo object
+res <- merge(Nbdays, Q, Vol)
+
+# return result
+res
+}
+
+
+######## YEARLY AVERAGE ANALYSIS ##################
+
+### Function J2000YearStats
+### function that calculates yearly statistics for J2000 outputs
+## (discharge, runoff contribution, mean saturation levels, evapotranspiration, runoff coeff)
+## inputs:
+## - multivariate zoo object from ReadTimeLoop function (daily time step, runoff in m3/s, rain and evap in mm)
+## - size of catchment in m2 (for calculation of runoff coefficient)
+J2000YearStats <- function(Data, size)
+{
+  # Extract discharge data from Data object (total runoff + runoff contributions)
+  Q <- Data[,4:11]
+  # Extract rain and evapotranspiration data
+  RET <- Data[,1:3]
+
+  # Calculate yearly mean values for each discharge column and saturation levels
+  Qyear <-aggregate(Q,format(as.Date(index(Q)), '%Y'),FUN=mean)
+
+  # Calculate yearly cumulative values for rain and evapotranspiration
+  RETyear <-aggregate(RET,format(as.Date(index(RET)), '%Y'),FUN=sum)
+  # Calculate runoff coefficient (daily time step only!!)
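+  # (yearly runoff volume in m3 = catchmentSimRunoff_qm [m3/s] * 86400 summed over the year;
+  #  rainfall volume in m3 = precip [mm] * size [m2] / 1000)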
+ RCyear <-aggregate(Q$catchmentSimRunoff_qm*86400,format(as.Date(index(Q)),'%Y'),FUN=sum) / (RETyear$precip*size/1000) + + # Calculate average soil saturation + Soilsat <- aggregate((Q$satLPS+Q$satMPS)/2, format(as.Date(index(Q)), '%Y'),FUN=mean) + + # Merge and return zoo object + Year <- merge(Qyear, RETyear, RCyear,Soilsat) + #Qmonth <-aggregate(na.omit(Q),chron(as.character(as.Date(as.yearmon(index(na.omit(Q))))),format=c(dates="y-m-d")),FUN=mean) + Year +} + +### Function J2000ReachYearStats +### function that calculates base yearly statistics for J2000 outputs +## (discharge, runoff contributions) +## input: +## - multivariate zoo object from ReadReachExtraction function (daily time step, runoff in m3/s) + +J2000ReachYearStats <- function(Data) +{ + # Extract total runoff + Q <- Data[,1] + # Extract runoff contributions + Contribs <- Data[,3:6] + + # Calculate monthly mean values for each discharge column + # For average values for each month of each year + Qyear <-aggregate(Q,format(as.Date(index(Q)), '%Y'),FUN=mean) + # same for contributions + Contribsyear <-aggregate(Contribs,format(as.Date(index(Contribs)), '%Y'),FUN=mean) + + # Merge and return zoo object + Year <- merge(Qyear,Contribsyear) + Year +} + +### Function J2000YearMax +### function that selects the max discharge of each year +## inputs: +## - univariate zoo object of simulatedRunoff (daily time step, runoff in m3/s) +J2000YearMax<- function(Data) +{ + Yearmax <-aggregate(Data,format(as.Date(index(Data)), '%Y'),FUN=max) + Yearmax +} + +### Function J2000YearThresh +### function that selects and analyses yearly and monthly discharge data above a certain threshold +## counts lenght of events (number of days) and volume (m3) +## inputs: +## - univariate zoo object of simulatedRunoff (daily time step, runoff in m3/s) +## - value of discharge threshold in m3/s +J2000YearThresh <- function(Data, thresh) +{ +# Select events above threshold +Events <- subset(Data, Data >= thresh) + +# Calculate yearly number of days above threshold +Nbdays <-aggregate(Events,format(as.Date(index(Events)), '%Y'),FUN=length) + +# Calculate mean discharge above threshold +Q<- aggregate(Events,format(as.Date(index(Events)), '%Y'),FUN=mean) + +# Calculate yearly flow volume above threshold +Vol <-aggregate(Events*86400,format(as.Date(index(Events)), '%Y'),FUN=sum) + +# Merge and return zoo object +Year <- merge(Nbdays, Q, Vol) +Year +} + + +### Function J2000YearUnderThresh +### function that selects and analyses yearly and monthly discharge data below a certain threshold +## counts lenght of events (number of days) and volume (m3) +## inputs: +## - univariate zoo object of simulatedRunoff (daily time step, runoff in m3/s) +## - value of discharge threshold in m3/s +J2000YearUnderThresh <- function(Data, thresh) +{ +# Select events under threshold +Events <- subset(Data, Data < thresh) + +# Calculate yearly number of days under threshold +Nbdays <-aggregate(Events,format(as.Date(index(Events)), '%Y'),FUN=length) + +# Calculate mean discharge under threshold +Q<- aggregate(Events,format(as.Date(index(Events)), '%Y'),FUN=mean) + +# Calculate yearly flow volume under threshold (daily time step only) +Vol <-aggregate(Events*86400,format(as.Date(index(Events)), '%Y'),FUN=sum) + +# Merge and return zoo object +Year <- merge(Nbdays, Q, Vol) +Year +} + +### Function J2000QIndicators +### function that calculates a set of indicators for a given time series: mean interannual discharge, max discharge, % of low flow (under thresh) +### and relative 
percentages of base flow, interflow and surface runoff +## inputs: +## - univariate zoo object of simulatedRunoff (daily time step, runoff in m3/s) +## - multivariate zoo object of flow contributions (in order RD1, RD2, RG1, RG2) +## - threshold for low flow +J2000QIndicators <- function(Q, contribs, thresh) +{ + # Calculate mean discharge + MeanQ <- mean(Q, na.rm = TRUE) + # Calculate max discharge + MaxQ <- max(Q, na.rm = TRUE) + # Select events under threshold + Events <- subset(Q, Q < thresh) + # Calculate low flow indicator (percentage of days under thresh for the time series) + Low <- length(Events)/length(Q)*100 + # Calculate relative contrib of RD1 + RD1 <- mean(contribs[,1], na.rm = TRUE)/MeanQ + # Calculate relative contrib of RD2 + RD2 <- mean(contribs[,2], na.rm = TRUE)/MeanQ + # Calculate relative contrib of RG1 + RG1 <- mean(contribs[,3], na.rm = TRUE)/MeanQ + # Calculate relative contrib of RG2 + RG2 <- mean(contribs[,4], na.rm = TRUE)/MeanQ + + # Merge and return set of indicators + Indic <- c(MeanQ, MaxQ, Low, RD1,RD2,RG1,RG2) + Indic +} + +### Function J2000balanceIndicators +### function that calculates a set of indicators for a given time series: mean soil moisture, mean PET, mean RET +## inputs: +## - bivariate zoo object of soil moisture (daily time step, MPS/LPS) +## - bivariate zoo object of evapotranspiration (PET actET in order) +J2000balanceIndicators <- function(moist, ET) +{ + # Calculate mean soil moisture + Meanmoist <- mean((moist[,1]+moist[,2])/2) + # Calculate mean annual ET + # Calculate yearly cumulate values evapotranspiration + ETyear <-aggregate(ET,format(as.Date(index(ET)), '%Y'),FUN=sum) + MeanPET <- mean(ETyear[,1]) + MeanactET <- mean(ETyear[,2]) + + # Merge and return set of indicators + Indic <- c(Meanmoist, MeanPET, MeanactET) + Indic +} + diff --git a/irrigation-R-codes/lib_old/MDR_AERMCprelev.r b/irrigation-R-codes/lib_old/MDR_AERMCprelev.r new file mode 100644 index 0000000000000000000000000000000000000000..d8ddeb849820e1ab8b70ee3b55f78f4383a19bc1 --- /dev/null +++ b/irrigation-R-codes/lib_old/MDR_AERMCprelev.r @@ -0,0 +1,112 @@ +# Fonctions calculant les prélèvements moyens interannuels par canton sur les données AERMC. +# ***************************************************************************************** + +# Unités : dans Chronique_PrelevRMC_Cantons.txt, les prélèvements sont en x 1000 m3/yr. + +# 81 : GRAVITAIRE. 82 : NON-GRAVITAIRE. 85 (for yr >=2008): CANAUX. +# on va surtout s'intéresser dans un 1er temps au NON-GRAV (82) sur 1987-2007. 
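+
+# Commented-out usage sketch of the helpers defined below (the hard-coded input file
+# '~/Documents/MDR/irrigation/Chronique_PrelevRMC_Cantons.txt' must exist locally):
+# prelev82   <- Prelev82_1987_2007()     # mean annual code-82 withdrawal per canton, in m3/yr, over 1987-2007
+# prelev8182 <- Prelev8182_1987_2007()   # codes 81 + 82 combined over the same period
+# head(prelev82)                         # two columns: 'canton' and 'Prelev82'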
+ +# ***************************************************************************************** +library(maptools) +library(rgeos) +library(sp) +library(raster) +library(xts) +library(zoo) + +# ***************************************************************************************** +# -------------------------------Prelev82_1987_2007------------------------------- +# Prelev82_1987_2007 <- function() +# Calculates the mean annual water withdrawal across all cantons over 1987-2007 +# Args: +# None +# Returns: +# A dataframe containing the canton and the corresponding mean annual water withdrawal +# ----------------------------------------------------------------------------- +Prelev82_1987_2007=function(){ # m3/yr NTC + Prelev <- read.table('~/Documents/MDR/irrigation/Chronique_PrelevRMC_Cantons.txt',header=T) + + Prelev82=NULL + for (cant in Prelev$Canton[order(unique(Prelev$Canton))]){ + Prelev82_ann = Prelev[which(Prelev[,1] == cant),5] * 1000 #(m3) + Prelev_date= as.Date(as.character(Prelev[which(Prelev[,1] == cant),2]), format="%Y") + Prelev82_ann = xts(Prelev82_ann, Prelev_date) + Prelev82 = rbind(Prelev82,mean(Prelev82_ann["1987/2007"])) # prélèvement annuel moyen sur 1987-2007 + } + Prelev82data=cbind(Prelev$Canton[order(unique(Prelev$Canton))], Prelev82) + colnames(Prelev82data)=c('canton','Prelev82') + return(Prelev82data) +} + +# ***************************************************************************************** + +Prelev8182_1987_2007=function(){ # m3/yr NTC + Prelev <- read.table('~/Documents/MDR/irrigation/Chronique_PrelevRMC_Cantons.txt',header=T) + + Prelev8182=NULL + for (cant in Prelev$Canton[order(unique(Prelev$Canton))]){ + Prelev82_ann = Prelev[which(Prelev[,1] == cant),5] * 1000 #(m3) + Prelev81_ann = Prelev[which(Prelev[,1] == cant),4] * 1000 #(m3) + Prelev_date= as.Date(as.character(Prelev[which(Prelev[,1] == cant),2]), format="%Y") + + Prelev82_ann = xts(Prelev82_ann, Prelev_date) + Prelev81_ann = xts(Prelev81_ann, Prelev_date) + + Prelev8182 = rbind(Prelev8182,mean(Prelev81_ann["1987/2007"])+mean(Prelev82_ann["1987/2007"])) # prélèvement annuel moyen sur 1987-2007 + } + Prelev8182data=cbind(Prelev$Canton[order(unique(Prelev$Canton))], Prelev8182) + colnames(Prelev8182data)=c('canton','Prelev8182') + + return(Prelev8182data) +} + +# ***************************************************************************************** + +Prelev8182_2008_2012=function(){ # m3/yr NTC + Prelev <- read.table('~/Documents/MDR/irrigation/Chronique_PrelevRMC_Cantons.txt',header=T) + + Prelev8182=NULL + for (cant in Prelev$Canton[order(unique(Prelev$Canton))]){ + Prelev82_ann = Prelev[which(Prelev[,1] == cant),5] * 1000 #(m3) + Prelev81_ann = Prelev[which(Prelev[,1] == cant),4] * 1000 #(m3) + Prelev_date= as.Date(as.character(Prelev[which(Prelev[,1] == cant),2]), format="%Y") + + Prelev82_ann = xts(Prelev82_ann, Prelev_date) + Prelev81_ann = xts(Prelev81_ann, Prelev_date) + + Prelev8182 = rbind(Prelev8182,mean(Prelev81_ann["2008/2012"])+mean(Prelev82_ann["2008/2012"])) # prélèvement annuel moyen sur 2008 - 2012 + } + Prelev8182data=cbind(Prelev$Canton[order(unique(Prelev$Canton))], Prelev8182) + colnames(Prelev8182data)=c('canton','Prelev8182') + + return(Prelev8182data) +} + + + +# 2. Sur quels cantons simule-t-on l'irrigation ? 
+# ************************************************ +# CantonsIrr <- readShapeSpatial('~/DATA/SIG_MDR/irrigation/cantons_irrigues_3.shp', proj4string=CRS ("+init=epsg:2154"),verbose=FALSE,repair=FALSE,IDvar=NULL,force_ring=FALSE,delete_null_obj=FALSE, retrieve_ABS_null=FALSE) +# NumCantonsIrr=CantonsIrr$CODE_CAN_1 +# +# Prelev82_noscantons=Prelev82data[Prelev82data[,1]%in%NumCantonsIrr,] +# write.table(Prelev82_noscantons,'~/Documents/MDR/irrigation/Prelev82_1987_2007_M3_noscantonsirrigues.txt',append=F, sep="\t", row.names=FALSE, col.names=c('Canton', 'Prelev82_m3_87_2007')) # en m3 + + + +# 3. CANTONS AERMC vs NOS CANTONS IRRIGUES +# ****************************************** +Prelev <- read.table('~/Documents/MDR/irrigation/Chronique_PrelevRMC_Cantons.txt',header=T) +allcantons=Prelev$Canton[order(unique(Prelev$Canton))] + +# nos cantons - 104 +Cantons_Rhone <- c(101,117,118,119,120,140,518,717,722,724,1333,2602,2604,2607,2611,2613,2615,2616,2619,2621,2623,2625,2626,2628,2629,2632,2634,3006,3009, 3016, 3023,3026,3802,3807,3808,3815,3819,3822,3824,3825,3830,3837,3846,3853,4213,4233,6907,6924,6931,6937,6938,6944,6945,6948,6949,7405,8405,8406,8409,8413,8415,8416,8418,8423) +Cantons_Durance <- c(410,413,414,416,419,420,421,427,429,430,505,509,512,515,516,522,523,524,1307, 1309,1312,1326,1327,1331,8319,8408,8411) +Cantons_Saone <- c(102,126,135,2103,2114,2134,2138,3909,6905,6910,6925,7116,7151) + +un_canton=c(Cantons_Rhone,Cantons_Durance,Cantons_Saone) +un_canton <- un_canton[order(un_canton)] + +irrigated= allcantons %in% un_canton +rep(0,length(allcantons)) +write.table(irrigated,'~/Documents/MDR/irrigation/irrigated.txt',append=F, sep="\t", row.names=FALSE, col.names=FALSE) + diff --git a/irrigation-R-codes/lib_old/MDR_utilitaires.r b/irrigation-R-codes/lib_old/MDR_utilitaires.r new file mode 100644 index 0000000000000000000000000000000000000000..20dc17f356b8eb464593ee7f2a46266434631e80 --- /dev/null +++ b/irrigation-R-codes/lib_old/MDR_utilitaires.r @@ -0,0 +1,1242 @@ +library(lubridate) + + + + +##################################################################################### +# ANALYSE SORTIES J2000 +##################################################################################### + +#~* look for NIU -> not in use +#~* or for NTK -> need to keep +#~* maybe even NTC -> need to check + +# actually don't look for NIU, there's way too much, just keep the one with either NTK or NTC + +#~* PROGRAMMER: Isabelle GOUTTEVIN (Irstea Lyon) +#~------------------------------------------------------------------------------ +#~* CREATED/MODIFIED: Created 26-08-2015 +#~------------------------------------------------------------------------------ + +# LAUNCH_SIMU(xx, model, params%xx) +# lance une simu en batch +#~------------------------------------------------------------------------------ +# NIU +launch_simu=function(dir, model ,parameters=NULL){ + # INPUTS : + # dir : /home/isabelle.gouttevin/JAMS/submodels/Arve_loc + # model : j2k_tmin_tmax_mail_wildcard.jam + # parameters= c(1,2) // vector. 
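+  # Example call (not run), reusing the paths listed above; the parameter vector is turned
+  # into the ';'-separated string that is built below and passed to JAMS:
+  #   launch_simu('/home/isabelle.gouttevin/JAMS/submodels/Arve_loc',
+  #               'j2k_tmin_tmax_mail_wildcard.jam', parameters = c(1, 2))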
+ MODEL=paste(dir,'/', model,sep="") + OPTIONS='-Xms150m -Xmx4096m -XX:MaxPermSize=128m -Dsun.java2d.d3d=false -Djava.library.path=../../bin/win64' + MAINCLASS='/home/isabelle.gouttevin/JAMS/jams/jams-bin/jams-starter.jar' + OPTIONS2=' -n -c /home/isabelle.gouttevin/JAMS/jams/default.jap ' + + PARAM=NULL ; for (l in c(1:length(parameters))){if (l < length(parameters)){PARAM=paste(PARAM,parameters[l],';',sep='')} else {PARAM=paste(PARAM,parameters[l],sep='')}} + if (!is.null(parameters)){ + system(paste('cd ', dir, ' ; java ', OPTIONS,' -jar ',MAINCLASS,OPTIONS2,' -m ', MODEL,' -p "', PARAM,'"',sep='')) + } + else { + system(paste('cd ', dir,' ; java ', OPTIONS,' -jar ',MAINCLASS,OPTIONS2,' -m ', MODEL ,sep='')) + } +} + +# NIU +# TREATSIMU +# --------------------------------------------------------------------------- +#~* Converts a simulation by J2000 (Timeloop.dat) into a zoo time-serie +# featuring the DISCHARGES at the desired stations (StationsOI )= +# Stations Of Interest +# +#~* IN : simu = ReadTimeLoopDaily('../../Timeloop.dat') +#~* StationsOI = c("X3000010","....") # code station BanqueHydro/MDR +#~* +#~* OUT : xts object contenant les débits (m3/s) journaliers aux StationsOI +#~* +#~* PARAMETER (chemin à renseigner) : +#~* - la table "Correspondance" entre les stations et les brins du modèle +#~* - le Debut et fin de simulation +#~----------------------------------------------------------------------------- +treatsimu=function(simu, StationsOI){ + library(xts) + window = '1987-01-01/2012-12-31' + Correspondance = read.table ('~/Documents/MDR/Correspondance_brins_stations_rhone.csv',sep=";",dec=".") # correspondance entre brins du reach.par et code des stations (station, brin, x) + + Reaches=Correspondance[!is.na(match(Correspondance[,1],StationsOI)),2] + Stations=Correspondance[!is.na(match(Correspondance[,1],StationsOI)),1] + mm=match(Correspondance[,1],StationsOI) # NA or index of StationOI (station 1, station 2, etc..) wihere there is a match + Order=mm[!is.na(mm)] + Cumul=table(Order) # For each match, (station 1, station 2, etc...), how many reaches ? + Names <- names(simu) + indexes=match(paste('simRunoff_',Reaches,sep=''),Names) # indices de la colomne des reaches d'intérêt dans la simu + discharges=simu[ ,indexes]/1000/24/3600 # m3/s + + SIM0 = rep(NULL, length(simu[,1])) + for (i in 1:length(StationsOI)){ + if(Cumul[i]==1){ + SIM_tmp=as.vector(discharges[,which(Order==i)]) + }else{ + SIM_tmp=as.vector(apply(discharges[,which(Order==i)],1,sum)) + } + SIM=cbind(SIM0,SIM_tmp) + SIM0=SIM + } + SIM=xts(SIM,time(simu)) + return(SIM[window]) +} + +# NIU +# pr simus sur submodels +# ---------------------- +treatsimu_exutoire=function(simu){ + library(xts) + window = '1987-01-01/2012-12-31' + Names <- names(simu) + indexe=match(paste('catchmentSimRunoff',sep=''),Names) # indices de la colomne des reaches d'intérêt dans la simu + if(is.na(indexe)) indexe=match(paste('simRunoff',sep=''),Names) #nom dans le ReachLoop + if(is.na(indexe)) indexe=match(paste('reachStation',sep=''),Names) + SIM=as.numeric(simu[ ,indexe])/1000/24/3600 # m3/s + SIM=xts(SIM,time(simu)) + colnames(SIM)=c("Q") + return(SIM[window]) +} + +# ?? 
NIU +# ------- +treatsimu_window=function(simu, StationsOI, window){ + library(xts) + Correspondance = read.table ('~/Documents/MDR/Correspondance_brins_stations_rhone.csv',sep=";",dec=".") # correspondance entre brins du reach.par et code des stations (station, brin, x) + + Reaches=Correspondance[!is.na(match(Correspondance[,1],StationsOI)),2] + Stations=Correspondance[!is.na(match(Correspondance[,1],StationsOI)),1] + mm=match(Correspondance[,1],StationsOI) # NA or index of StationOI (station 1, station 2, etc..) wihere there is a match + Order=mm[!is.na(mm)] + Cumul=table(Order) # For each match, (station 1, station 2, etc...), how many reaches ? + Names <- names(simu) + indexes=match(paste('simRunoff_',Reaches,sep=''),Names) # indices de la colomne des reaches d'intérêt dans la simu + discharges=simu[ ,indexes]/1000/24/3600 # m3/s + + SIM0 = rep(NULL, length(simu[,1])) + for (i in 1:length(StationsOI)){ + if(Cumul[i]==1){ + SIM_tmp=as.vector(discharges[,which(Order==i)]) + }else{ + SIM_tmp=as.vector(apply(discharges[,which(Order==i)],1,sum)) + } + SIM=cbind(SIM0,SIM_tmp) + SIM0=SIM + } + SIM=xts(SIM,time(simu)) + names(SIM)=StationsOI + return(SIM[window]) +} + +# TREATOBS NIU +# --------------------------------------------------------------------------- +#~* Converts the observed discharge at Stations Of Interest (StationsOI) +#~* into a zoo time-serie +#~* IN : StationsOI = c("X3000010","....") # code station BanqueHydro/MDR +#~* OBS discharges in L/s +#~* +#~* OUT : xts object contenant les débits (m3/s) journaliers aux StationsOI +#~* +#~* PARAMETER (chemin à renseigner) : +#~* le chemin et le nom des fichiers contenant les obs +#~* (format : $ {Station}_OBS_ls.txt) +# --------------------------------------------------------------------------- +treatobs=function(StationsOI){ + library(xts) + deb = '1987-01-01' + fin = '2012-12-31' + OBS0=NULL + for ( i in StationsOI){ + + OBS_tmp <- as.vector(read.table(paste('~/DATA/HYDRO/Obs_New/',i,'_OBS_ls.txt',sep=''),sep='\t',skip=1)[,4]/1000) # m3/s + OBS=cbind(OBS0,OBS_tmp) + OBS0=OBS + } + + OBS=xts(OBS,seq(as.POSIXct(deb, format="%Y-%m-%d"),as.POSIXct(fin, format="%Y-%m-%d"),by="1 d") + ) + return(OBS) +} + +# TREATOBS_inedites NIU +#~------------------------------------------------------------------------------- +#~* For "inedites stations" : REF : Le Gros, C., Sauquet, E., Lang, M., Achard, A.-L., Leblois, E., Bitton, B., 2015. +#~* Les Annuaires Hydrologiques de la Société Hydrotechnique De France : Une source d’information patrimoniale pour la connaissance de l’hydrologie en France. La Houille Blanche, 4, 66-77 +#~* doi: 10.1051/lhb/20150048. 
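+#~* Example (not run; the station code is illustrative, any code present in
+#~* ~/DATA/MDR_stations_inédites/observations/ should work):
+#~*   obs <- treatobs_inedite("V4226010")   # daily discharge (m3/s) as a zoo series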
+#~------------------------------------------------------------------------------- +treatobs_inedite=function(Station){ + + obsfile=grep(Station,readLines(pipe('ls ~/DATA/MDR_stations_inédites/observations/*')), value=TRUE) + OBS = as.vector(read.table(obsfile,sep=';',skip=3, fill=TRUE)[,4]/1000) # m3/s + OBS = OBS[1:length(OBS)-1] + + time=as.Date(as.character(read.table(obsfile,sep=';',skip=3, fill=TRUE)[,3]), format="%Y%m%d") + time=time[1:length(time)-1] + + OBS=zoo(OBS,time) + return(OBS) +} + +# SELECT_OPTIMUM NIU +# ---------------------------------------------------------------------------- +# sélection du BEST dans un optimizer.dat +# - bv "Arve_loc" +# - optim_outputdir : "Optim_asp" +# - effcriteria_name : J2K_optim___main_eff_kge_normalized' +# - parameter_name : 'snow.J2KProcessSnow_IRSTEA_tmin_tmax___t_factor' +# ---------------------------------------------------------------------------- +select_optimum=function(bv, optim_outputdir,effcriteria_name,parameter_name){ # here optimal val for tf + optim=read.table(paste('~/JAMS/submodels/',bv,'/output/',optim_outputdir,'/optimizer.dat', sep=''), skip=10) + colnames(optim)=scan(paste('~/JAMS/submodels/',bv,'/output/',optim_outputdir,'/optimizer.dat', sep=''), skip=5, nlines=1, what='character') + ind=which(optim[,effcriteria_name] == min(optim[,effcriteria_name]))[1] # select only one minimum + tf=round(optim[ind,parameter_name],2) + return(tf) +} + + +# GRAPHIQUES +# ------------------------------------------------------------------------------ + +# trace_graphes +# ------------------------------------------------------------------------------ +#~* Creates the beautiful analyse-graph for discharge simulations at a station: +#~* LEFT side : monthly discharge over 1987-2012 +#~* daily discharge over a window +#~* RIGHT side : mean annual cycle of modelled/observed discharge +#~* +#~* IN : - StationsOI = c("X3000010","....") # code station BanqueHydro/MDR +#~* - NamesOI =c("Durance Cadarache", "..") +#~* - SIM : zoo object with the simulated discharges (Ncols=length(StationsOI)) +#~* - OBS : zoo object with the observed discharges (Ncols=length(StationsOI)) +#~* - pdfname : ex : "mypdf.pdf" +#~* - graphefile_name : where to save that pdf ? +#~* - fullts : if true, the details of daily discherges are plotted after the main recap plot +#~* +#~* OUT : mypdf.pdf +#~* +#~* DEPENDANCES (chemin à renseigner) : +#~* aggregateZoo_functions.r +# ------------------------------------------------------------------------------ +# NIU +trace_graphes=function(StationsOI, NamesOI, SIM, OBS, pdfname=NULL, graphefile_name=NULL,fullts=NULL){ + source('lib/aggregateZoo_functions.r') + library(lubridate) + library(xts) + library(grid) + + if(!is.null(pdfname) && !is.null(graphefile_name)){pdf(paste(graphefile_name,pdfname,sep=''),paper="special",width=12,height=8)} + + SIM=SIM["1987-01-01/2012-12-31"] + + OBSm=aggregateZoo(OBS,'my',mean) + SIMm=aggregateZoo(SIM,'my',mean) + + OBScycle=aggregateZoo(OBS, "m", mean) + SIMcycle=aggregateZoo(SIM, "m", mean) + + for (i in 1:length(StationsOI)){ + + layout(matrix(c(1:3,4,4,4),3,2)) + par (pty="m") # Paramètre pour avoir un graphe de taille max + + # 1. Tracé des débits mensuels + plot(SIMm[,i],type='l',ylim=c(0,max(na.omit(SIMm[,i]),na.omit(OBSm[,i]))),main=NamesOI[i],col=2, xlab='Date',ylab='Q(m3/s)', cex.main=2) + lines(OBSm[,i],col=1) + legend("topleft",legend=c("OBS", "SIM"),y.intersp = 1, lty= 1,bty="n",col = c(1,2),xpd=NA,cex=0.8) + + # 2. 
Tracé d'une fenêtre de débits journaliers (originale François : 8037:9131 == 2009-01-01:2011-12-31) + par (pty="m") + date1=as.POSIXct("1987-01-01") + date2=as.POSIXct("1990-12-31") + #date1=as.POSIXct("2005-01-01") + #date2=as.POSIXct("2008-12-31") + + index1=which(time(SIM)==date1) + index2=which(time(SIM)==date2) + plot(SIM[index1:index2,i],type='l',main='Journalier 1987-1990',ylim=c(0,max(SIM[,i],na.omit(OBS[,i]))), xlab='Date',ylab='Q(m3/s)', xaxt="n") + lines(SIM[index1:index2,i],col=2) + lines(OBS[index1:index2,i],col=1) + legend("topleft",legend=c("OBS", "SIM"),y.intersp = 1, lty= 1,bty="n",col = c(1,2),xpd=NA,cex=0.8) + + axis(1, at=seq(date1,date2, by="month"), labels=month(seq(date1,date2, by="month"))) + mtext(c(year(seq(date1,date2, by="year"))), 1, at=seq(date1,date2, by="year"), line=1.7, cex=0.8) + + # 2.2. + par (pty="m") + date1=as.POSIXct("2009-01-01") + date2=as.POSIXct("2012-12-31") + + index1=which(time(SIM)==date1) + index2=which(time(SIM)==date2) + plot(SIM[index1:index2,i],type='l',main='Journalier 2009-2012',ylim=c(0,max(SIM[,i],na.omit(OBS[,i]))), xlab='Date',ylab='Q(m3/s)', xaxt="n") + lines(SIM[index1:index2,i],col=2) + lines(OBS[index1:index2,i],col=1) + legend("topleft",legend=c("OBS", "SIM"),y.intersp = 1, lty= 1,bty="n",col = c(1,2),xpd=NA,cex=0.8) + + axis(1, at=seq(date1,date2, by="month"), labels=month(seq(date1,date2, by="month"))) + mtext(c(year(seq(date1,date2, by="year"))), 1, at=seq(date1,date2, by="year"), line=1.7, cex=0.8) + + + # 3. Tracé du cycle mensuel moyen des débits + + MAX<- max(na.omit(OBScycle[,i]),na.omit(SIMcycle[,i])) # hauteur max du graphe + par (pty="m") + plot(SIMcycle[,i],main = "Moyenne mensuelle interannuelle des débits",ylim=c(0, MAX*1.1),font.lab=2,type="l",col=2,xaxt = "n",xlab="Months",ylab="Q(m3/s)") + lines(OBScycle[,i], col=1) + ticks<-as.numeric(time(OBScycle)) + labs <- substr(month.abb,1,1) + Axis(side = 1, at = ticks, labels = labs) + legend("topleft",legend=c("OBS", "SIM"),y.intersp = 1, lty= 1,bty="n",col = c(1,2),xpd=NA,cex=0.8) + #plot.new() #new page + + if(fullts==TRUE){ + #grid.newpage() + Nbyears=round(dim(SIM)[1]/365) + Nbrow=round(Nbyears/3/2)+1*(Nbyears/3/2>round(Nbyears/3/2)) + Nbpages=round(Nbrow/3)+1*(Nbrow/3>round(Nbrow/3)) + mystartyr = year(SIM[1]) + for (page in 1:Nbpages){ + layout(matrix(c(1:6),3,2)) + par (pty="m") + yrstart=mystartyr+(page-1)*6*3 # pr la page + yrend=min(mystartyr+(page)*6*3-1,year(SIM[length(SIM)]))#pr la page + + for (myyrbeg in seq(yrstart,yrend,by=3)){ # chaque plot en lighe de 3 ans + + date1=as.POSIXct(paste(myyrbeg,'-01-01',sep="")) + date2=as.POSIXct(paste(min(myyrbeg+2,yrend),'-12-31',sep="")) + + index1=which(time(SIM)==date1) + index2=which(time(SIM)==date2) + plot(SIM[index1:index2,i],type='l',main=paste('Journalier ',myyrbeg,'-',min(myyrbeg+2,yrend),sep=""),ylim=c(0,max(SIM[,i],na.omit(OBS[,i]))), xlab='Date',ylab='Q(m3/s)', xaxt="n") + lines(SIM[index1:index2,i],col=2) + lines(OBS[index1:index2,i],col=1) + legend("topleft",legend=c("OBS", "SIM"),y.intersp = 1, lty= 1,bty="n",col = c(1,2),xpd=NA,cex=0.8) + + axis(1, at=seq(date1,date2, by="month"), labels=month(seq(date1,date2, by="month"))) + mtext(c(year(seq(date1,date2, by="year"))), 1, at=seq(date1,date2, by="year"), line=1.7, cex=0.8) + # par(new = T) + # plot(70-Rainmean, col='blue',type='l' axes=F, xlab=NA, ylab=NA) + # axis(side = 4) + # mtext(side = 4, line = 3, 'Rain(mm/d)') + } + } + } + } + + if(!is.null(pdfname) && !is.null(graphefile_name)){graphics.off()} +} + +# trace_graphes_2 : superposer 2 simus 
+# --------------------------------------------------------------------------- +trace_graphes_2=function(StationsOI, NamesOI, SIM, OBS, sim2, pdfname, graphefile_name, simuname, simuname2){ + if (exists("simuname") & exists("simuname2")){ + NameSim=simuname + NameSim2=simuname2 + } else { + NameSim="SIM-REF" + NameSim2="SIM-test" + } + + source('lib/aggregateZoo_functions.r') + library(lubridate) + library(xts) + pdf(paste(graphefile_name,pdfname,sep=''),paper="special",width=12,height=8) + + OBSm=aggregateZoo(OBS,'my',mean) + SIMm=aggregateZoo(SIM,'my',mean) + sim2m=aggregateZoo(sim2,'my',mean) + + OBScycle=aggregateZoo(OBS, "m", mean) + SIMcycle=aggregateZoo(SIM, "m", mean) + sim2cycle=aggregateZoo(sim2, "m", mean) + + for (i in 1:length(StationsOI)){ + + layout(matrix(c(1:3,4,4,4),3,2)) + par (pty="m") # Paramètre pour avoir un graphe de taille max + + # 1. Tracé des débits mensuels + plot(SIMm[,i],type='l',ylim=c(0,max(SIMm[,i],sim2m[,i],na.omit(OBSm[,i]))),main=NamesOI[i],color=2, xlab='Date',ylab='Q(m3/s)', cex.main=2) + lines(sim2m[,i],col=6) + lines(OBSm[,i],col=1) + + legend("topleft",legend=c("OBS", NameSim, NameSim2),y.intersp = 1, lty= 1,bty="n",col = c(1,2,6),xpd=NA,cex=0.8) + + # 2. Tracé d'une fenêtre de débits journaliers (originale François : 8037:9131 == 2009-01-01:2011-12-31) + par (pty="m") + date1=as.POSIXct("2001-01-01") + date2=as.POSIXct("2003-12-31") + + index1=which(time(OBS)==date1) + index2=which(time(OBS)==date2) + plot(SIM[index1:index2,i],type='l',main='Journalier 1987-1990',ylim=c(0,max(SIM[,i],sim2[,i],na.omit(OBS[,i]))),color=2, xlab='Date',ylab='Q(m3/s)', xaxt="n") + lines(sim2[index1:index2,i],col=6) + lines(OBS[index1:index2,i],col=1) + + legend("topleft",legend=c("OBS", NameSim, NameSim2),y.intersp = 1, lty= 1,bty="n",col = c(1,2,6),xpd=NA,cex=0.8) + + axis(1, at=seq(date1,date2, by="month"), labels=month(seq(date1,date2, by="month"))) + mtext(c(year(seq(date1,date2, by="year"))), 1, at=seq(date1,date2, by="year"), line=1.7, cex=0.8) + + # 2.2. + par (pty="m") + date1=as.POSIXct("2009-01-01") + date2=as.POSIXct("2012-12-31") + + index1=which(time(OBS)==date1) + index2=which(time(OBS)==date2) + plot(SIM[index1:index2,i],type='l',main='Journalier 2009-2012',ylim=c(0,max(SIM[,i],sim2[,i],na.omit(OBS[,i]))),color=2, xlab='Date',ylab='Q(m3/s)', xaxt="n") + lines(sim2[index1:index2,i],col=6) + lines(OBS[index1:index2,i],col=1) + + legend("topleft",legend=c("OBS",NameSim, NameSim2),y.intersp = 1, lty= 1,bty="n",col = c(1,2,6),xpd=NA,cex=0.8) + + axis(1, at=seq(date1,date2, by="month"), labels=month(seq(date1,date2, by="month"))) + mtext(c(year(seq(date1,date2, by="year"))), 1, at=seq(date1,date2, by="year"), line=1.7, cex=0.8) + + + # 3. 
Tracé du cycle mensuel moyen des débits + + MAX<- max(na.omit(OBScycle[,i]),SIMcycle[,i], sim2cycle[,i]) # hauteur max du graphe + par (pty="m") + plot(SIMcycle[,i],main = "Moyenne mensuelle interannuelle des débits",ylim=c(0, MAX*1.1),font.lab=2, + type="l",color=2,xaxt = "n",xlab="Months",ylab="Q(m3/s)") + lines(sim2cycle[,i], col=6) + lines(OBScycle[,i], col=1) + + ticks<-as.numeric(time(OBScycle)) + labs <- substr(month.abb,1,1) + Axis(side = 1, at = ticks, labels = labs) + legend("topleft",legend=c("OBS", NameSim, NameSim2),y.intersp = 1, lty= 1,bty="n",col = c(1,2,6),xpd=NA,cex=0.8) + #plot.new() #new page + } + graphics.off() +} + +# tracegrapheBV NIU +# ---------------------------------------------------------------------------- +# CAVEAT : fonctionne uniquement avec 8 BV AEN pr l'instant +# test : tracegrapheBV('~/JAMS/submodels/Arve_loc/output/current/','Arve_loc','~/WORK/AEN/results/graphes/','Arve_global_aasp3.5.pdf') +tracegrapheBV=function(simrep,bv,pdfrep,pdfname){ + source('lib/MDR_trace_graphes_Etienne.r') + source('lib/MDR_utilitaires.r') + source("lib/readwrite_functions_J2000.R") + simu = ReadTimeLoopXTS(simrep,'TimeLoop.dat') + SIM=treatsimu_exutoire(simu) + + if(bv=="Arve_loc"){ + # - Arve à Chamonix Mont Blanc + StationsOI=c("V0002010") + NamesOI=c("L Arve a Chamonix-Mont-Blanc [Pont des Favrands]") + } + if(bv=="Arvan_Amont_loc"){ + # - Arvant Amont ? Saint-Jean-D'Arves + StationsOI=c("W1055020") + NamesOI=c("L Arvan a Saint-Jean-d Arves [La Villette]") + } + if(bv=="Durance_Val_Pres_loc"){ + # La Durance e Val-des-Pres [Les Alberts] + StationsOI=c("X0010010") + NamesOI=c("La Durance e Val-des-Pres [Les Alberts]") + } + if(bv=="Archiane_loc"){ + # - L Archiane ? Treschenu-Creyers [Men?e] + StationsOI=c("V4226010") + NamesOI=c("L Archiane ? Treschenu-Creyers [Men?e]") + meltperiod=seq(1,120) + } + if(bv=="Averole_loc"){ + # - L Averole a Bessans [Averole] + StationsOI=c("W1006010") + NamesOI=c("L Averole a Bessans [Averole]") + } + if(bv=="Issole_loc"){ + # - L Issole ? Saint-Andr?-les-Alpes [Mourefrey] + StationsOI=c("X2114010") + NamesOI=c("L Issole ? Saint-Andr?-les-Alpes [Mourefrey]") + meltperiod=seq(1,120) + } + if(bv=="Borne_loc"){ + # - Le Borne ? Saint-Jean-de-Sixt + StationsOI=c("V0205420") + NamesOI=c("Le Borne ? Saint-Jean-de-Sixt") + } + if(bv=="Doron_loc"){ + # - Le Doron de Bozel ? la Perri?re [Vignotan] + StationsOI=c("W0224010") + NamesOI=c("Le Doron de Bozel ? la Perri?re [Vignotan]") + } + + OBS=treatobs(StationsOI) + trace_graphes(StationsOI, NamesOI, SIM, OBS, pdfname, pdfrep) +} + + +# Trace Graphe AVANT - APRES NIU +# ---------------------------------------------------------------------------- +tracegraphe_avant_apres=function(REF, SIM, OBS, bv, pdfrep,pdfname,colAPRES=NULL){ + if (is.null(colAPRES)){colAPRES='coral'} + source('lib/aggregateZoo_functions.r') + library(lubridate) + library(xts) + pdf(paste(pdfrep,pdfname,sep=''),paper="special",width=12,height=8) + + OBSm=aggregateZoo(OBS,'my',mean) + SIMm=aggregateZoo(SIM,'my',mean) + REFm=aggregateZoo(REF,'my',mean) + + OBScycle=aggregateZoo(OBS, "m", mean) + SIMcycle=aggregateZoo(SIM, "m", mean) + REFcycle=aggregateZoo(REF, "m", mean) + + layout(matrix(c(1,3,4,2,5,5),3,2)) + par (pty="m") # Paramètre pour avoir un graphe de taille max + + # 1. 
débits mensuels + plot(OBSm,type='l',ylim=c(0,max(na.omit(REFm),na.omit(OBSm))),main=bv, xlab='Date',ylab='Q(m3/s)', cex.main=2) + lines(REFm,col=2) + legend("topleft",legend=c("OBS", "SIM Ref"),y.intersp = 1, lty= 1,bty="n",col = c(1,2),xpd=NA,cex=0.8) + mtext("débits mensuels REF-MDR",3,line=0.2,cex=.7) + + plot(OBSm,type='l',ylim=c(0,max(na.omit(REFm),na.omit(OBSm))),main="", xlab='Date',ylab='Q(m3/s)', cex.main=2) + lines(SIMm,col=colAPRES) + legend("topleft",legend=c("OBS", "SIM"),y.intersp = 1, lty= 1,bty="n",col = c(1,colAPRES),xpd=NA,cex=0.8) + mtext("débits mensuels Action NEIGE",3,line=0.2,cex=.7) + + # 2. Tracé d'une fenêtre de débits journaliers + par (pty="m") + date1=as.POSIXct("2000-01-01") + date2=as.POSIXct("2003-12-31") + + index1=which(time(OBS)==date1) + index2=which(time(OBS)==date2) + + plot(OBS[index1:index2],type='l',main='',ylim=c(0,max(REF[index1:index2],na.omit(OBS[index1:index2]))), xlab='Date',ylab='Q(m3/s)', xaxt="n") + lines(REF[index1:index2],col=2) + legend("topleft",legend=c("OBS", "SIM Ref"),y.intersp = 1, lty= 1,bty="n",col = c(1,2),xpd=NA,cex=0.8) + axis(1, at=seq(date1,date2, by="month"), labels=month(seq(date1,date2, by="month"))) + mtext(c(year(seq(date1,date2, by="year"))), 1, at=seq(date1,date2, by="year"), line=1.7, cex=0.8) + mtext("débits journaliers REF-MDR",3,line=0.2,cex=.7) + + plot(OBS[index1:index2],type='l',main='',ylim=c(0,max(REF[index1:index2],na.omit(OBS[index1:index2]))), xlab='Date',ylab='Q(m3/s)', xaxt="n") + lines(SIM[index1:index2],col=colAPRES) + legend("topleft",legend=c("OBS", "SIM"),y.intersp = 1, lty= 1,bty="n",col = c(1,colAPRES),xpd=NA,cex=0.8) + axis(1, at=seq(date1,date2, by="month"), labels=month(seq(date1,date2, by="month"))) + mtext(c(year(seq(date1,date2, by="year"))), 1, at=seq(date1,date2, by="year"), line=1.7, cex=0.8) + mtext("débits journaliers Action Neige",3,line=0.2,cex=.7) + + # 3. 
Cycles + MAX<- max(na.omit(OBScycle),na.omit(SIMcycle),na.omit(REFcycle)) # hauteur max du graphe + par (pty="m") + plot(OBScycle,main = "Moyenne mensuelle interannuelle des débits",ylim=c(0, MAX*1.1),font.lab=2, + type="l",xaxt = "n",xlab="Months",ylab="Q(m3/s)") + lines(REFcycle, col=2) + lines(SIMcycle, col=colAPRES) + ticks<-as.numeric(time(OBScycle)) + labs <- substr(month.abb,1,1) + Axis(side = 1, at = ticks, labels = labs) + legend("topleft",legend=c("OBS", "SIM Ref", "SIM"),y.intersp = 1, lty= 1,bty="n",col = c(1,2,colAPRES),xpd=NA,cex=0.8) + dev.off() +} + +# REGRESSIONS SIM-OBS et OBS-SIM pdt melt +# ---------------------------------------------------------------------------- +trace_SIM_OBS_reg_meltperiod=function(OBS,simREF,meltperiod){ + layout(matrix(c(1,2),1,2)) + XX=as.numeric(OBS[which(yday(simREF)%in%meltperiod),]) + YY=as.numeric(simREF[which(yday(simREF)%in%meltperiod),]) + + plot(XX,YY, xlim=c(0,80),ylim=c(0,80),xlab='OBS',ylab='SIM', main='lin reg SIM vs OBS in meltperiod') + reg=lm(YY~XX) + abline(0,1, col='red') + abline(reg) + text(15,80, labels=paste('slope=', round(coef(reg)[2],2), '\n intercept=',round(coef(reg)[1],2), sep=""), pos=1) + + reg2=lm(XX~YY) + plot(YY, XX, xlim=c(0,80),ylim=c(0,80),xlab='SIM',ylab='OBS', main='lin reg OBS vs SIM in meltperiod') + abline(0,1, col='red') + abline(reg2) + text(15,80, labels=paste('slope=', round(coef(reg2)[2],2), '\n intercept=',round(coef(reg2)[1],2), sep=""), pos=1) +} + + +# crit_stats +# ------------------------------------------------------------------ +source("lib/readwrite_functions_J2000.R") +Sys.setenv(TZ='UTC') + +#KGE classique par jour + +KGEday <- function(sim,OBS) +{ + library(hydroGOF) + KGEd=KGE(as.numeric(sim), as.numeric(OBS)) + KGEd +} + +#KGE par sur tous les mois de la p?riode + +KGEmonth <- function(sim,OBS) +{ + library(hydroGOF) + KGEm=KGE(as.numeric(aggregateZoo(sim,'my',mean)),as.numeric(aggregateZoo(OBS,'my',mean)),na.rm=T) + KGEm +} + +#KGE sur le cycle par mois + +KGEcycle <- function(sim,OBS) +{ + library(hydroGOF) + KGEc=KGE(as.numeric(aggregateZoo(sim,'m',mean)),as.numeric(aggregateZoo(OBS,'m',mean))) + KGEc +} + +# KGE seulement sur les jours de fonte unique # jours avec moins de 1 mm de precip lorsqu'il fait plus de 1?C +# 8-03-2017 :: introduction de meltperiod + +KGEwithoutRainyDays <- function(sim,OBS,inputfolder,rainfilename,tempfilename,meltperiod) +{ + sim=sim[which(yday(sim)%in%meltperiod)] + OBS=OBS[which(yday(OBS)%in%meltperiod)] + Rain=ReadJ2000_Input(inputfolder,rainfilename) + Temp=ReadJ2000_Input(inputfolder,tempfilename) + + Rainmean=rowMeans(Rain) + Tempmean=rowMeans(Temp) + + RainyDays=which((Rainmean>1 & Tempmean>1)) + RainyDaysp1=RainyDays+1 + RainyDaysp2=RainyDays+2 + + period=index(Rain) + + periodwRainyDays=period[-c(RainyDays,RainyDaysp1,RainyDaysp2)] + + simwRD=sim[periodwRainyDays,] + OBSwRD=OBS[periodwRainyDays,] + + KGEwRD=KGE(simwRD,OBSwRD) + KGEwRD +} + +# KGE seulement sur les jours de pluie durant la p?riode de fonte # jours avec plus de 1 mm de precip lorsqu'il fait plus de 1?C + +KGERainyDays <- function(sim,OBS,inputfolder,rainfilename,tempfilename,meltperiod) +{ + sim=sim[which(yday(sim)%in%meltperiod)] + OBS=OBS[which(yday(OBS)%in%meltperiod)] + + Rain=ReadJ2000_Input(inputfolder,rainfilename) + Temp=ReadJ2000_Input(inputfolder,tempfilename) + + Rainmean=apply(Rain, 1, 'mean') + Tempmean=apply(Temp, 1, 'mean') + + RainyDays=which((Rainmean>1 & Tempmean>1)) + + period=time(Rain) + + periodRainyDays=period[RainyDays] + simRD=sim[periodRainyDays,] + 
OBSRD=OBS[periodRainyDays,] + + KGERD=KGE(as.numeric(simRD), as.numeric(OBSRD)) + KGERD +} + +# KGE seulement sur les jours de fonte unique # jours avec moins de 1 mm de precip lorsqu'il fait plus de 1?C # avec un d?bit sup?rieur ? 1,5 le d?bit moyen + +KGEwithoutRainyDayswithstrongDischarge <- function(sim,OBS,inputfolder,rainfilename,tempfilename,meltperiod) +{ + + sim=sim[which(yday(sim)%in%meltperiod)] + OBS=OBS[which(yday(OBS)%in%meltperiod)] + + sD=mean(OBS,na.rm=T)*1.5 + + Rain=ReadJ2000_Input(inputfolder,rainfilename) + Temp=ReadJ2000_Input(inputfolder,tempfilename) + + Rainmean=rowMeans(Rain) + Tempmean=rowMeans(Temp) + + RainyDays=which((Rainmean>1 & Tempmean>1)) + RainyDaysp1=RainyDays+1 + RainyDaysp2=RainyDays+2 + + period=index(Rain) + + periodwRainyDays=period[-c(RainyDays, RainyDaysp1, RainyDaysp2) ] + simwRD=sim[periodwRainyDays,] + OBSwRD=OBS[periodwRainyDays,] + + simwRDsD=simwRD[which(OBSwRD>sD)] + OBSwRDsD=OBSwRD[which(OBSwRD>sD)] + + KGEwRDsD=KGE(as.numeric(simwRDsD), as.numeric(OBSwRDsD)) + KGEwRDsD +} + +# calc_stats +# - inclut KGE_rainy_days donc besoin de rainf et T°C files +# CAVEATS : ne fonctionne que pr 8 BV AEN +# CAVEATS : codage des noms et aires de BV en dur (pr calcul des biais absolu en mm/m2) + +calc_stats=function(simREF,OBS,inputfolder,rainfilename,tempfilename, bv, meltperiod, exp=NULL){ + library(hydroGOF) + mystat=NULL + NSEd=NSE(simREF,OBS) + KGEd=KGEday(simREF,OBS) + KGEm=KGEmonth(simREF,OBS) + KGEc=KGEcycle(simREF,OBS) + KGEwRD=KGEwithoutRainyDays(simREF,OBS,inputfolder,rainfilename,tempfilename,meltperiod) # memtpriod arvan ori : 04->07 + KGEwRDsD=KGEwithoutRainyDayswithstrongDischarge(simREF,OBS,inputfolder,rainfilename,tempfilename,meltperiod) + KGERD=KGERainyDays(simREF,OBS,inputfolder,rainfilename,tempfilename,meltperiod) + KGEmelt=KGEday(simREF[which(yday(simREF)%in%meltperiod),],OBS[which(yday(OBS)%in%meltperiod),]) + pBiais=pbias(simREF,OBS) # % + pBiais_melt=pbias(simREF[which(yday(simREF)%in%meltperiod),],OBS[which(yday(OBS)%in%meltperiod),]) # % + + # Surfaces des BV en m2 + + area=c(192280000,60840000,200360000,36560000,38720000,13932000,73120000,340640000) + names(area)=c("Arve_loc","Arvan_Amont_loc","Durance_Val_Pres_loc","Archiane_loc","Averole_loc","Issole_loc","Borne_loc","Doron_loc") + + #aBiais=(mean(simREF)-mean(OBS,na.rm=T))*3600*24*365.25/as.numeric(area[bv])*1000 # m/yr...-> IG : mm/yr. 7/11/2016 + # AU SECOURS !!! Maxime n'avait pas décompté les missing-val dans ses sim... 
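+  # me() from hydroGOF averages (sim - obs) pairwise, so days with missing observations drop
+  # out of the bias; the scaling below converts m3/s into mm (per m2) per year over the
+  # catchment area.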
+ aBiais=me(simREF,OBS)*3600*24*365.25/as.numeric(area[bv])*1000 # mm(/m2)/yr + aBiais_melt=me(simREF[which(yday(simREF)%in%meltperiod),],OBS[which(yday(OBS)%in%meltperiod),])*3600*24*length(meltperiod)/as.numeric(area[bv])*1000 # mm(/m2)/yr + + KGEmoy=mean(c(KGEwRD,KGEwRDsD)) + + pdischarge_meltperiod_sim=mean(simREF[which(yday(simREF)%in%meltperiod),])*length(meltperiod)/mean(simREF)/365.25*100 # % + + mystat=rbind(mystat,c(KGEd,KGEm,KGEc,KGEwRD,KGEwRDsD,KGERD,KGEmelt,pBiais,aBiais,pBiais_melt, aBiais_melt,pdischarge_meltperiod_sim, KGEmoy,NSEd)) + + colnames(mystat)=c("KGEd","KGEm","KGEc","KGEwRD","KGEwRDsD","KGERD","KGEmelt","pBiais","aBiais","pBiais_melt","aBiais_melt","pdischarge_meltperiod_sim","KGEmoy",'NSEd') + rownames(mystat)=paste(bv,exp,sep='') + mystat +} + +# calc_stats_simple +# CAVEATS : ne fonctionne que pr 8 BV AEN +calc_stats_simple=function(simREF,OBS,bv, meltperiod, exp=NULL){ + library(hydroGOF) + mystat=NULL + NSEd=NSE(simREF,OBS) + KGEd=KGEday(simREF,OBS) + KGEm=KGEmonth(simREF,OBS) + KGEc=KGEcycle(simREF,OBS) + + KGEmelt=KGEday(simREF[which(yday(simREF)%in%meltperiod),],OBS[which(yday(OBS)%in%meltperiod),]) + pBiais=pbias(simREF,OBS) # % + pBiais_melt=pbias(simREF[which(yday(simREF)%in%meltperiod),],OBS[which(yday(OBS)%in%meltperiod),]) # % + + pdischarge_meltperiod_sim=mean(simREF[which(yday(simREF)%in%meltperiod),])*length(meltperiod)/mean(simREF)/365.25*100 # % + + mystat=rbind(mystat,c(KGEd,KGEm,KGEc,KGEmelt,pBiais,pBiais_melt,pdischarge_meltperiod_sim,NSEd)) + + colnames(mystat)=c("KGEd","KGEm","KGEc","KGEmelt","pBiais","pBiais_melt","pdischarge_meltperiod_sim",'NSEd') + rownames(mystat)=paste(bv,exp,sep='') + mystat +} + +# compute_critstat sur liste de BV et d'expériences +# -------------------------------------------------------------- +# - tous les bassins doivent avoir les expériences mentionnées. +# - inclut KGE_rainy_days donc besoin de rainf et T°C files +# CAVEATS : ne fonctionne que pr 8 BV AEN +# CAVEATS : codage des noms et aires de BV en dur (pr calcul des biais absolu en mm/m2) +# +# Critères : "KGEd","KGEm","KGEc","KGEwRD","KGEwRDsD","KGERD","KGEmelt","pBiais","aBiais","pBiais_melt","aBiais_melt","pdischarge_meltperiod_sim","KGEmoy" +# +# shortnames qualifie 'exp' (ex : "SPZ_mail"); il permet de donner un nom plus court que 'exp' pour les légendes... +compute_critstat=function(statfile,experiencelist,pptlist,tmeanlist, bvlist, shortnames=NULL){ + + source("lib/MDR_utilitaires.r") + source("lib/readwrite_functions_J2000.R") + Sys.setenv(TZ='UTC') + library(zoo) + library(chron) + library(hydroGOF) + library(maptools) + library(gdata) + library(lubridate) + library(fmsb) + + + stat=NULL + myrownames=NULL + for (bv in bvlist ){ + for (exp in experiencelist){ + i=which(experiencelist==exp) + ppt=pptlist[i] + tt=tmeanlist[i] + + inputfolder=paste('~/JAMS/submodels/',bv,'/input/local/',sep='') + rainfilename=paste('rain_',ppt,'.dat',sep='') + tempfilename=paste('tmean_',tt,'.dat',sep='') + + simufile=paste('~/JAMS/submodels/',bv,'/output/',exp,'/',sep='') + filename='TimeLoop.dat' + meltperiod=seq(91,213) # DOY, from R2D2 : 1st april - 31 july. + # Original try : encompassing ~ half the year.seq(15,198) + # Exceptions : Issole and Archiane (1st jan -> 1st May) + + if(bv=="Arve_loc"){ + # - Arve à Chamonix Mont Blanc + StationsOI=c("V0002010") + NamesOI=c("L Arve a Chamonix-Mont-Blanc [Pont des Favrands]") + } + if(bv=="Arvan_Amont_loc"){ + # - Arvant Amont ? 
Saint-Jean-D'Arves + StationsOI=c("W1055020") + NamesOI=c("L Arvan a Saint-Jean-d Arves [La Villette]") + } + if(bv=="Durance_Val_Pres_loc"){ + # La Durance e Val-des-Pres [Les Alberts] + StationsOI=c("X0010010") + NamesOI=c("La Durance e Val-des-Pres [Les Alberts]") + } + if(bv=="Archiane_loc"){ + # - L Archiane ? Treschenu-Creyers [Men?e] + StationsOI=c("V4226010") + NamesOI=c("L Archiane ? Treschenu-Creyers [Men?e]") + meltperiod=seq(1,120) + } + if(bv=="Averole_loc"){ + # - L Averole a Bessans [Averole] + StationsOI=c("W1006010") + NamesOI=c("L Averole a Bessans [Averole]") + } + if(bv=="Issole_loc"){ + # - L Issole ? Saint-Andr?-les-Alpes [Mourefrey] + StationsOI=c("X2114010") + NamesOI=c("L Issole ? Saint-Andr?-les-Alpes [Mourefrey]") + meltperiod=seq(1,120) + } + if(bv=="Borne_loc"){ + # - Le Borne ? Saint-Jean-de-Sixt + StationsOI=c("V0205420") + NamesOI=c("Le Borne ? Saint-Jean-de-Sixt") + } + if(bv=="Doron_loc"){ + # - Le Doron de Bozel ? la Perri?re [Vignotan] + StationsOI=c("W0224010") + NamesOI=c("Le Doron de Bozel ? la Perri?re [Vignotan]") + } + deb = '1987-01-01' + fin = '2012-12-31' + OBS=treatobs(StationsOI) + REF=ReadTimeLoopXTS(simufile,filename) + simREF=treatsimu_exutoire(REF) + + mystat=calc_stats(simREF,OBS,inputfolder,rainfilename,tempfilename, bv,meltperiod, shortnames[i]) + tf=as.numeric(system(paste('grep t_factor ~/JAMS/submodels/',bv,'/output/',exp,'/model.jmp | cut -d"=" -f2',sep=""),intern=TRUE)) + + a_asp=as.numeric(system(paste('grep a_asp ~/JAMS/submodels/',bv,'/output/',exp,'/model.jmp | cut -d"=" -f2',sep=""),intern=TRUE)) + if (length(a_asp)==0) {a_asp=0} + + Amp=as.numeric(system(paste('grep Amplitude ~/JAMS/submodels/',bv,'/output/',exp,'/model.jmp | cut -d"=" -f2',sep=""),intern=TRUE)) + if (length(Amp)==0) {Amp=0} + + mystat=cbind(mystat, tf, a_asp, Amp) + + stat=rbind(stat,mystat) + } + } + write.csv(stat,statfile) + return(stat) +} + +# radarcharts +# --------------------------------------------------------------------------- +plot_radarchart=function(stat, expnames=NULL, legend=NULL, title=NULL){ # stat matrix, rows=exp with names, col=statcriteria, legend=boolean: Y or N + radarcrit=cbind(1-abs(stat[,"pBiais"]/100.), stat[,"KGEd"], stat[,"KGEm"], stat[,"KGEc"], stat[,"KGEwRD"], stat[,"KGEwRDsD"], stat[,"KGERD"], stat[,"KGEmelt"], 1-abs(stat[,"pBiais_melt"]/100.)) + radarcrit=apply(radarcrit,c(1,2), function(x) if(x<=0) 0 else x) + colnames(radarcrit)=c('Bilan', 'KGEd', 'KGEm', 'KGEc', "KGEwRD","KGEwRDsD","KGERD","KGEmelt", 'Bilan Fonte') + rownames(radarcrit)=rownames(stat) + radarcrit=as.data.frame(radarcrit) + radarcrit=rbind(rep(1,8),rep(0,8), radarcrit) + + if(!is.null(legend)){layout(matrix(c(1:2),1,2))} + + if(dim(stat)[1]==3){ + colors_border=c( rgb(0.7,0.5,0.1,0.9), rgb(0.8,0.2,0.5,0.9) , rgb(0.2,0.5,0.5,0.9)) + colors_in=c( rgb(0.7,0.5,0.1,0.4), rgb(0.8,0.2,0.5,0.4) , rgb(0.2,0.5,0.5,0.4) ) + colors_border=c(rgb(0.2,0.5,0.5,0.9), 'deepskyblue', 'blue') + colors_in=c(rgb(1,1,1,0),rgb(1,1,1,0),rgb(1,1,1,0)) + radarchart(radarcrit, plwd=1, plty=1, cglcol="grey", cglty=1, axislabcol="grey", caxislabels=seq(0,1,0.25), cglwd=0.8, axistype=1, cex=0.3, calcex=.6, pcol=colors_border, pfcol=colors_in, vlcex=.7, title=title) + if(!is.null(legend)){ + plot.new() + if(!is.null(expnames)){ + legend(x=0.1, y=0.7, legend = expnames, bty = "n", pch=20 , col=colors_border, cex=1, pt.cex=4)} else{ + legend(x=0.1, y=0.7, legend = rownames(stat), bty = "n", pch=20 , col=colors_border, cex=1, pt.cex=4) + } + } + } else { + + radarchart(radarcrit, plwd=1, 
plty=1, cglcol="grey", cglty=1, axislabcol="grey", caxislabels=seq(0,1,0.25), cglwd=0.8, axistype=1, cex=0.5, calcex=.6, vlcex=.7,title=title) + plot.new() + if(!is.null(expnames)){ + legend(x=0.1, y=0.7, legend = expnames, bty = "n", pch=20 , col=c(1:dim(stat)[1]), cex=1, pt.cex=4)} else{ + legend(x=0.1, y=0.7, legend = rownames(stat), bty = "n", pch=20 , col=c(1:dim(stat)[1]), cex=1., pt.cex=4) + } + } +} + +# ceux du rapport AEN +plot_radarchart_simple=function(stat, expnames=NULL, legend=NULL, title=NULL){ # stat matrix, rows=exp with names, col=statcriteria, legend=boolean: Y or N + radarcrit=cbind(1-abs(stat[,"pBiais"]/100.), stat[,"KGEd"], stat[,"KGEm"], stat[,"KGEc"], stat[,"KGEmelt"], 1-abs(stat[,"pBiais_melt"]/100.)) + radarcrit=apply(radarcrit,c(1,2), function(x) if(x<=0) 0 else x) + colnames(radarcrit)=c('Bilan', 'KGEd', 'KGEm', 'KGEc',"KGEmelt", 'Bilan Fonte') + rownames(radarcrit)=rownames(stat) + radarcrit=as.data.frame(radarcrit) + radarcrit=rbind(rep(1,8),rep(0,8), radarcrit) + + #if(!is.null(legend)){layout(matrix(c(1:2),1,2))} + + if(dim(stat)[1]==3){ + colors_border=c( rgb(0.7,0.5,0.1,0.9), rgb(0.8,0.2,0.5,0.9) , rgb(0.2,0.5,0.5,0.9)) + colors_in=c( rgb(0.7,0.5,0.1,0.4), rgb(0.8,0.2,0.5,0.4) , rgb(0.2,0.5,0.5,0.4) ) + #colors_border=c(rgb(0.2,0.5,0.5,0.9), 'deepskyblue', 'blue') + #colors_in=c(rgb(1,1,1,0),rgb(1,1,1,0),rgb(1,1,1,0)) + radarchart(radarcrit, plwd=2, plty=1, cglcol="grey", cglty=1, axislabcol="grey", caxislabels=seq(0,1,0.25), cglwd=2, axistype=1, cex=3, calcex=.6, pcol=colors_border, pfcol=colors_in, vlcex=1.1, title=title, cex.title=2) + # if(!is.null(legend)){ + # plot.new() + # if(!is.null(expnames)){ + # legend(x=0.1, y=0.7, legend = expnames, bty = "n", pch=20 , col=colors_border, cex=1, pt.cex=4)} else{ + # legend(x=0.1, y=0.7, legend = rownames(stat), bty = "n", pch=20 , col=colors_border, cex=1, pt.cex=4) + # } + # } + } else { + + radarchart(radarcrit, plwd=1, plty=1, cglcol="grey", cglty=1, axislabcol="grey", caxislabels=seq(0,1,0.25), cglwd=1.2, axistype=1, cex=3, calcex=.6, vlcex=1.1,title=title) + #plot.new() + # if(!is.null(expnames)){ + # legend(x=0.1, y=0.7, legend = expnames, bty = "n", pch=20 , col=c(1:dim(stat)[1]), cex=1, pt.cex=4)} else{ + # legend(x=0.1, y=0.7, legend = rownames(stat), bty = "n", pch=20 , col=c(1:dim(stat)[1]), cex=1., pt.cex=4) + # } + } +} + + + +# write_dbf_performancefile +#~------------------------------------------------------------------------------- +# Ecrit le .dbf des NSE et MB (Mean Bias) du modèle aux stations MDR retenues +# qui sont dans ~/DATA/SIG_MDR/Perf_modele/Stations_Rhone_retenues_Sim_Ref.dbf +#~------------------------------------------------------------------------------- +write_dbf_performancefile=function(StationsOI, simREF, OBS, performancefile){ + library(foreign) + + dbf=read.dbf('~/DATA/SIG_MDR/Perf_modele/Stations_Rhone_retenues_Sim_Ref.dbf') + + NSE=NULL + MB=NULL + for (i in seq(1,length(StationsOI),1)){ + NSE_tmp=Nash(simREF[,i], OBS[,i]) + MB_tmp=Bias(simREF[,i], OBS[,i]) + NSE=c(NSE,NSE_tmp) + MB=c(MB,MB_tmp) + } + myorder=match(dbf$CODE,StationsOI) + mydbf=data.frame(dbf$CODE,dbf$NOM, dbf$S_BH, NSE[myorder], MB[myorder]) + names(mydbf)=c('CODE','NOM','S_BH','NSE','MB') + write.dbf(mydbf,paste(performancefile, '.dbf', sep="")) + + for (ext in c('.prj', '.qml', '.shp', '.qpj', '.shp', '.shx')){ + system(paste('cp ~/DATA/SIG_MDR/Perf_modele/Stations_Rhone_retenues_Sim_Ref', ext,' ',performancefile,ext, sep="")) + + } + +} + + 
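+
+# Minimal end-to-end sketch (kept inside `if (FALSE)` so it is never executed when this file
+# is sourced) showing how the helpers above are meant to chain together for one catchment.
+# The submodel path, the 'current' output folder and the station code follow the conventions
+# used above and may need adapting; ReadTimeLoopXTS comes from readwrite_functions_J2000.R
+# (sourced above) and the radar plot relies on library(fmsb).
+if (FALSE) {
+  simu <- ReadTimeLoopXTS('~/JAMS/submodels/Arve_loc/output/current/', 'TimeLoop.dat')
+  SIM  <- treatsimu_exutoire(simu)                   # outlet discharge, m3/s
+  OBS  <- treatobs(c("V0002010"))                    # observed discharge, m3/s
+  stat <- calc_stats_simple(SIM, OBS, bv = "Arve_loc", meltperiod = seq(91, 213))
+  plot_radarchart_simple(stat, title = "Arve_loc")   # radar plot of the KGE / bias criteria
+}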
+##################################################################################### +# CHECK BILAN HYDRO J2000 +##################################################################################### + +#~* PROGRAMMER: Isabelle GOUTTEVIN (Irstea Lyon) +#~****************************************************************************** +#~* CREATED/MODIFIED: Created 10-02-2016 +#~****************************************************************************** + + +# fonctionnementHydro +# ----------------------------------------------------------------------------- +#~* IN : - sim (le produit d'un ReadTimeLoopDaily) +#~* - StationsOI (une seule) +#~* - Reaches (un seul) +#~* +#~* OUT : - variables (zoo object, cycle mensuel) pr comprendre le fonctionnement hydro du bassin +#~* =c('precip','actET','snowTotSWE','reachOutRD1','reachOutRG1','reachOutRG1') +#~* =[varOI_h1, ..., varOI_r3] +#~* t1 t1 +#~* ... ... +#~* t12 t12 +#~* units =(mm/d, mm/d, mm/d, mm, mm/d, mm/d, mm/d) +# ----------------------------------------------------------------------------- +# définition des variables d'intérêt +# -h : sur hru ou bassin +# -r : sur reach +fonctionnementHydro=function(sim,StationsOI,Reaches){ + varOI_h=c('precip','actET','snowTotSWE') # hrus (mm,mm,mm) + varOI_r=c('reachOutRD1','reachOutRD2','reachOutRG1', 'simRunoff') # reaches (L/d, L/d, L/d) + coeff_normalisation=c(1,1,1,1./Area, 1./Area,1./Area, 1./Area ) # (all : mm/d or mm) + + # cycle annuel sur ces variables + #----------------------------------- + # - result : variables=[varOI_h1, ..., varOI_r3] + # t1 t1 + # ... ... + # t12 t12 + variables=zoo(NULL) + for (var in varOI_h){ + vareff=paste(StationsOI,var,sep="") + indexe=match(vareff,names(sim)) + chronique=sim[,indexe] + annualcycle=aggregateZoo(chronique, "m", mean) + variables=merge(variables,annualcycle) + } + for (var in varOI_r){ + vareff=paste(var,'_',Reaches, sep="") + indexe=match(vareff,names(sim)) + chronique=sim[,indexe] + annualcycle=aggregateZoo(chronique, "m", mean) + variables=merge(variables,annualcycle) + } + + # normalisation + for (i in (1:dim(variables)[2])){variables[,i]=variables[,i]*coeff_normalisation[i]} + + names(variables)=c(varOI_h,varOI_r) + + return(variables) +} + +# checkBilanHRU +# ------------------------------------------------------------------------------ +#~* vérifie le bilan hydrologique d'une HRU +#~* +#~* IN : - hruloopdir: '/home/..../' where HRULoop.dat is located +#~* - numhru: numéro de la HRU dont on veut le bilan +#~* +#~* OUT : (in)-(out) [mm/m2/yr]. si >0 : le bassin "crée" de l'eau (ou n'en évacue pas assez) +#~* si <0 : le bassin perd de l'eau +#~* +#~* CAVEAT1 : valable seulement en hydrologie naturelle +#~* CAVEAT2 : valable entre une fin et un début de simu où les réservoirs (sol, SWE) ont été initialisés à 0. +#~* CAVEAT3 (à vérifier) : que se passe-t-il si un reach traverse la HRU ? 
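+#~* Example (not run; the output directory and HRU id are illustrative):
+#~*   checkBilanHRU('~/JAMS/submodels/Arve_loc/output/current/', 42)   # balance residual (mm)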
+# ------------------------------------------------------------------------------ +checkBilanHRU=function(hruloopdir, numhru){ + + hruloop=ReadLoopDaily(hruloopdir,'HRULoop.dat', FALSE) + myhru=hruloop$Data[which(hruloop$Data[,1]==numhru),] + myhru=xts(myhru[, c('outRD1','precip','snowTotSWE','actLPS','actMPS', 'actRG1', 'actRG2','actET','outRD2','outRG1','outRG2')],as.POSIXct(hruloop$dates,format='$Y-%m-%d')) + myarea=unique(hruloop$Data[which(hruloop$Data[,1]==numhru),'area']) + N=length(myhru[,1]) + + IN=sum(myhru$precip) + OUT=sum(myhru$actET+myhru$outRD1+myhru$outRD2+myhru$outRG1)/myarea + deltaSWE=myhru$snowTotSWE[N]/myarea + res_soil=(myhru$actRG1+myhru$actRG2+myhru$actMPS+myhru$actLPS)/myarea + deltaSoilRes=res_soil[N] + + Bilan=IN-OUT-deltaSWE-deltaSoilRes #(mm) + return(as.numeric(Bilan)) +} + +# checkBilanSubmodel +# ------------------------------------------------------------------------ +checkBilanSubmodel=function(dir){ + + tloop=ReadTimeLoopIG(dir,'TimeLoop.dat') # 24 variables + hruloop=ReadLoopDaily(dir,'HRULoop.dat', FALSE) + + ids=unique(hruloop$Data[,'ID']) + N=length(ids) + areas=hruloop$Data[,'area'][1:N] + myarea=sum(areas) # m2 + + window='1990-01-01/2012-12-31' # 5 year-spin-up + date1='1990-01-01' + date2='2012-12-31' + + var_station=c('precip','actET','actMPS','actLPS','actRG1','intercStorage','snowTotSWE') + var_reach=c('reachStation', 'channelStorage') + + for (var in var_station){ + assign(var,tloop[window,var]) + } + for (var in var_reach){ + assign(var,tloop[window,var]/myarea) # transfo en mm/m2 + } + + Storage=actMPS+actLPS+actRG1+channelStorage+intercStorage+snowTotSWE # mm/m2 + names(Storage)='Storage' + DeltaStorage=as.numeric(Storage[date2])-as.numeric(Storage[date1]) # RQ : pas de Storage[1] car c'est déja le t-step1 ET les Storage sont nuls au début de la simu + + Err_Bilan=sum(precip)-sum(reachStation)-sum(actET)-DeltaStorage #mm/m2 + + out=c(sum(precip),sum(actET),sum(reachStation),DeltaStorage, Err_Bilan ,actMPS[date2],actLPS[date2],actRG1[date2],channelStorage[date2],intercStorage[date2],snowTotSWE[date2]) + names(out)=c('P (mm/m2)','actET (mm/m2)','Q(mm/m2)', 'DeltaStorage(mm/m2)', 'Err_Bilan (mm/m2)', 'actMPS', 'actLPS','actRG1','channelStorage','intercStorage','snowTotSWE') + + Nbyears=year(date2)-year(date1)+1 + out=out/Nbyears + + return(out) +} + +# checkBilanLoop (hruLoop) +# ------------------------------------------------------------------------- +checkBilanLoop=function(dir, station, brin, hrusinloop){ + + tloop=ReadTimeLoopDaily(dir,'TimeLoop.dat') # 24 variables + hruloop=ReadLoopDaily(dir,'HRULoop.dat', FALSE) + + hruloop1=hruloop$Data[which(hruloop$Data[,'ID']%in%hrusinloop),] + hruloop2=unique(hruloop1) + myarea=sum(hruloop2[,'area']) + + var_station=c('precip','actET','actMPS','actLPS','actRG1','intercStorage','snowTotSWE') + var_reach=c('simRunoff', 'channelStorage') + + for (var in var_station){ + varname=paste(station,var,sep="") + assign(var,tloop[,varname]) + } + for (var in var_reach){ + varname=paste(var,'_',brin,sep="") + assign(var,tloop[,varname]/myarea) # transfo en mm/m2 + } + + tend=dim(tloop)[1] #-1 temporaire + Storage=actMPS+actLPS+actRG1+channelStorage+intercStorage+snowTotSWE # mm/m2 + names(Storage)='Storage' + DeltaStorage=as.numeric(Storage[tend]) # RQ : pas de Storage[1] car c'est déja le t-step1 ET les Storage sont nuls au début de la simu + + Err_Bilan=sum(precip)-sum(simRunoff)-sum(actET)-DeltaStorage #mm/m2 + + out=c(sum(precip),sum(actET),sum(simRunoff),DeltaStorage, Err_Bilan 
,actMPS[tend],actLPS[tend],actRG1[tend],channelStorage[tend],intercStorage[tend],snowTotSWE[tend]) + names(out)=c('P (mm/m2)','actET (mm/m2)','Q(mm/m2)', 'DeltaStorage(mm/m2)', 'Err_Bilan (mm/m2)', 'actMPS', 'actLPS','actRG1','channelStorage','intercStorage','snowTotSWE') + + return(out) +} + +##################################################################################### +# MODIF de FICHIERS de PARAM' de J2000 +##################################################################################### + +# écriture d'un fichier de params modifié en utilisant le header de l'ancien -- NTC +# ---------------------------------------------------------------- +write_new_paramfile=function(oldfile, newvalues ,newfile){ + # récupération du header + nbLines = Lignes_saut1(oldfile) + header = readLines(oldfile, n = nbLines) + # écriture + write.table(header,newfile,sep='\t',col.names=F,row.names=F,quote=F) + write.table(newvalues,newfile,col.names=F,row.names=F,quote=F,append=T,sep='\t') +} + + +# Nombre de lignes du header d'un fichier de paramètre de J2000 +# ---------------------------------------------------------------- +Lignes_saut1 = function(file){ + k <- 0 + obj <- NULL; obj2 <- NULL; obj3 <- NULL + while (length(na.omit(obj)) == 0 | length(na.omit(obj2)) == 0 | length(na.omit(obj3)) == 0) { + + obj <- as.numeric(read.table(file, nrow = 1, skip = k, colClasses = "character"))[1] + obj2 <- as.numeric(read.table(file, nrow = 1, skip = k + 1, colClasses = "character"))[1] + obj3 <- as.numeric(read.table(file, nrow = 1, skip = k + 2, colClasses = "character"))[1] + + k <- k + 1 + } + return(k - 1) +} + + +# **** ajout d'un paramètre supplémentaire au reach.par +# ---------------------------------------------------------------- +# -- NTK +add_param = function(inputdir,oldreachfile,newreachfile,newparamName,newparamVal,newparamUnit){ + + nbLines = Lignes_saut(inputdir,oldreachfile) + headerReach = readLines(paste(inputdir,oldreachfile,sep=''), n = nbLines) + LinesNames = which(substr(headerReach,1,2)=="ID") + Names = read.table(paste(inputdir,oldreachfile,sep=''),nr=1,skip=LinesNames-1) + Names = cbind(Names,newparamName) + Min = read.table(paste(inputdir,oldreachfile,sep=''),nr=1,skip=LinesNames) + Min = cbind(Min,0,0) + Max = read.table(paste(inputdir,oldreachfile,sep=''),nr=1,skip=LinesNames+1) + Max = cbind(Max,9999999,9999999) + Unit = read.table(paste(inputdir,oldreachfile,sep=''),nr=1,skip=LinesNames+2) + Unit = cbind(Unit,newparamUnit) + reach=Chargement_param(inputdir,oldreachfile) + reach = cbind(reach,newparamVal) + + write.table (Names,paste(inputdir,newreachfile,sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=F) + write.table (Min,paste(inputdir,newreachfile,sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=T) + write.table (Max,paste(inputdir,newreachfile,sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=T) + write.table (Unit,paste(inputdir,newreachfile,sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=T) + write.table (reach,paste(inputdir,newreachfile,sep=''),col.names=F, row.names=F,quote=F,sep='\t',append=T) +} + +# chargement d'un fichier hru.par ou reach.par +# ------------------------------------------------------------ +#Inputs : +# - chemin : le dossier dans lequel se situe le fichier desire +# - Name : le nom du fichier desire +#Output : les paramètres (tableau) avec les bons noms de colonne +# +# Astuce : +# - Le code est capable d'identifier la ligne avec les premieres valeurs et de sauter les lignes de texte initiales +# Caveats : ça 
risque de ne plus marcher pour des fichiers avec moins de 3 lignes de données. +# ---------------------------------------------------------- +# -- NTK +Chargement_param = function(chemin,Name){ + k=0 + obj = NULL + obj2 = NULL + obj3 = NULL + while(length(na.omit(obj))==0 | length(na.omit(obj2))==0 | length(na.omit(obj3))==0){ + obj = as.numeric(read.table(paste(chemin,Name,sep=''),nrow=1,skip=k,colClasses="character"))[1] + obj2 = as.numeric(read.table(paste(chemin,Name,sep=''),nrow=1,skip=k+1,colClasses="character"))[1] + obj3 = as.numeric(read.table(paste(chemin,Name,sep=''),nrow=1,skip=k+2,colClasses="character"))[1] + k=k+1} + nbLines = k - 1 + data=read.table(paste(chemin,Name,sep=''),skip=nbLines) + mycolnames=apply(read.table(paste(chemin,Name,sep=''),nrow=1)[1,],1,as.character) + colnames(data)=mycolnames + return(data) +} + + +##################################################################################### +# IRRIGATION / AGRICULTURE +##################################################################################### + +# LU id -> culture +# ------------------- NTC +# vecteur des cultures dominantes à partir du vecteur des landuseID +luid2cult=function(vect_luid){ + cultures=c('Vigne', 'Mais', 'Tournesol', 'Blé dur', 'Maraichage', 'PdT', 'Vergers', 'Prairies', 'Protéagineux', 'Riz', "Jachère","Divers", "Industrielles") + numJ2000_cultures=c(19:31) + res=apply(as.matrix(vect_luid),2,function(X){cultures[match(X,numJ2000_cultures)]}) + as.vector(res) +} + +# Culture -> LU id +# ------------------- +# l'inverse : vecteur LUID dominant à partir du vecteur des cultures +cult2luid=function(vect_cult){ + cultures=c('Vigne', 'Mais', 'Tournesol', 'Blé dur', 'Maraichage', 'PdT', 'Vergers', 'Prairies', 'Protéagineux', 'Riz', "Jachère","Divers", "Industrielles") + numJ2000_cultures=c(19:31) + res=apply(as.matrix(vect_cult),2,function(X){numJ2000_cultures[match(X,cultures)]}) + as.vector(res) +} \ No newline at end of file diff --git a/irrigation-R-codes/lib_old/Soil_proportion_function_francois.r b/irrigation-R-codes/lib_old/Soil_proportion_function_francois.r new file mode 100644 index 0000000000000000000000000000000000000000..d030deebb9dbc64a5a532cc9cca317e1682d8787 --- /dev/null +++ b/irrigation-R-codes/lib_old/Soil_proportion_function_francois.r @@ -0,0 +1,87 @@ +Soil_proportion_function_table <- function(param_folder,Catchment,Name_subCatch,watershed,rewrite){ + +############################################################################ +# param_folder : folder where the parameter files are +# +# parameter files (hgeo, landuse and soils) need 2 columns : one with the index used in hrus.par and a second with the description of the parameter (format : parameter.csv with sep =";") ! +# Put the hrus.par file too in your folder! 
+# +# Catchment : Name of the catchment +# Name_subCatch : if you have sub-catchment, name of one sub-catchment, else NULL +# watershed : watershed corresponding to the sub-catchment if you have one, else NULL +# rewrite : if TRUE, the code is creating a new file +############################################################################ + + +#Read landuse description +param_landuse <- read.table(paste(param_folder,'landuse.csv',sep=''),sep=';') +#Read soils description +param_soil <- read.table(paste(param_folder,'soils.csv',sep=''),sep=';') +#Read hgeo description +param_hgeo <- read.table(paste(param_folder,'hgeo.csv',sep=''),sep=';') + +#Read HRU.par +hrus2 <- read.table(paste(param_folder,'hrus.par',sep=''),col.names=read.table(paste(param_folder,'hrus.par',sep=''),skip=1,nr=1,colClasses="character"),skip=5) + + +if (length(Name_subCatch) != 0){ +if (length(watershed) != 0){ +#Extraction of the hrus of the sub-catchment +hrus <- NULL +NbWatershed <- length(watershed) +nb <- 0 +while(nb != NbWatershed){ +nb <- nb + 1 +hrus <- rbind(hrus,hrus2[(hrus2$watershed == watershed[nb]),]) +} +} +} else {hrus <- hrus2} + + +#Rewrite the file if necessary +if(rewrite){write.table(NULL,paste(param_folder,"Proportion_",Catchment,".csv",sep=""),col.names=F,row.names=F,quote=F,sep=';',append=F)} + + +slope <- sum(hrus[,2]*hrus[,4])/sum(hrus[2]*1) + +hgeo <- (hrus$hgeoID) +hgeo2 <- NULL +for (i in c (1:length(unique (hgeo)))){ +hgeo3 <- NULL +hgeo3 <- c(as.character(param_hgeo[param_hgeo[,1]==unique(hgeo)[i],2]),round(sum((hrus[hrus$hgeoID == unique(hgeo)[i],2])*1)/sum(hrus[,2]*1),2)) +hgeo2 <- rbind(hgeo2,hgeo3) +} +hgeo2 <- hgeo2[order(as.numeric(hgeo2[,2]),decreasing=TRUE),] + + + + + +soil <- (hrus$soilID) +soil2 <- NULL +for (i in c (1:length(unique (soil)))){ +soil3 <- NULL +soil3 <- c(as.character(param_soil[param_soil[,1]==unique(soil)[i],2]),round(sum((hrus[hrus$soilID == unique(soil)[i],2])*1)/sum(hrus[,2]*1),2)) +soil2 <- rbind(soil2,soil3) +} +soil2 <- soil2[order(as.numeric(soil2[,2]),decreasing=TRUE),] + + +landuse <- (hrus$landuseID) +landuse2 <- NULL +for (i in c (1:length(unique (landuse)))){ +landuse3 <- NULL +landuse3 <- c(as.character(param_landuse[param_landuse[,1]==unique(landuse)[i],2]),round(sum((hrus[hrus$landuseID == unique(landuse)[i],2])*1)/sum(hrus[,2]*1),2)) +landuse2 <- rbind(landuse2,landuse3) +} +landuse2 <- landuse2[order(as.numeric(landuse2[,2]),decreasing=TRUE),] + +Recap <- cbind(rbind(cbind('hgeo',if(length(dim(hgeo2))==0){t(hgeo2)}else{hgeo2}),cbind('soil',if(length(dim(soil2))==0){t(soil2)}else{soil2}),cbind('landuse',if(length(dim(landuse2))==0){t(landuse2)}else{landuse2}))) +colnames (Recap) <- c('param','value','Proportion') +Area_Catchment <- sum(as.numeric(hrus$area))/1000000 +rownames(Recap)=NULL + + +return(list(x = cbind(param=as.character(coredata(Recap[,1])),value=as.character(coredata(Recap[,2])),proportion = as.character(coredata(Recap[,3]))),Area = Area_Catchment)) +} + diff --git a/irrigation-R-codes/lib_old/Soil_proportion_function_library.r b/irrigation-R-codes/lib_old/Soil_proportion_function_library.r new file mode 100644 index 0000000000000000000000000000000000000000..b7f5e63eb66e2afc8fc284b2e949ca626b421cfc --- /dev/null +++ b/irrigation-R-codes/lib_old/Soil_proportion_function_library.r @@ -0,0 +1,93 @@ +Soil_proportion_function <- function(param_folder,Catchment,Name_subCatch,watershed,rewrite){ + +############################################################################ +# param_folder : folder where the parameter files are +# +# 
parameter files (hgeo, landuse and soils) need 2 columns : one with the index used in hrus.par and a second with the description of the parameter (format : parameter.csv with sep =";") ! +# Put the hrus.par file too in yout folder! +# +# Catchment : Name of the catchment +# Name_subCatch : if you have sub-catchment, name of one sub-catchment, else NULL +# watershed : watershed corresponding to the sub-catchment if you have one, else NULL +# rewrite : if TRUE, the code is creating a new file +############################################################################ + + +#Read landuse description +param_landuse <- read.table(paste(param_folder,'landuse.csv',sep=''),sep=';') +#Read soils description +param_soil <- read.table(paste(param_folder,'soils.csv',sep=''),sep=';') +#Read hgeo description +param_hgeo <- read.table(paste(param_folder,'hgeo.csv',sep=''),sep=';') + +#Read HRU.par +hrus2 <- read.table(paste(param_folder,'hrus.par',sep=''),col.names=read.table(paste(param_folder,'hrus.par',sep=''),skip=1,nr=1,colClasses="character"),skip=5) + + +if (length(Name_subCatch) != 0){ +if (length(watershed) != 0){ +#Extraction of the hrus of the sub-catchment +hrus <- NULL +NbWatershed <- length(watershed) +nb <- 0 +while(nb != NbWatershed){ +nb <- nb + 1 +hrus <- rbind(hrus,hrus2[(hrus2$watershed == watershed[nb]),]) +} +} +} else {hrus <- hrus2} + + +#Rewrite the file if necessary +if(rewrite){write.table(NULL,paste(param_folder,"Proportion_",Catchment,".csv",sep=""),col.names=F,row.names=F,quote=F,sep=';',append=F)} + + +slope <- sum(hrus[,2]*hrus[,4])/sum(hrus[2]*1) + +hgeo <- (hrus$hgeoID) +hgeo2 <- NULL +for (i in c (1:length(unique (hgeo)))){ +hgeo3 <- NULL +hgeo3 <- c(as.character(param_hgeo[param_hgeo[,1]==unique(hgeo)[i],2]),round(sum((hrus[hrus$hgeoID == unique(hgeo)[i],2])*1)/sum(hrus[,2]*1),2)) +hgeo2 <- rbind(hgeo2,hgeo3) +} +hgeo2 <- hgeo2[order(as.numeric(hgeo2[,2]),decreasing=TRUE),] + + + + + +soil <- (hrus$soilID) +soil2 <- NULL +for (i in c (1:length(unique (soil)))){ +soil3 <- NULL +soil3 <- c(as.character(param_soil[param_soil[,1]==unique(soil)[i],2]),round(sum((hrus[hrus$soilID == unique(soil)[i],2])*1)/sum(hrus[,2]*1),2)) +soil2 <- rbind(soil2,soil3) +} +soil2 <- soil2[order(as.numeric(soil2[,2]),decreasing=TRUE),] + + +landuse <- (hrus$landuseID) +landuse2 <- NULL +for (i in c (1:length(unique (landuse)))){ +landuse3 <- NULL +landuse3 <- c(as.character(param_landuse[param_landuse[,1]==unique(landuse)[i],2]),round(sum((hrus[hrus$landuseID == unique(landuse)[i],2])*1)/sum(hrus[,2]*1),2)) +landuse2 <- rbind(landuse2,landuse3) +} +landuse2 <- landuse2[order(as.numeric(landuse2[,2]),decreasing=TRUE),] + +Recap <- cbind(rbind(cbind('hgeo',if(length(dim(hgeo2))==0){t(hgeo2)}else{hgeo2}),cbind('soil',if(length(dim(soil2))==0){t(soil2)}else{soil2}),cbind('landuse',if(length(dim(landuse2))==0){t(landuse2)}else{landuse2}))) +colnames (Recap) <- c('param','value','Proportion') + +#Write the name of the sub-catchment +write.table(rbind("---------------------------------------------------------------------",ifelse(length(Name_subCatch)!=0,Name_subCatch,Catchment),""),paste(param_folder,"Proportion_",Catchment,".csv",sep=""),col.names=F,row.names=F,quote=F,sep='\t',append=T) + + +write.table(Recap,paste(param_folder,"Proportion_",Catchment,".csv",sep=""),row.names=F,col.names=T,quote=F,sep=';',dec='.',append=T) + +# Add the catchment area in the file +Area_Catchment <- sum(as.numeric(hrus$area)) +write.table(rbind("",paste("Catchment area : ", round(Area_Catchment/1000000,0)," 
km2",sep="")),paste(param_folder,"Proportion_",Catchment,".csv",sep=""),col.names=F,row.names=F,quote=F,sep=';',append=T) + +} + diff --git a/irrigation-R-codes/lib_old/aggregateZoo_functions.r b/irrigation-R-codes/lib_old/aggregateZoo_functions.r new file mode 100644 index 0000000000000000000000000000000000000000..6134130ddb2527afa8a0f38755e0c6f6f88cedfe --- /dev/null +++ b/irrigation-R-codes/lib_old/aggregateZoo_functions.r @@ -0,0 +1,69 @@ +#~****************************************************************************** +#~*Aggregate zoo object at different time step +#~****************************************************************************** +#~* PROGRAMMER: Meriem Labbas, Irstea Lyon +#~****************************************************************************** +#~* CREATED/MODIFIED: Created February 2014 +#~****************************************************************************** +#~* CONTENTS +#~* 1. aggregateZoo +#~****************************************************************************** +#~* COMMENTS : ONLY FOR ZOO OBJECT WITH AN INDEX CREATED WITH THE CHRON FUNCTION +#~****************************************************************************** + +require(chron) +require(zoo) + + + +aggregateZoo <- function (z, timeStep, sumOrMeanFunction){ # NTC + #^****************************************************************************** + #^* IN + #^* 1. zoo object + #^* 2. type of time step aggregation : + #^* - "dmy"/"d" for daily aggregation, // 'd' add by IG + #^* - "my" for mensual aggregation, + #^* - "m" for interannual mensual aggregation, + #^* - "sy" for seasonnal aggregation, + #^* - "s" for interannual seasonnal aggregation + #^* 3. type of aggregation : "mean" or "sum" + #^* OUT + #^* 1. zoo object + #^****************************************************************************** + + if(timeStep == "h"){ + return (aggregate(z, function(x) trunc(x, "01:00:00"), sumOrMeanFunction,na.rm = TRUE)) + } + + if(timeStep == "dmy"){ + return (aggregate(z, time(z) - as.numeric(time(z)) %% 1, sumOrMeanFunction,na.rm = TRUE)) + } + if(timeStep == "d"){ + return (aggregate(z, as.Date(as.POSIXct(time(z))), sumOrMeanFunction,na.rm = TRUE)) + } + + if(timeStep == "my"){ + return (aggregate(z, as.Date(as.yearmon(time(z))), sumOrMeanFunction,na.rm = TRUE)) + } + + if(timeStep == "y"){ + return (aggregate(z, format(as.Date(index(z)), '%Y'), sumOrMeanFunction,na.rm = TRUE)) + } + + if(timeStep == "m"){ + return (aggregate(z, format(as.Date(index(z)), '%m'), sumOrMeanFunction,na.rm = TRUE)) + } + + if(timeStep == "sy"){ + return (aggregate(z, as.Date(as.yearqtr(time(z))), sumOrMeanFunction,na.rm = TRUE)) + } + + if(timeStep == "s"){ + return (aggregate(z, quarters(time(z)), sumOrMeanFunction,na.rm = TRUE)) + } + + print("Type not understood") + +} + + diff --git a/irrigation-R-codes/lib_old/criteria_functions.R b/irrigation-R-codes/lib_old/criteria_functions.R new file mode 100644 index 0000000000000000000000000000000000000000..df819e1aff0ecf64162cb81e8b04d083b26aab00 --- /dev/null +++ b/irrigation-R-codes/lib_old/criteria_functions.R @@ -0,0 +1,154 @@ +#### Criteria +#### This file contains functions for calculating validation criteria on runoff for a hydrological model (fixed time step!!) 
+ +### Function Nash +### function that computes the Nash-Sutcliffe criterion of a simulated time series against observations +## Inputs: +### sim = univariate zoo object containing simulatedRunoff (any fixed time step) +### obs = univariate zoo object containing obsRunoff (any fixed time step) +Nash <- function(sim, obs) +{ + # Calculate Nash + Nash <- 1 - sum(((sim - obs))^2, na.rm = TRUE)/ + sum(((obs- mean(obs, na.rm = TRUE)))^2, na.rm = TRUE); + + Nash +} + +### Function sqrtNash +### function that computes the sqrt Nash criterion of a time series +### sim = univariate zoo object containing simulatedRunoff (any time step, runoff in m3/s) +### obs = univariate zoo object containing obsRunoff (any time step, runoff in m3/s) +sqrtNash <- function(sim, obs) +{ + # Calculate Nash + Nash <- 1 - sum(((sqrt(sim) - sqrt(obs))^2), na.rm = TRUE)/ + sum(((sqrt(obs)- mean(sqrt(obs),na.rm=TRUE))^2), na.rm = TRUE); + + Nash +} + +### Function logNash +### function that computes the log Nash criterion of a time series (adds a small value to all Q values to avoid log(0) problems) +### sim = univariate zoo object containing simulatedRunoff (any time step, runoff in m3/s) +### obs = univariate zoo object containing obsRunoff (any time step, runoff in m3/s) +### value to add to all the discharges to avoid problems (typically meanannualflow / 40) +### be careful the value chosen might slightly change the value of the Nash criterion +logNash <- function(sim, obs, value) +{ + # Calculate Nash + Nash <- 1 - sum(((log(sim+value) - log(obs+value))^2), na.rm = TRUE)/ + sum(((log(obs+value)- mean(log(obs+value),na.rm=TRUE))^2), na.rm = TRUE); + + Nash +} + + +### Function bias +### function that computes simulation bias in % +### sim = univariate zoo object containing simulatedRunoff (any time step, runoff in m3/s) +### obs = univariate zoo object containing obsRunoff (any time step, runoff in m3/s) +Bias <- function(sim,obs) +{ + # Calculate bias in % + Bias <- sum((sim - obs), na.rm = TRUE)/ + sum(obs, na.rm = TRUE)*100; + +Bias +} + +### Function abias +### function that computes absolute simulation bias in % +### sim = univariate zoo object containing simulatedRunoff (any time step, runoff in m3/s) +### obs = univariate zoo object containing obsRunoff (any time step, runoff in m3/s) +aBias <- function(sim,obs) +{ + # Calculate bias in % + Bias <- sum(abs(sim - obs), na.rm = TRUE)/ + sum(obs, na.rm = TRUE)*100; + +Bias +} + +### Function RMSE +### function that computes the root mean square error +### sim = univariate zoo object containing simulatedRunoff (any time step, runoff in m3/s) +### obs = univariate zoo object containing obsRunoff (any time step, runoff in m3/s) +RMSE <- function(sim,obs) +{ + # Calculate the mean squared error + var <- sum((sim - obs)^2, na.rm = TRUE)/ + length(sim); + # RMSE is the square root of the mean squared error + sqrt(var) +} + +### Function corr +### function that computes the linear (Pearson) correlation between sim and obs (complete observations only)
+### sim = univariate zoo object containing simulatedRunoff (any time step, runoff in m3/s) +### obs = univariate zoo object containing obsRunoff (any time step, runoff in m3/s) +corr <- function(sim,obs) +{ + # Calculate correlation + cor(sim, obs, use="complete.obs"); + +} + + +### Function quant +### function that gives the 10% and 90% quantiles of a series (applied to both sim and obs) +### sim = univariate zoo object containing simulatedRunoff (any time step, runoff in m3/s) +quant <- function(sim) +{ + # Calculate quantiles + quant_sim <- quantile(na.omit(sim),probs=c(0.1,0.9)) + return(quant_sim) +} + + +NashRel <- function(sim, obs) +{ + # Calculate the Nash criterion on relative errors + NashRel <- 1 - sum(((sim - obs)/(obs+0.01*mean(obs, na.rm = TRUE)))^2, na.rm = TRUE)/ + sum(((obs- mean(obs, na.rm = TRUE))/mean(obs+0.01*mean(obs, na.rm = TRUE), na.rm = TRUE))^2, na.rm = TRUE); + NashRel +} + +NashInv <- function(sim, obs) +{ + # Calculate the Nash criterion on inverse discharges + InvSim <- 1/(sim+0.01*mean(obs, na.rm = TRUE)); + InvObs <- 1/(obs+0.01*mean(obs, na.rm = TRUE)); + NashInv <- 1 - sum((InvSim - InvObs)^2, na.rm = TRUE)/sum((InvObs- mean(InvObs, na.rm = TRUE))^2, na.rm = TRUE); + NashInv +} + + +### Function SetOfCriteria +### function that combines several of the functions above +### (Nash, NashInv, sqrtNash, logNash, NashRel, RMSE, Bias, aBias, quantiles) +### Input +### sim = univariate zoo object containing simulatedRunoff (any time step, runoff in m3/s) +### obs = univariate zoo object containing obsRunoff (any time step, runoff in m3/s) +### value to add to all the discharges to avoid problems (typically meanannualflow / 40) +SetOfCriteria <- function(sim, obs, value) +{ + # Calculate all the criteria + Nash <- Nash(sim,obs) + NashInv <- NashInv(sim,obs) + sqrtNash <- sqrtNash(sim,obs) + logNash <- logNash(sim,obs,value) + NashRel <- NashRel(sim,obs) + RMSE <- RMSE(sim,obs) + Bias <- Bias(sim,obs) + aBias <- aBias(sim,obs) + quant_sim <- quant(sim) + quant_obs <- quant(obs) + R10_R90 <- quant_sim/quant_obs + QmeanSim <- mean(sim) + QmeanObs <- mean(obs,na.rm=TRUE) + QmaxSim <- max(sim) + QmaxObs <- max(obs,na.rm=TRUE) + + # return a vector combining all these results + c(Nash,NashInv,sqrtNash,logNash,NashRel,RMSE , Bias, aBias,quant_sim,quant_obs,R10_R90,QmeanSim,QmeanObs,QmaxSim,QmaxObs) +} diff --git a/irrigation-R-codes/lib_old/readwrite_functions_J2000.R b/irrigation-R-codes/lib_old/readwrite_functions_J2000.R new file mode 100644 index 0000000000000000000000000000000000000000..507b368d9507a9956f85059e8b429a26b838e022 --- /dev/null +++ b/irrigation-R-codes/lib_old/readwrite_functions_J2000.R @@ -0,0 +1,582 @@ +### function ReadObs: reads JAMS input files for observed time series + +# this function reads input time series files for the JAMS/J2000 model +# and organizes the data as a univariate or multivariate zoo object +# for daily time step only at the moment + +ReadObs <- function(folder, file) +{ + + # packages required: zoo and chron + require(chron) + require(zoo) + + # read the file (skip the file header) + Data <- read.table(paste(folder,file,sep=""), header = F, sep = "", skip = 16, as.is=TRUE, comment.char = "#", na.strings = "-9999") + # read line 10 to get the obs names + Names <- scan(paste(folder,file,sep=""), what = "", nlines = 1, sep = "", skip = 9) + # Remove first value + Names <- Names[-1] + # Here we only keep the dates (no times) + date <- chron(Data$V1, format="d.m.y") + + # Remove the date-time columns from the dataset + Data <- Data[,-c(1,2)] + + # create the zoo object and name the columns + Zoodata <- zoo(Data, date)
+ names(Zoodata) <- Names + + # Return Zoodata + Zoodata +} + +### function ReadObs_header: reads headers of JAMS input files for observed time series + +# this function reads the header of an input time series file for the JAMS/J2000 model +# and writes a header.txt file in the same directory + +ReadObs_header <- function(folder, inputfile, headerfile) +{ + # Read the header = first 16 lines of the file + header <-readLines(paste(folder,inputfile,sep=""), n = 16) + + # write it as a text file + # Open the connection + filename <-paste(folder,headerfile,sep="",collapse="") + con <- file(filename,"w") + # Write the header and close connection + writeLines(header,con=con) + close(con) +} + +### Function ReadTimeLoop **** deprecated **** +### old TimeLoop function used for J2KYzeron daily; reads and extracts the following variables: +## rain, PotET and ActET as catchment averages (mm/day) +## saturation of the soil storages MPS and LPS (%) +## 4 flow components at the outlet (m3/s) +## simulated and observed discharge at the outlet (m3/s) +## exported as a multivariate zoo object +## area = ugly patch to correct a unit problem in the flow contributions + +ReadTimeLoop <- function(folder, filename, area) +{ + + # packages required: zoo and chron + require(zoo) + require(chron) + + # Read file + Data <- read.table(paste(folder,filename,sep=""), as.is = TRUE, sep = "", skip = 9, comment.char = "@", na.strings = "-9999.0") + # Caution: columns V16 to V19 are contributions in mm/day -> convert back to m3/s + # Ugly patch (to be removed as soon as the model is corrected) + Data$V16 <- Data$V16*area/(1000*24*3600) + Data$V17 <- Data$V17*area/(1000*24*3600) + Data$V18 <- Data$V18*area/(1000*24*3600) + Data$V19 <- Data$V19*area/(1000*24*3600) + + # Extract only the interesting columns + Result <- cbind(Data$V3, Data$V4, Data$V5, Data$V9, Data$V10,Data$V16,Data$V17,Data$V18,Data$V19, Data$V21, Data$V22) + + # Extract the colnames and store them in a vector + Names <- scan(paste(folder,filename,sep=""), what = "", nlines = 1, sep = "", skip = 5) + Colnames <- c(Names[2:4], Names[8:9], Names[15:18], Names[20:21]) + + # Transform into zoo object + # Get the date and transform it into a chron object + chrondates <- chron(dates = Data$V1, format="y-m-d") + Zoo <- zoo(Result, chrondates) + # Zoo <- zoo(Result, as.Date(Data$V1)) + # Set the colnames + names(Zoo) <- Colnames + + # Return the object + Zoo +} + +### Function ReadTimeLoopDaily +## This function reads the J2K output TimeLoop file +## returns all the data (no selection) as a zoo multivariate object +## no unit conversions or anything +## works for daily time step only + +ReadTimeLoopDaily <- function(folder, filename) +{ + + # packages required: zoo and chron + require(zoo) + require(chron) + + # Read file + Data <- read.table(paste(folder,filename,sep=""), as.is = TRUE, sep = "", skip = 9, comment.char = "@", na.strings = "-9999.0") + + # Select values from the third column onwards (the first and second columns contain the dates and hours) + Result <- Data[,3:dim(Data)[2]] + + # Extract the colnames and store them in a vector + Names <- scan(paste(folder,filename,sep=""), what = "", nlines = 1, sep = "", skip = 5) + # Remove the first name (ID) which is not useful + Colnames <- Names[-1] + + # Transform into zoo object + # Get the date and transform it into a chron object + chrondates <- chron(dates = Data$V1, format="y-m-d") + Zoo <- zoo(Result, chrondates) + # Zoo <- zoo(Result, as.Date(Data$V1)) + # Set the colnames + names(Zoo) <- Colnames + + # Return the object + Zoo +} + +### Function ReadTimeLoopHourly
+## This function reads the J2K output TimeLoop file at hourly time step +## returns all the data (no selection) as a zoo multivariate object +## no unit conversions or anything +## author: Meriem Labbas, modified by F. Branger +ReadTimeLoopHourly <- function (folder,filename){ + # packages required: zoo and chron + require(zoo) + require(chron) + #^****************************************************************************** + #^* IN + #^* 1. folder : where the timeloop.dat or time.dat file is located + #^* 2. the name of the file : time.dat or timeLoop.dat + #^* OUT + #^* 1. zoo object + #^****************************************************************************** + # Get the variable names + names <- scan(paste(folder,filename,sep=""), what = "", nlines = 1, sep = "", skip = 5) + # Remove the first name (ID) which is not useful and add the Date and Time names + names <- names[-1] + names <- c("Date", "Time", names) + + # Read the file + Data <- read.table(paste(folder,filename,sep=""), col.names = names, as.is = TRUE, sep = "", skip = 9, na.strings = "-9999", comment.char = "@") + + # Select values from the third column onwards (the first and second columns contain the dates and hours) + Values <- Data[,3:dim(Data)[2]] + + # Get the date and hours and convert them to chron + Date <- as.character(Data[,"Date"]) + # Get the time and convert it to hh:mm:ss + Time <- as.character(Data[,"Time"]) + Time <- paste(Time,":00",sep="") + # Create the chron dates-times series + chron <- chron (Date,Time,format = c(dates = "y-m-d", times = "h:m:s")) + + # Build the zoo vector of data + Zoo <- zoo(Values,chron) + + return(Zoo) +} + +### Function ReadReachExtraction +# this function reads reach output data for a single reach (extracted using JADE) and returns a zoo multivariate object +# the file format is simple: +# comment character # +# headers on line 2 + +ReadReachExtraction <- function(folder, filename) +{ + # packages required: zoo and chron + require(zoo) + require(chron) + + # Read file + temp <- read.table(paste(folder,filename,sep=""), as.is = TRUE, skip = 3, header =F, sep = "", comment.char = "#", na.strings = "-9999.0") + + # Extract only the interesting data + Result <- cbind(temp$V3, temp$V4, temp$V9, temp$V10,temp$V11,temp$V12) + + # Extract the colnames and store them in a vector + Names <- scan(paste(folder,filename,sep=""), what = "", nlines = 1, sep = "", skip = 1) + Colnames <- c(Names[2:3], Names[8:11]) + + # Transform into zoo object + chrondates <- chron(temp$V1, format="y-m-d") + Zoo <- zoo(Result, chrondates) + # Zoo <- zoo(Result, as.Date(temp$V1)) + # Set colnames + names(Zoo) <- Colnames + + # Convert flow values L/day -> m3/s + Zoo$simRunoff <- Zoo$simRunoff/ (1000*24*3600) + Zoo$reachOutRD1 <- Zoo$reachOutRD1/ (1000*24*3600) + Zoo$reachOutRD2 <- Zoo$reachOutRD2/ (1000*24*3600) + Zoo$reachOutRG1 <- Zoo$reachOutRG1/ (1000*24*3600) + Zoo$reachOutRG2 <- Zoo$reachOutRG2/ (1000*24*3600) + # Channel storage L -> m3 + Zoo$channelStorage <- Zoo$channelStorage/ 1000 + + # Return the object + Zoo +} + +### Function ReadLoop +# function that reads the distributed output files ReachLoop and HRULoop and organises them into a single table with one date per row +# works well but is really VERY slow +### DEPRECATED + +ReadLoop <- function(folder, file) +{ + +# Get the number of time steps of the simulation +Nbblocs <- read.table(paste(folder,file,sep=""), nrows = 1, sep = "\t", skip = 3) +Nbblocs<- Nbblocs[,3] + +# Get the number of reaches (length of the blocks)
+Longblocs <- read.table(paste(folder,file,sep=""), nrows = 1, sep = "\t", skip = 1) +Longblocs <- Longblocs[,3] + +# Get the column names into a vector +Colnames <- scan(paste(folder,file,sep=""), what = "", nlines = 1, sep = "", skip = 6) + +# Read the whole file +# Initialise the Data dataframe +Data <- NULL +for (i in 0:(Nbblocs -1)) +{ +# Read the date +Datetemp <- read.table(paste(folder,file,sep=""), as.is = TRUE, nrows = 1, sep = "", skip = 10 + i*(Longblocs + 3)) +# Read the data block +Datatemp <- read.table(paste(folder,file,sep=""), nrows = Longblocs, sep = "", skip = 12 + i*(Longblocs + 3)) +# Add the date to the data block +Datatemp$Date <- Datetemp[,2] +Datatemp$Heure <- Datetemp[,3] + +# Append the data block + dates to the global dataframe +Data <- rbind(Data, Datatemp) +} +# Add the column names +#names(Data) <- Colnames + + +# Write this table to a file that will be easier (and faster) to read next time +#write.table(Data, "data.txt") +# Return the data table +Data +} + + + +## New version of function ReadLoop +# This version should be faster!! +# For daily time step (an hourly time step version should not be much different) +## Added the filtre argument for IrrigationLoop (if TRUE, skip one more line in the header) + +# 1. it starts by reading the length of the first data block and the number of blocks in the file +# 2. then it reads the file's headers (column names) +# 3. finally it reads the data block by block, adding a day to the date at each block, until the end of the file + +ReadLoopDaily <- function(folder, file,filtre) # NTC +{ + # Open the file + con<-file(paste(folder,file,sep="")) + open(con) + + # Be careful: as we stay within the same connection, lines must be counted from the current line read (not from the beginning of the file) + + # Read the nb of elements (HRUs or Reaches) (length of the blocks) + Lblocks <- read.table(con, nrows = 1, sep = "\t", skip = 1) + Lblocks <- Lblocks[,3] + + # Get the nb of time steps of the simulation (nb of blocks of the file) + Nblocks <- read.table(con, nrows = 1, sep = "\t", skip = 1) + Nblocks<- Nblocks[,3] + + # Get the col names (names of the simulated variables) + if (filtre == T) {Colnames <- scan(con, what = "", nlines = 1, sep = "", skip = 3)} else {Colnames <- scan(con, what = "", nlines = 1, sep = "", skip = 2)} + #Colnames <- scan(con, what = "", nlines = 1, sep = "", skip = 2) + + # Get the start date of the simulation (we consider only the date -> daily time step) + Datestart <- read.table(con, as.is = TRUE, nrows = 1, sep = "", skip = 3) + Datestart <- Datestart[,2] + if (filtre == T) { + read.table(con, nrows = 1, sep = "\t") + count = length(Colnames)+1 + compt <- 0 + while (count == (length(Colnames)+1)) { + obj <- read.table(con, nrows = 1, sep = "\t") + count = dim(obj)[2] + compt = compt + 1 + } + Lblocks = compt-1 + con<-file(paste(folder,file,sep="")) + open(con) + read.table(con, nrows = 1, sep = "\t", skip = 1) + read.table(con, nrows = 1, sep = "\t", skip = 1) + Colnames <- scan(con, what = "", nlines = 1, sep = "", skip = 3) + # Get the start date of the simulation (we consider only the date -> daily time step) + read.table(con, as.is = TRUE, nrows = 1, sep = "", skip = 3) + } + + # Read the data + # Initialisation of a matrix of the correct size to store the data + # nrow = nb of time steps * nb of elements + # ncol = nb of simulated variables + ncol <-length(Colnames) + Data <- matrix(nrow=Nblocks*Lblocks,ncol=ncol) + # Loop on the nb of blocks
+ for (i in 0:(Nblocks -1)) + { + # Read the block of data + # if i=0 (first block), skip only 1 line + if(i==0) + Datatemp <- read.table(con, nrows = Lblocks, sep = "", skip = 1, colClasses="numeric") + # else skip 3 lines + else + Datatemp <- read.table(con, nrows = Lblocks, sep = "", skip = 3, colClasses="numeric") + + # Add the values to the matrix + Data[(i*Lblocks+1):((i+1)*Lblocks),1:ncol] <- as.matrix(Datatemp) + } + + # close the file + close(con) + + # Add the colnames + colnames(Data) <- Colnames + # Create the corresponding vector of dates + dates <- as.character(seq(from = as.Date(Datestart), length.out = Nblocks, by = "day")) + + # Return the vector of dates and the data as a list + list(dates=dates, Data=Data) + } + +### Function ExtractLoopDaily +# this function processes the output of ReadLoopDaily : extracts the values for a given element (reach or HRU) +# and returns them as a zoo multivariate object +# avoids having to use JADE +# parameters: +# list = list containing the data, composed of two objects +# dates = vector of dates (strings) +# Data = matrix containing the data (numeric) +# id = id of the element (integer) +# no unit conversions in this function (values in L/time step for discharge) +ExtractLoopDaily <- function(list,id) +{ + # packages required: zoo and chron + require(zoo) + require(chron) + + # extract values from the Data matrix with the matching id + values<-subset(list$Data,list$Data[,1]==id) + # we remove the 1st column which is not useful anymore (contains the element id) + values<-values[,-1] + # set dates vector to chron format + chrondates <- chron(list$dates, format = "y-m-d") + # transform into zoo format + Zoo <- zoo(values, chrondates) + +} + +### Function WriteJ2000Input_daily +### function that writes data contained in a zoo vector (univariate or multivariate) into a .dat file +## at the moment for daily time step only +## used for input of JAMS models +## gets the header of the file from an external file (header), where we just update the start/end dates of the time series +## the header file must be in the same directory as the .dat target + +WriteJ2000Input_daily <- function (z,folder,filename,header) +{ + # Read the header file and split it in two parts: first 5 lines then lines 8 to 16 + header <-readLines(paste(folder,header,sep=""), n = 16) + header1<- head(header, n=5) + header2<- tail(header, n=9) + #header2 <-scan(paste(folder,header,sep=""), what = "character", skip = 7, nlines=9) + + # Now back to the z object with the data + # Conversion of dates into strings with the adequate format + date <- as.POSIXlt(index(z), "GMT") + date <- strftime(date, format="%d.%m.%Y", usetz = FALSE) + # create vector of hours (of the same length) + hours <- rep("07:30",length(date)) + + # Replace NoData values by the adequate code + z <- ifelse(is.na(z)==TRUE,-9999,z) + + # Write the output file + # Open the connection + filename <-paste(folder,filename,sep="",collapse="") + con <- file(filename,"w") + # Write the first part of the header + writeLines(header1,con=con) + # Write the lines corresponding to the start and end dates + writeLines(paste("dataStart",head(date, 1),head(hours,1),sep="\t"), con=con) + writeLines(paste("dataEnd",tail(date, 1),tail(hours,1),sep="\t"), con=con) + # Write the rest of the header + writeLines(header2,con=con) + # Write the data + #write.table(data.frame(date,hours, format(round(z,digits=3),trim=TRUE,nsmall=3)),filename, sep="\t",row.names=F,col.names=F,quote=F) + write.table(data.frame(date,hours,z),con, sep="\t",row.names=F,col.names=F,quote=F) + # Write the footer and close connection
+ writeLines("# end of file.dat",con=con) + close(con) + +} + +### Function WriteJ2000Input2 +### function that writes data contained in a zoo vector (univariate or multivariate) into a .dat file +## works for hourly time step +## used for input of JAMS models +## gets the header of the file from an external file (header) +## the header file must be in the same directory as the .dat target +WriteJ2000Input2 <- function (z,folder,filename,header) +{ + # Read the header file: the header file contains 16 lines + header <-readLines(paste(folder,header,sep=""), n = 16) + + # Now back to the z object with the data + # Conversion of dates into strings with the adequate format + date <- strftime(dates(index(z)), format="%d.%m.%Y", usetz = FALSE) + # Convert hours into strings with the adequate format + time <- substring(as.character(index(z)),11,15) + + # Replace NoData values by the adequate code + z <- ifelse(is.na(z)==TRUE,-9999,z) + + # Write the output file + # Open the connection + filename <-paste(folder,filename,sep="",collapse="") + con <- file(filename,"w") + # Write the header + writeLines(header,con=con) + # Write the data + #write.table(data.frame(date,hours, format(round(z,digits=3),trim=TRUE,nsmall=3)),filename, sep="\t",row.names=F,col.names=F,quote=F) + write.table(data.frame(date,time,z),con, sep="\t",row.names=F,col.names=F,quote=F) + # Write the footer and close connection + writeLines("# end of file.dat",con=con) + close(con) + +} + + +# same as the previous function, but adapted for xts objects (by IG) +WriteJ2000_Input <- function (z,folder,filename,header) +{ + # Read the header file: the header file contains 16 lines + header <-readLines(paste(folder,header,sep=""), n = 16) + + # Now back to the z object with the data + # Conversion of dates into strings with the adequate format + date <- strftime(as.Date(index(z)), format="%d.%m.%Y", usetz = FALSE) + # Convert hours into strings with the adequate format + time <- substring(as.character(index(z)),11,15) + + # Replace NoData values by the adequate code + z <- ifelse(is.na(z)==TRUE,-9999,z) + + # Write the output file + # Open the connection + filename <-paste(folder,filename,sep="",collapse="") + con <- file(filename,"w") + # Write the header + writeLines(header,con=con) + # Write the data + #write.table(data.frame(date,hours, format(round(z,digits=3),trim=TRUE,nsmall=3)),filename, sep="\t",row.names=F,col.names=F,quote=F) + write.table(data.frame(date,time,z),con, sep="\t",row.names=F,col.names=F,quote=F) + # Write the footer and close connection + writeLines("# end of file.dat",con=con) + close(con) + +} + +# WriteJ2000_InputFull writes the ENTIRE xxx.dat in J2000 format, including the header! +# --------------------------------------------- +# IN : - data, an xts (with one column per grid cell) of the data to write +# - dirname : the directory (ending with /) where the .dat file should be written +# - filename : 'refet.dat' or 'tmean.dat' or... +# - grid : a dataframe with information on the grid of the input data (alti, coordsX...). See ReadJ2000_Grid +# - description : the data description written on the 1st line of the .dat, after the '#' (ex: 'Duo ETP, 1kmx1km') +# - varname : 'tmean' or 'refet' or.... it enters the Datastore description only. +# OUT : dirname/filename is created.
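+# Minimal usage sketch (hypothetical objects; the path and file names simply reuse examples quoted elsewhere in this file): +#   grid <- ReadJ2000_Grid('$HOME/modeldata/J2000/input/local/', 'rain.dat')   # grid metadata (mailles, coordsX, coordsY, alti), see ReadJ2000_Grid below +#   WriteJ2000_InputFull(etp_xts, '$HOME/modeldata/J2000/input/local/', 'refet.dat', grid, 'Duo ETP, 1kmx1km', 'refet')   # etp_xts: an xts with one column per grid cell +# NB: the header written by this function hard-codes dataStart 01.01.1985, dataEnd 31.12.2012 and a daily time step ('tres d').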
+WriteJ2000_InputFull <- function (data, dirname , filename, grid, description, varname){ + header=rbind('@dataValueAttribs ', + paste(varname,'0 9999 mm'), + '@dataSetAttribs' , + 'missingDataVal -9999', + 'dataStart 01.01.1985 07:30' , + 'dataEnd 31.12.2012 07:30', + 'tres d', + '@statAttribVal') + + # 1st attribute : name = grid-cell number. Here : ID_NEW + name=unlist(c("name",grid['mailles',]) ) + # 2nd attribute : ID = from 1 to 1183 + ID=unlist(c("ID",c(1:dim(grid)[2])) ) + # 3rd attribute : elevation + elevation=unlist(c("elevation",grid['alti',])) + # 4th attribute : x in L2ET coordinates + x=unlist(c("x",grid['coordsX',])) + # 5th attribute : y in L2ET coordinates + y=unlist(c("y",grid['coordsY',])) + # 6th attribute : dataColumn (1 to 1183) + dataColumn=unlist(c("dataColumn",c(1:dim(grid)[2]))) + headerdata=c("@dataVal") + + myfile=paste(dirname,filename,sep="") + + write(paste('#', description),myfile) + write(header,myfile) + write(name,myfile, append=T, ncolumns=length(name), sep="\t") + write(ID,myfile, append=T, ncolumns=length(name), sep="\t") + write(elevation,myfile, append=T, ncolumns=length(name), sep="\t") + write(x,myfile, append=T, ncolumns=length(name), sep="\t") + write(y,myfile, append=T, ncolumns=length(name), sep="\t") + write(dataColumn,myfile, append=T, ncolumns=length(name), sep="\t") + write(headerdata,myfile, append=T) + + datetime=strftime(time(data), format='%Y.%m.%d 07:00') + write.table(data.frame(datetime,round(data,3)),myfile, append=T, sep="\t",row.names=F,col.names=F,quote=F ) +} + + +# ReadJ2000_Input. +# +# in : +# ** filename = 'rain.dat' (or 'temp.dat', whatever... in J2000 input format, typically from modeldata/J2000/input/local/rain.dat) +# ** folder = where the rain.dat is stored (ending with "/"). EX: '$HOME/modeldata/J2000/input/local/' + +# out : +# ** an xts object with columns = forcing grid cells ('mailles') and rows = dates. +# +# author : I. Gouttevin on 29/03/2016
+ +ReadJ2000_Input=function(folder, filename){ + library(xts) + myfile=paste(folder,filename,sep="") + missval=read.table(myfile,nrows = 1, sep = "\t", skip = 4)[2] + mailles=read.table(myfile,nrows = 1, sep = "\t", skip = 9) + mailles=mailles[-1] + alti=read.table(myfile,nrows = 1, sep = "\t", skip = 11) + alti=alti[-1] + coordsX=read.table(myfile,nrows = 1, sep = "\t", skip = 12) + coordsX=coordsX[-1] + coordsY=read.table(myfile,nrows = 1, sep = "\t", skip = 13) + coordsY=coordsY[-1] + mydata=read.table(myfile,sep = "\t", skip = 16) + date=as.POSIXct(mydata[,1], format='%d.%m.%Y') # we do not use the time of day, which is arbitrary anyway + data=xts(mydata[,2:dim(mydata)[2]],date) + colnames(data)=mailles + return(data) +} + +ReadJ2000_Grid=function(folder, filename){ + myfile=paste(folder,filename,sep="") + mailles=read.table(myfile,nrows = 1, sep = "\t", skip = 9) + mailles=mailles[-1] + alti=read.table(myfile,nrows = 1, sep = "\t", skip = 11) + alti=alti[-1] + coordsX=read.table(myfile,nrows = 1, sep = "\t", skip = 12) + coordsX=coordsX[-1] + coordsY=read.table(myfile,nrows = 1, sep = "\t", skip = 13) + coordsY=coordsY[-1] + mygrid=rbind(mailles,coordsX,coordsY,alti) + mygrid=round(mygrid,2) + row.names(mygrid)=c('mailles','coordsX','coordsY','alti') + names(mygrid)=mailles + return(mygrid) +} + diff --git a/irrigation-R-codes/lib_old/zoo_functions.r b/irrigation-R-codes/lib_old/zoo_functions.r new file mode 100644 index 0000000000000000000000000000000000000000..b88cd65bcd335f19acc8dc86ea558715dd81ff08 --- /dev/null +++ b/irrigation-R-codes/lib_old/zoo_functions.r @@ -0,0 +1,67 @@ +library(chron) +library(zoo) + +createZoo <- function (table, date_start, date_end,list_val,format_date) +{ + #Create a zoo object from the table [table] + #over the period [date_start] ("yyyy-mm-dd", included) to [date_end] (excluded), with the variables + #whose headers are in the list [list_val], for a chosen date format [format_date] + + #Find the indices corresponding to the dates + date <- as.Date(rownames(table), format_date) + index_start <- which(date==date_start,arr.ind=TRUE)[1] + index_end <- which(date==date_end,arr.ind=TRUE)[1] + + #Put the variables of interest over the given period into values. Same for date + values <- table[index_start:index_end,list_val] + date <- as.character(strptime(date[index_start:index_end],"%Y-%m-%d")) + + # Get the times and convert them to hh:mm:ss + # if (length(table[index_start:index_end,"ID"])!=0) {heure <- as.character(table[index_start:index_end,"ID"])} else { + heure <- rep(12,length(date)) + Times <- paste(heure,":00:00",sep="") + + # Create the chron dates-times series + chron <- chron (date,Times,format = c(dates = "y-m-d", times = "h:m:s")) + + # Build the zoo vector of data + values_zoo <- zoo(values,chron) + + return(values_zoo) +} + +aggregateZoo <- function (z, timeStep, sumOrMeanFunction) # NTK ?? -- already in aggregateZoo_functions.r >< +{ + #Retourne un nouveau objet zoo agrégé
sur le pas de temps timeStep + #(["dmy","my","m","sy","s"]) en faisant la somme ou la moyenne ([sum,mean]) + + if(timeStep == "dmy"){ + return (aggregate(z, time(z) - as.numeric(time(z)) %% 1, sumOrMeanFunction,na.rm = TRUE)) + } + + if(timeStep == "my"){ + return (aggregate(z, as.Date(as.yearmon(time(z))), sumOrMeanFunction,na.rm = TRUE)) + } + + if(timeStep == "y"){ + return (aggregate(z, format(as.Date(index(z)), '%y'), sumOrMeanFunction,na.rm = TRUE)) + } + + if(timeStep == "m"){ + return (aggregate(z, format(as.Date(index(z)), '%m'), sumOrMeanFunction,na.rm = TRUE)) + } + + if(timeStep == "sy"){ + return (aggregate(z, as.Date(as.yearqtr(time(z))), sumOrMeanFunction,na.rm = TRUE)) + } + + if(timeStep == "s"){ + return (aggregate(z, quarters(time(z)), sumOrMeanFunction,na.rm = TRUE)) + } + + print("Type not understood") + +} + + + diff --git a/irrigation-R-codes/resultats OUT/Irrigated_AleatoireHRUselect.csv b/irrigation-R-codes/resultats OUT/Irrigated_AleatoireHRUselect.csv new file mode 100644 index 0000000000000000000000000000000000000000..a0a48e8bcd7c69c2b960621f44466312a5627a63 --- /dev/null +++ b/irrigation-R-codes/resultats OUT/Irrigated_AleatoireHRUselect.csv @@ -0,0 +1,1390 @@ +"HRUnum" "HRUarea" "irrigated" "irrig_type" "area_error" +8014 7480000 0 0 0.0146892583201774 +9236 6480000 0 0 0.343988589646793 +12290 5320000 0 0 -6.09945292724748 +9419 9680000 0 0 -1.39604514845467 +5847 10520000 1 1 0.0544989589301492 +8431 5520000 0 0 -1.64585659198784 +7122 7320000 0 0 53.8922401893697 +9634 8720000 1 1 24.0802989407914 +5757 5120000 0 0 -4.71021048187684 +6321 7960000 0 0 0.038178287067514 +10410 5080000 1 2 37.2750364805707 +7282 5440000 0 0 53.8922401893697 +9522 5880000 0 0 24.0802989407914 +6255 8600000 0 0 0.314244744790559 +11949 10120000 0 0 -0.058992955547077 +6227 8120000 0 0 0.038178287067514 +12173 8560000 0 0 0.125824786481687 +7270 5440000 0 0 53.8922401893697 +5919 7320000 0 0 -0.116244026348644 +5268 6280000 0 0 32.0164527967292 +10302 5800000 0 0 -0.495372461305743 +12397 640000 0 0 0.447304402417004 +6450 5400000 0 0 0.206532547999694 +2324 7680000 0 0 18.3106342000275 +7780 7360000 0 0 -2.08303522148858 +3892 6800000 0 0 -8.32287745713091 +7120 7680000 0 0 -1.8722893006199 +8871 6720000 0 0 -1.39604514845467 +10120 6040000 1 1 -0.495372461305743 +10894 10800000 0 0 0.119477926831113 +8000 8200000 0 0 -0.306651691632065 +7886 10720000 0 0 1.40746568756009 +11864 8280000 0 0 -0.83662462309424 +6153 10360000 0 0 -0.551744956313205 +9729 9640000 0 0 -0.221181092903122 +12445 5560000 0 0 -0.140252454417955 +6420 5840000 0 0 4.94032442818608 +6419 5640000 0 0 -5.79396041034532 +3805 8600000 0 0 -80.7900108056189 +10679 10720000 0 0 -1.12226232263806 +8226 5400000 0 0 1.40746568756009 +8897 7280000 0 0 -1.64585659198784 +4930 8240000 0 0 32.0164527967292 +6538 7520000 0 0 -5.79396041034532 +7366 8920000 0 0 -9.83638412809451 +1957 3160000 0 0 -0.27841974050552 +2155 9600000 0 0 18.3106342000275 +7679 6280000 0 0 -2.08303522148858 +12196 7760000 0 0 -0.058992955547077 +3168 10560000 0 0 9.43801552398515 +2407 9280000 0 0 1.78196249319118 +7779 9e+06 0 0 0.0891522581372151 +11897 6280000 0 0 -0.83662462309424 +7060 6840000 0 0 -9.83638412809451 +10456 10920000 1 2 -1.50166421549656 +7176 10200000 0 0 -9.83638412809451 +6480 10040000 0 0 -2.17308231217996 +11985 9640000 0 0 0.125824786481687 +3263 9120000 0 0 9.43801552398515 +7567 5560000 0 0 0.0891522581372151 +7121 5040000 0 0 53.8922401893697 +12435 7760000 1 1 -0.140252454417955 +7546 6e+06 0 0 -2.08303522148858 
+10588 8680000 0 0 -1.12226232263806 +11876 6640000 0 0 -33.5652282911849 +6983 7640000 0 0 63.7310840982386 +10426 7680000 0 0 -1.12226232263806 +10938 11520000 0 0 0.119477926831113 +1880 7160000 0 0 -0.27841974050552 +11957 8400000 1 1 -0.058992955547077 +6557 7600000 0 0 -5.79396041034532 +9689 5560000 0 0 -0.221181092903122 +12215 5560000 0 0 -6.09945292724748 +8258 5440000 0 0 12.472637050972 +9990 6040000 0 0 1.55966630395357 +12375 8120000 1 1 -1.05283433953274 +2429 9e+06 0 0 -1.14280405091517 +7155 6320000 0 0 53.8922401893697 +11032 6720000 0 0 0.119477926831113 +11711 5680000 0 0 153.286929621036 +11401 1800000 0 0 -0.156716040645066 +7894 5400000 0 0 0.121805940382025 +11865 7720000 0 0 -1.66218771772668 +9148 5040000 0 0 0.343988589646793 +5190 9720000 0 0 -2.68556450220527 +7309 7240000 0 0 -1.8722893006199 +11694 10040000 0 0 -0.83662462309424 +5852 8480000 0 0 -7.43612968868264 +11082 9760000 0 0 7.53720595295246 +5008 5360000 1 2 32.0164527967292 +11721 5440000 0 0 153.286929621036 +10364 8400000 0 0 -0.495372461305743 +8783 8880000 0 0 1.16534278118412 +12260 5200000 1 2 5.35656754633371 +2265 6440000 0 0 18.3106342000275 +6514 10320000 1 1 0.038178287067514 +2267 5240000 0 0 18.3106342000275 +12303 8880000 1 1 -0.058992955547077 +7540 6600000 0 0 -8.54848603133888 +7462 11280000 1 1 -2.08303522148858 +11910 9680000 1 2 -0.758663112569202 +6427 9840000 0 0 0.038178287067514 +9061 7680000 0 0 -0.0445256204054556 +11680 5040000 0 0 -0.292001994159963 +7119 9880000 0 0 -1.8722893006199 +5789 9560000 0 0 0.0544989589301492 +10246 6400000 0 0 1.80352436566587 +9063 9120000 0 0 -1.39604514845467 +7084 9120000 0 0 53.8922401893697 +5995 11640000 0 0 0.038178287067514 +3186 6080000 0 0 9.43801552398515 +9188 8960000 0 0 -1.39604514845467 +7912 8480000 1 1 -0.306651691632065 +12107 7760000 0 0 -2.97238939679331 +9064 10040000 0 0 -1.39604514845467 +7238 9880000 0 0 -9.83638412809451 +9107 5560000 0 0 -0.0445256204054556 +11468 5680000 0 0 10.757870987591 +7236 6e+06 0 0 -9.83638412809451 +1713 5360000 0 0 0.6144911877912 +7091 8720000 0 0 -9.83638412809451 +2533 8040000 0 0 -1.14280405091517 +5414 7840000 0 0 -2.68556450220527 +7709 9840000 1 2 0.0146892583201774 +11470 7560000 0 0 -7.59660914022173 +4886 10720000 0 0 32.0164527967292 +9096 2280000 0 0 -1.39604514845467 +12079 6040000 0 0 -0.095068684640276 +11407 10440000 0 0 -7.59660914022173 +3617 9800000 0 0 -80.7900108056189 +11656 7080000 0 0 -0.292001994159963 +7915 9e+06 0 0 64.9882329460878 +7198 7160000 0 0 53.8922401893697 +11617 7760000 0 0 -0.292001994159963 +6060 6800000 0 0 11.9812059514487 +5090 11320000 0 0 -2.68556450220527 +6030 1400000 0 0 0.038178287067514 +3641 10880000 0 0 -80.7900108056189 +6366 12400000 1 1 -0.551744956313205 +8567 6800000 0 0 -1.64585659198784 +7307 7360000 0 0 -9.83638412809451 +6008 6920000 0 0 -0.551744956313205 +5123 9360000 0 0 32.0164527967292 +6378 10040000 0 0 -0.551744956313205 +8078 5680000 1 2 64.9882329460878 +12033 5520000 0 0 0.28943560057888 +6584 5800000 0 0 -2.17308231217996 +8442 8720000 1 1 12.472637050972 +8154 6240000 0 0 1.40746568756009 +3167 7880000 0 0 9.43801552398515 +3234 5680000 0 0 9.43801552398515 +10236 10600000 0 0 -1.50166421549656 +8294 8360000 0 0 12.472637050972 +9194 11160000 0 0 -0.0445256204054556 +7871 9920000 0 0 0.0146892583201774 +9655 4160000 0 0 1.55966630395357 +6359 6120000 0 0 0.038178287067514 +8128 9200000 1 2 69.3619169237608 +9248 8480000 0 0 0.343988589646793 +9871 6360000 0 0 0.109093242636216 +10234 520000 0 0 -0.495372461305743 
+5798 5440000 0 0 0.0544989589301492 +5785 8480000 0 0 -7.43612968868264 +12394 8e+06 0 0 0.447304402417004 +1823 7240000 0 0 -0.27841974050552 +8237 8160000 1 2 69.3619169237608 +10468 11800000 0 0 -1.12226232263806 +11510 6800000 0 0 -33.5652282911849 +8133 5960000 0 0 69.3619169237608 +6234 5440000 0 0 0.038178287067514 +7715 9680000 0 0 0.0891522581372151 +2368 6160000 0 0 1.78196249319118 +5598 6200000 1 1 -2.68556450220527 +9852 11080000 1 1 0.109093242636216 +11630 8040000 0 0 -0.292001994159963 +7304 7400000 0 0 -9.83638412809451 +12003 7160000 0 0 0.125824786481687 +1784 6440000 0 0 0.6144911877912 +6146 5360000 0 0 -3.1698779809611 +5568 6840000 0 0 1.73410404624277 +2419 5720000 0 0 -1.14280405091517 +7568 7160000 0 0 -8.54848603133888 +7480 7880000 1 1 -2.08303522148858 +8012 6800000 0 0 1.40746568756009 +6727 8400000 0 0 0.206532547999694 +5599 5320000 0 0 -7.43612968868264 +7470 1760000 0 0 -1.8722893006199 +6412 7360000 0 0 4.94032442818608 +7820 7720000 0 0 0.121805940382025 +9304 7240000 0 0 -0.0445256204054556 +1679 5760000 0 0 0.6144911877912 +7495 9880000 0 0 0.0891522581372151 +12437 9880000 0 0 0.447304402417004 +11851 5320000 0 0 -1.66218771772668 +7976 6520000 0 0 0.0146892583201774 +4304 7320000 0 0 -8.32287745713091 +12353 10720000 1 1 -1.05283433953274 +11790 9160000 0 0 -1.66218771772668 +11829 7480000 0 0 -0.156716040645066 +5805 8040000 0 0 -0.116244026348644 +6137 7920000 1 1 0.314244744790559 +10490 8e+06 0 0 37.2750364805707 +9656 7880000 0 0 -0.646502369334301 +12009 8760000 0 0 -8.74375475292944 +5777 480000 1 1 -4.71021048187684 +8449 5360000 0 0 -1.64585659198784 +6392 6760000 1 1 0.365234432995809 +9525 7080000 0 0 -0.646502369334301 +7751 9320000 0 0 -0.306651691632065 +11267 8240000 0 0 -0.838323353293413 +12330 7440000 0 0 -6.09945292724748 +8453 6480000 0 0 -1.64585659198784 +11455 7720000 0 0 0.119477926831113 +7115 9160000 1 1 53.8922401893697 +8413 8e+06 0 0 12.472637050972 +11574 9080000 0 0 -0.156716040645066 +3739 7320000 0 0 -80.7900108056189 +2232 13080000 1 1 1.78196249319118 +6572 10160000 0 0 -2.17308231217996 +10327 7840000 0 0 -1.50166421549656 +11963 5280000 0 0 -8.74375475292944 +9670 8960000 0 0 0.109093242636216 +11631 9800000 1 2 -0.292001994159963 +8003 9600000 0 0 -0.306651691632065 +11799 5400000 0 0 -0.83662462309424 +10398 9440000 1 2 -1.50166421549656 +12345 6080000 0 0 -6.09945292724748 +11872 9120000 0 0 -0.83662462309424 +11069 6640000 0 0 0.119477926831113 +5526 5280000 0 0 -2.68556450220527 +5885 6600000 1 1 -2.56432968687719 +6278 7400000 0 0 0.038178287067514 +9734 6720000 0 0 1.55966630395357 +2165 12240000 0 0 18.3106342000275 +11919 5840000 0 0 -0.83662462309424 +6401 7800000 0 0 -3.1698779809611 +8391 10720000 0 0 12.472637050972 +6461 1320000 0 0 -0.551744956313205 +5379 7400000 0 0 -2.68556450220527 +10785 5760000 0 0 -2.11754728482204 +6340 7e+06 0 0 0.038178287067514 +11534 7200000 0 0 -0.156716040645066 +7221 6560000 0 0 -9.83638412809451 +9949 12800000 0 0 1.80352436566587 +5551 7120000 0 0 0.0544989589301492 +6648 6840000 0 0 0.206532547999694 +2026 5440000 0 0 -0.27841974050552 +11166 10520000 0 0 0.119477926831113 +6626 9600000 0 0 -2.17308231217996 +2483 6360000 0 0 -1.14280405091517 +11971 6680000 0 0 -0.758663112569202 +12109 7e+06 1 2 -7.72068673638987 +10195 8400000 0 0 -1.50166421549656 +11753 5320000 0 0 -0.156716040645066 +12317 6680000 0 0 -6.09945292724748 +12307 840000 0 0 -6.09945292724748 +11932 6200000 0 0 -8.74375475292944 +9860 8160000 1 1 -7.71527447920201 +10452 5480000 0 0 
-0.495372461305743 +12202 8520000 0 0 -0.095068684640276 +6353 7440000 0 0 -0.551744956313205 +7509 6760000 0 0 -2.08303522148858 +2455 6600000 0 0 -1.14280405091517 +11742 9480000 0 0 -33.5652282911849 +9578 6960000 0 0 -0.221181092903122 +6512 9080000 0 0 0.038178287067514 +7957 6120000 0 0 -0.306651691632065 +4569 6720000 0 0 602.524698133919 +7586 7840000 0 0 -0.643837817130288 +12281 7360000 0 0 -6.09945292724748 +2410 7520000 0 0 -1.14280405091517 +10733 9e+06 0 0 -2.11754728482204 +6545 5840000 0 0 -5.79396041034532 +2337 8840000 0 0 18.3106342000275 +12040 6560000 1 2 0.125824786481687 +8784 8160000 1 1 -1.64585659198784 +7975 9160000 1 1 0.121805940382025 +5860 7760000 0 0 -0.116244026348644 +9733 8120000 0 0 0.109093242636216 +9043 7240000 0 0 -1.39604514845467 +10749 1e+07 0 0 -2.11754728482204 +10711 5080000 0 0 -2.11754728482204 +9731 8520000 0 0 -0.646502369334301 +7809 8640000 0 0 0.0146892583201774 +8227 9160000 0 0 12.472637050972 +6608 9880000 0 0 4.94032442818608 +7573 5280000 0 0 -0.643837817130288 +8575 7680000 0 0 -1.64585659198784 +7265 7240000 0 0 2.26546410056103 +10405 11240000 0 0 -0.495372461305743 +9186 6480000 0 0 0.343988589646793 +6211 5600000 0 0 -3.1698779809611 +9830 9960000 0 0 0.109093242636216 +8343 10160000 1 1 12.472637050972 +7392 8640000 0 0 -8.54848603133888 +11652 8280000 0 0 -33.5652282911849 +7590 6680000 0 0 -2.08303522148858 +7136 5440000 0 0 53.8922401893697 +11675 5440000 0 0 27.3757584875382 +6382 5640000 0 0 -2.17308231217996 +6611 6200000 0 0 0.365234432995809 +12151 15200000 1 2 -0.095068684640276 +11374 5240000 0 0 1.45433391506691 +9630 5400000 0 0 24.0802989407914 +8052 11680000 1 1 -0.306651691632065 +1693 5960000 1 2 0.6144911877912 +5518 5040000 1 1 1.73410404624277 +7343 8520000 0 0 -0.643837817130288 +12046 9800000 1 2 -7.72068673638987 +6019 8200000 0 0 -7.43612968868264 +8686 1e+07 1 1 1.16534278118412 +2391 7720000 0 0 1.78196249319118 +11398 6880000 0 0 0.119477926831113 +12146 8080000 0 0 -0.095068684640276 +7095 7480000 0 0 -1.8722893006199 +7474 5760000 0 0 -8.54848603133888 +6044 10080000 0 0 11.9812059514487 +11607 3240000 1 2 10.757870987591 +8854 5560000 0 0 -1.64585659198784 +7892 10640000 0 0 0.121805940382025 +9193 8320000 0 0 -0.0445256204054556 +11726 5080000 1 2 27.3757584875382 +12091 5360000 0 0 -0.058992955547077 +9273 8040000 0 0 0.343988589646793 +7649 9520000 0 0 -2.08303522148858 +12214 5320000 0 0 -0.095068684640276 +8986 7240000 0 0 -1.39604514845467 +3754 7960000 0 0 -80.7900108056189 +7409 6e+06 0 0 -2.08303522148858 +7896 11440000 1 1 -0.306651691632065 +6149 5400000 0 0 -0.551744956313205 +11739 6960000 0 0 -0.292001994159963 +10500 10200000 0 0 -1.12226232263806 +10520 5640000 0 0 -1.12226232263806 +6350 5920000 1 2 -3.1698779809611 +11274 7800000 0 0 10.757870987591 +10512 5160000 0 0 -1.12226232263806 +11512 6200000 0 0 1.45433391506691 +6396 1.2e+07 0 0 0.038178287067514 +1688 10400000 0 0 0.6144911877912 +6214 8320000 0 0 0.038178287067514 +7999 6320000 0 0 -2.57837679460885 +7292 2e+05 0 0 -9.83638412809451 +9108 5760000 0 0 -1.39604514845467 +12320 4040000 0 0 -1.05283433953274 +12153 7120000 0 0 -0.058992955547077 +12272 6200000 0 0 -6.09945292724748 +5996 7720000 0 0 0.038178287067514 +10438 8720000 0 0 -1.12226232263806 +10395 6520000 0 0 -2.11754728482204 +11375 11240000 1 2 -7.59660914022173 +11673 6360000 0 0 -33.5652282911849 +7671 5480000 0 0 -2.08303522148858 +1881 5440000 0 0 -0.27841974050552 +5806 7480000 0 0 -7.43612968868264 +5226 10960000 0 0 -2.68556450220527 +6327 6e+06 0 0 
0.365234432995809 +11232 5400000 0 0 -0.838323353293413 +7678 8e+06 0 0 0.0891522581372151 +8121 8560000 0 0 1.40746568756009 +2430 8960000 0 0 -1.14280405091517 +7305 8080000 0 0 53.8922401893697 +8311 5760000 0 0 12.472637050972 +10379 7200000 0 0 -1.50166421549656 +11355 6360000 0 0 1.45433391506691 +7178 5480000 0 0 -9.83638412809451 +6339 8280000 0 0 -0.551744956313205 +8926 6360000 0 0 -1.39604514845467 +8727 6920000 0 0 1.16534278118412 +7177 7040000 0 0 -9.83638412809451 +5879 8240000 0 0 138.411810554307 +11572 6120000 0 0 -0.838323353293413 +9891 6080000 1 1 0.109093242636216 +11832 7600000 0 0 -0.058992955547077 +12180 8280000 0 0 -6.09945292724748 +12059 7800000 0 0 -0.095068684640276 +7134 10880000 0 0 2.26546410056103 +6434 5680000 1 1 4.94032442818608 +2475 5160000 0 0 -1.14280405091517 +6588 9080000 0 0 0.206532547999694 +7220 5440000 0 0 -9.83638412809451 +6403 1e+07 0 0 0.038178287067514 +6468 6040000 0 0 4.94032442818608 +5874 10800000 0 0 -4.71021048187684 +11940 11160000 1 1 -8.74375475292944 +8013 9520000 0 0 0.0146892583201774 +6150 6240000 0 0 -0.551744956313205 +7053 8320000 0 0 2.26546410056103 +12393 5760000 0 0 -6.09945292724748 +6441 9160000 0 0 -5.79396041034532 +11483 8e+06 0 0 10.757870987591 +6050 9720000 0 0 -7.43612968868264 +4083 7560000 0 0 -8.32287745713091 +1879 10560000 0 0 0.6144911877912 +7788 5320000 0 0 0.0146892583201774 +8253 8240000 0 0 12.472637050972 +9300 6200000 0 0 0.343988589646793 +7770 5560000 0 0 0.0891522581372151 +9403 9120000 0 0 -0.221181092903122 +6268 6440000 0 0 0.038178287067514 +8160 10080000 0 0 -2.57837679460885 +12219 6080000 0 0 -0.095068684640276 +2153 11440000 0 0 1.78196249319118 +5163 6040000 0 0 -2.68556450220527 +6604 8240000 0 0 -2.17308231217996 +3934 1.3e+07 0 0 -80.7900108056189 +9825 8280000 0 0 0.109093242636216 +9472 8e+06 0 0 -0.646502369334301 +9527 12920000 0 0 -0.646502369334301 +1581 8640000 0 0 0.6144911877912 +11491 3600000 0 0 1.45433391506691 +1743 8600000 0 0 -0.27841974050552 +12337 9320000 0 0 -1.05283433953274 +2397 5320000 1 2 -1.14280405091517 +11878 6040000 0 0 -0.058992955547077 +9847 7240000 0 0 0.109093242636216 +11968 9600000 0 0 -0.058992955547077 +5778 2680000 1 1 -4.71021048187684 +12017 7160000 0 0 -2.97238939679331 +5282 8480000 0 0 -2.68556450220527 +6772 6200000 0 0 0.206532547999694 +8905 10600000 1 2 -0.0445256204054556 +7956 5480000 0 0 0.121805940382025 +11885 6880000 0 0 -8.74375475292944 +5967 11320000 1 1 -0.551744956313205 +2268 11480000 0 0 1.78196249319118 +11818 7600000 0 0 -0.83662462309424 +7984 6840000 0 0 0.121805940382025 +5880 8120000 0 0 -0.116244026348644 +10156 5640000 0 0 1.80352436566587 +8803 10480000 0 0 -0.0445256204054556 +7315 10040000 1 2 -1.8722893006199 +6297 5440000 0 0 -3.1698779809611 +12023 7240000 1 2 -23.6978149367615 +6443 9560000 0 0 4.94032442818608 +7961 5680000 0 0 1.40746568756009 +1691 8400000 1 2 1009.49676396777 +12167 9720000 0 0 -16.9158143194335 +12356 9680000 0 0 -0.140252454417955 +5141 7200000 0 0 32.0164527967292 +7876 5240000 0 0 -0.306651691632065 +5802 8040000 0 0 -0.116244026348644 +5908 6320000 0 0 -0.551744956313205 +2500 9840000 0 0 -1.14280405091517 +9117 6e+05 0 0 -1.39604514845467 +5117 9480000 0 0 -2.68556450220527 +7599 9320000 1 1 0.0891522581372151 +4031 5480000 1 2 -8.32287745713091 +6409 6520000 1 1 -5.79396041034532 +11670 5800000 0 0 -0.156716040645066 +9263 6400000 0 0 -1.39604514845467 +8943 8280000 0 0 -1.39604514845467 +11922 11360000 0 0 -0.758663112569202 +10063 9520000 0 0 1.80352436566587 +7625 7240000 0 0 
-2.08303522148858 +1661 9800000 0 0 1009.49676396777 +9614 10240000 0 0 0.109093242636216 +10158 10400000 0 0 -1.50166421549656 +10016 6560000 0 0 -7.71527447920201 +11622 5200000 0 0 -0.292001994159963 +10557 6440000 0 0 -2.11754728482204 +6116 7160000 0 0 -0.551744956313205 +9195 7240000 0 0 -1.39604514845467 +6263 8160000 0 0 -0.551744956313205 +5975 80000 0 0 -2.56432968687719 +7103 8600000 0 0 2.26546410056103 +8441 5080000 0 0 -1.64585659198784 +12319 5800000 0 0 -0.095068684640276 +6188 5960000 0 0 0.038178287067514 +8115 6960000 0 0 -0.306651691632065 +9890 6680000 0 0 1.80352436566587 +5467 7960000 0 0 -2.68556450220527 +7135 11040000 1 2 -1.8722893006199 +11189 5080000 0 0 0.119477926831113 +11862 9560000 0 0 18.6142102172637 +6109 6920000 0 0 -0.546546008422188 +2269 5400000 0 0 1.78196249319118 +9718 9200000 0 0 0.109093242636216 +11562 7160000 0 0 -7.59660914022173 +5898 11480000 1 1 -7.43612968868264 +10703 8520000 0 0 -1.12226232263806 +6062 8720000 0 0 -0.546546008422188 +7816 8640000 0 0 0.121805940382025 +11305 6360000 0 0 -7.59660914022173 +6147 6040000 0 0 -28.0437581612806 +12036 9680000 0 0 -2.97238939679331 +9492 1.2e+07 1 1 -1.39604514845467 +10947 11120000 0 0 0.119477926831113 +6394 4120000 0 0 0.038178287067514 +10758 8600000 1 2 1.48969312214626 +4840 8480000 0 0 32.0164527967292 +7992 6200000 0 0 -0.306651691632065 +12408 5560000 0 0 -0.140252454417955 +4996 7640000 0 0 32.0164527967292 +10604 8040000 0 0 -2.11754728482204 +11354 9400000 0 0 0.119477926831113 +11584 5600000 0 0 -7.59660914022173 +11319 1160000 0 0 0.119477926831113 +8678 3520000 0 0 1.16534278118412 +5743 7840000 0 0 -4.71021048187684 +5943 9560000 0 0 -0.116244026348644 +11606 8240000 0 0 18.6142102172637 +6247 6280000 0 0 0.038178287067514 +10224 6880000 0 0 5.17010811668441 +10025 6240000 0 0 -7.71527447920201 +9721 6480000 0 0 -0.221181092903122 +6352 7200000 0 0 0.314244744790559 +6447 6680000 0 0 -2.17308231217996 +3903 8440000 0 0 -80.7900108056189 +10931 8e+06 0 0 0.119477926831113 +8741 8760000 1 1 1.16534278118412 +7802 8920000 0 0 1.40746568756009 +11734 10040000 0 0 18.6142102172637 +9035 8320000 0 0 -1.39604514845467 +7968 7800000 0 0 0.0146892583201774 +9536 11680000 1 1 24.0802989407914 +7913 8160000 0 0 0.0146892583201774 +2025 11360000 0 0 -0.27841974050552 +12005 7560000 0 0 -0.058992955547077 +12364 9440000 1 1 -1.05283433953274 +7380 9040000 0 0 -1.8722893006199 +10636 5960000 0 0 1.48969312214626 +8644 6880000 0 0 -1.64585659198784 +5583 9720000 0 0 0.0544989589301492 +6379 9400000 0 0 0.038178287067514 +5878 5200000 1 2 138.411810554307 +1930 7680000 0 0 0.6144911877912 +5715 9040000 0 0 0.0544989589301492 +2347 9880000 0 0 18.3106342000275 +5304 8040000 0 0 -2.68556450220527 +9463 6760000 0 0 24.0802989407914 +11598 11760000 0 0 -33.5652282911849 +12289 6560000 0 0 -0.095068684640276 +11188 5840000 0 0 0.119477926831113 +8322 7840000 0 0 12.472637050972 +7955 6680000 0 0 69.3619169237608 +5940 9520000 0 0 138.411810554307 +10276 11360000 0 0 -0.495372461305743 +9350 8320000 0 0 0.343988589646793 +8050 5680000 0 0 -0.306651691632065 +9940 16600000 0 0 -7.71527447920201 +6958 9480000 0 0 123.662884927066 +7925 9200000 1 2 69.3619169237608 +6133 5160000 1 1 -0.546546008422188 +12125 8320000 0 0 -2.97238939679331 +6243 6240000 0 0 -0.551744956313205 +11120 6920000 1 2 0.119477926831113 +7066 7880000 1 1 53.8922401893697 +12274 7040000 0 0 -6.09945292724748 +6481 7400000 1 1 113.996529786003 +7422 9840000 0 0 -1.8722893006199 +11881 6800000 0 0 -0.83662462309424 +9336 6520000 
0 0 0.343988589646793 +7175 9760000 0 0 -1.8722893006199 +6509 5480000 0 0 0.206532547999694 +8310 6560000 1 1 0.706171323303662 +11923 6960000 1 2 0.28943560057888 +12000 6520000 0 0 153.286929621036 +9311 11400000 1 2 -0.0445256204054556 +7440 8040000 1 2 -0.643837817130288 +7204 9160000 1 1 53.8922401893697 +3714 8240000 0 0 -80.7900108056189 +9775 10040000 0 0 0.109093242636216 +12155 5160000 1 2 5.35656754633371 +11987 10800000 0 0 -0.758663112569202 +1646 5320000 0 0 0.6144911877912 +6178 11160000 0 0 0.314244744790559 +11450 10200000 0 0 -33.5652282911849 +8609 5720000 0 0 -1.64585659198784 +7237 11600000 1 1 -9.83638412809451 +8168 8160000 0 0 12.472637050972 +5331 7240000 0 0 -2.68556450220527 +8337 11200000 1 1 12.472637050972 +11540 7520000 0 0 -7.59660914022173 +6638 6480000 0 0 -2.17308231217996 +9421 6880000 1 1 -0.221181092903122 +5857 10280000 0 0 -4.71021048187684 +12423 10240000 1 1 0.447304402417004 +11434 7280000 0 0 -0.838323353293413 +11763 5720000 0 0 -33.5652282911849 +6393 10640000 0 0 -0.551744956313205 +3356 6680000 0 0 9.43801552398515 +6684 7200000 0 0 -2.17308231217996 +5180 5680000 0 0 32.0164527967292 +9362 6040000 1 1 24.0802989407914 +6371 6760000 0 0 -0.551744956313205 +10834 5400000 0 0 1.48969312214626 +7556 5560000 0 0 -0.643837817130288 +1692 6720000 0 0 0.6144911877912 +11797 8680000 0 0 -0.156716040645066 +10609 8720000 0 0 -1.12226232263806 +6386 7920000 0 0 -5.79396041034532 +8280 10120000 1 1 12.472637050972 +11063 5640000 0 0 0.119477926831113 +9808 7480000 0 0 1.55966630395357 +8436 5120000 0 0 19.9033614698601 +11366 6920000 0 0 10.757870987591 +11216 7960000 0 0 0.119477926831113 +11608 4360000 1 1 1.45433391506691 +4528 8840000 0 0 602.524698133919 +8061 10080000 0 0 -0.306651691632065 +9079 8920000 0 0 -1.39604514845467 +6551 5520000 0 0 0.365234432995809 +5113 7200000 0 0 -2.68556450220527 +10288 5080000 0 0 -1.50166421549656 +11904 8080000 0 0 -8.74375475292944 +7939 10680000 1 2 0.0146892583201774 +11937 10600000 0 0 153.286929621036 +2242 6040000 0 0 18.3106342000275 +8095 9840000 0 0 -0.306651691632065 +9679 1600000 0 0 1.55966630395357 +6606 8040000 0 0 0.365234432995809 +6218 6080000 0 0 -0.546546008422188 +9410 7880000 0 0 24.0802989407914 +2378 6960000 0 0 18.3106342000275 +9171 11880000 1 1 -1.39604514845467 +1766 7800000 0 0 -0.27841974050552 +11417 9640000 0 0 -7.59660914022173 +11536 5480000 0 0 -0.292001994159963 +11857 7160000 0 0 153.286929621036 +11750 5480000 0 0 -33.5652282911849 +1839 6480000 0 0 0.6144911877912 +6911 5280000 1 2 63.7310840982386 +6573 11520000 1 2 -2.17308231217996 +11952 7560000 0 0 -0.83662462309424 +1765 5400000 0 0 -0.27841974050552 +5255 2440000 0 0 32.0164527967292 +8024 8680000 0 0 64.9882329460878 +9627 7240000 1 1 -0.221181092903122 +10545 10480000 0 0 5.17010811668441 +5133 7280000 0 0 -2.68556450220527 +4210 11240000 0 0 -8.32287745713091 +6206 7080000 0 0 -28.0437581612806 +10620 6600000 0 0 1.48969312214626 +11786 9240000 0 0 -0.83662462309424 +7985 9e+06 0 0 -0.306651691632065 +5613 12040000 0 0 -2.68556450220527 +2607 10800000 0 0 -1.14280405091517 +12308 7040000 0 0 -0.095068684640276 +4662 10760000 0 0 602.524698133919 +1785 5280000 0 0 0.6144911877912 +11827 7280000 0 0 -0.83662462309424 +7423 6480000 0 0 -8.54848603133888 +6289 5600000 0 0 0.314244744790559 +6686 7880000 0 0 0.365234432995809 +11523 5040000 0 0 -0.156716040645066 +12066 9480000 0 0 -0.058992955547077 +10267 7600000 0 0 -0.495372461305743 +11198 11480000 0 0 1.45433391506691 +8263 6840000 0 0 69.3619169237608 +12342 
10760000 1 1 -1.05283433953274 +9301 5200000 1 1 0.343988589646793 +6411 6280000 0 0 -0.551744956313205 +7588 5320000 0 0 -2.08303522148858 +11333 7520000 0 0 1.45433391506691 +11588 5400000 0 0 -0.156716040645066 +3302 14640000 0 0 9.43801552398515 +7572 10480000 0 0 -0.643837817130288 +11841 7360000 0 0 0.28943560057888 +2384 9160000 0 0 18.3106342000275 +11877 7440000 0 0 -0.758663112569202 +6513 6e+06 0 0 0.038178287067514 +8126 7800000 0 0 69.3619169237608 +6308 8960000 0 0 -0.551744956313205 +11855 8480000 0 0 -0.758663112569202 +2564 6760000 0 0 -1.14280405091517 +10378 9320000 0 0 -1.12226232263806 +5707 8320000 0 0 0.0544989589301492 +12417 8160000 0 0 0.447304402417004 +12150 10560000 1 1 -2.97238939679331 +11227 11760000 0 0 7.53720595295246 +11626 6120000 1 2 -0.156716040645066 +12276 10440000 0 0 -6.09945292724748 +7906 6920000 0 0 -0.306651691632065 +12212 8080000 0 0 -0.058992955547077 +11406 4200000 0 0 10.757870987591 +11473 8120000 0 0 -7.59660914022173 +8496 5960000 0 0 -1.64585659198784 +5653 5120000 0 0 0.0544989589301492 +7219 8840000 0 0 -1.8722893006199 +9842 8040000 0 0 1.55966630395357 +11751 5480000 0 0 -33.5652282911849 +6038 5720000 1 1 11.9812059514487 +6426 5160000 1 2 319.409900024384 +9034 5320000 0 0 -1.39604514845467 +5416 11960000 0 0 1.73410404624277 +7471 6120000 0 0 -8.54848603133888 +8040 8760000 0 0 69.3619169237608 +9730 9e+06 0 0 -0.221181092903122 +5096 8800000 0 0 32.0164527967292 +3886 7400000 0 0 -80.7900108056189 +12286 9e+06 0 0 -28.2106827541909 +8118 6120000 0 0 12.472637050972 +8562 8280000 1 1 -1.64585659198784 +11586 5200000 0 0 -1.66218771772668 +10970 6200000 0 0 0.119477926831113 +12087 12160000 0 0 -2.97238939679331 +11931 13520000 1 2 0.125824786481687 +7374 7080000 0 0 -1.8722893006199 +12130 6880000 1 2 -7.72068673638987 +6564 11480000 0 0 0.365234432995809 +11190 8440000 0 0 -7.59660914022173 +7685 5320000 0 0 0.0891522581372151 +11116 8880000 0 0 7.53720595295246 +7765 6720000 0 0 0.121805940382025 +11026 6200000 0 0 0.119477926831113 +11266 9960000 0 0 -7.59660914022173 +8459 7440000 0 0 -1.64585659198784 +7857 8120000 0 0 0.0891522581372151 +11179 5480000 0 0 -0.838323353293413 +7858 9280000 0 0 0.0146892583201774 +11858 6120000 0 0 -0.058992955547077 +7209 5640000 0 0 53.8922401893697 +3994 7640000 0 0 -80.7900108056189 +10615 6280000 0 0 1.48969312214626 +9466 7320000 0 0 -0.221181092903122 +6185 5560000 0 0 0.038178287067514 +5909 6800000 0 0 -0.551744956313205 +5540 10040000 0 0 0.0544989589301492 +5619 7760000 0 0 0.0544989589301492 +5688 8600000 1 1 -0.116244026348644 +11526 8e+05 0 0 1.45433391506691 +12058 2160000 0 0 -0.758663112569202 +7197 7240000 0 0 2.26546410056103 +11578 8280000 1 2 -0.838323353293413 +2510 8200000 0 0 -1.14280405091517 +6490 9040000 0 0 -5.79396041034532 +7259 7200000 0 0 53.8922401893697 +3143 11240000 0 0 9.43801552398515 +1672 13880000 0 0 1009.49676396777 +1917 7800000 0 0 -0.27841974050552 +10228 6e+05 0 0 -0.495372461305743 +2243 6640000 0 0 18.3106342000275 +6093 6960000 0 0 0.038178287067514 +7693 7240000 0 0 -2.08303522148858 +8823 6640000 0 0 -1.64585659198784 +7657 7680000 0 0 -2.08303522148858 +1773 7960000 0 0 0.6144911877912 +10411 5640000 0 0 -1.50166421549656 +6240 7240000 0 0 11.9812059514487 +2342 5160000 1 2 18.3106342000275 +7689 6360000 0 0 -2.08303522148858 +8657 12760000 0 0 1.16534278118412 +8185 1080000 0 0 12.472637050972 +7147 8920000 0 0 -9.83638412809451 +6508 9360000 0 0 -5.79396041034532 +6632 6480000 0 0 0.206532547999694 +6007 10720000 1 1 -7.43612968868264 
+9356 9440000 0 0 -1.39604514845467 +7483 10600000 0 0 -0.643837817130288 +4228 5080000 0 0 -8.32287745713091 +7104 6040000 0 0 -9.83638412809451 +7192 7160000 0 0 53.8922401893697 +6444 9240000 0 0 0.038178287067514 +1814 6960000 0 0 0.6144911877912 +7616 5800000 0 0 0.0891522581372151 +6653 6560000 0 0 0.206532547999694 +2317 7160000 0 0 18.3106342000275 +9880 7080000 0 0 0.109093242636216 +7648 7920000 0 0 0.0891522581372151 +7962 6840000 0 0 0.121805940382025 +12078 3600000 0 0 -7.72068673638987 +5545 8080000 0 0 1.73410404624277 +9751 7720000 0 0 0.109093242636216 +5050 8440000 0 0 32.0164527967292 +12446 11480000 0 0 -0.140252454417955 +8957 9640000 0 0 -0.0445256204054556 +5769 5360000 0 0 -7.43612968868264 +11469 11280000 1 2 -7.59660914022173 +8010 5320000 0 0 -0.306651691632065 +9680 5400000 0 0 -0.221181092903122 +2325 9840000 0 0 1.78196249319118 +7303 10800000 0 0 -9.83638412809451 +12074 5880000 0 0 -0.058992955547077 +10442 9280000 1 2 5.17010811668441 +9099 8800000 0 0 0.343988589646793 +9666 10880000 0 0 -0.646502369334301 +11804 5600000 0 0 -1.66218771772668 +6138 3800000 0 0 -7.43612968868264 +6452 9240000 0 0 -2.17308231217996 +7930 7040000 0 0 0.121805940382025 +2332 10960000 0 0 18.3106342000275 +7494 7320000 0 0 -0.643837817130288 +5765 10640000 0 0 -4.71021048187684 +6171 7880000 1 1 -28.0437581612806 +6491 7760000 1 1 0.038178287067514 +1618 7080000 0 0 0.6144911877912 +11927 11240000 0 0 0.125824786481687 +9753 10600000 0 0 -0.221181092903122 +8167 5600000 0 0 1.40746568756009 +12296 6600000 0 0 -1.05283433953274 +6587 8e+06 0 0 -5.79396041034532 +8094 6680000 0 0 -2.57837679460885 +9010 10360000 0 0 -1.39604514845467 +10827 7120000 0 0 -5.18939898967453 +5821 11760000 0 0 138.411810554307 +6555 6160000 0 0 -5.79396041034532 +4289 7760000 0 0 -8.32287745713091 +10087 6840000 0 0 1.80352436566587 +9850 5920000 0 0 1.55966630395357 +6497 7720000 0 0 0.038178287067514 +10846 8560000 0 0 -2.11754728482204 +3483 6320000 0 0 -80.7900108056189 +7849 6400000 0 0 0.0146892583201774 +6231 11160000 0 0 -0.551744956313205 +3824 320000 1 2 -80.7900108056189 +12114 7040000 0 0 0.28943560057888 +11281 10200000 0 0 0.119477926831113 +5679 12320000 1 1 -7.43612968868264 +6309 5240000 0 0 -0.551744956313205 +7950 5040000 0 0 0.121805940382025 +12174 8480000 0 0 0.125824786481687 +8360 6200000 0 0 12.472637050972 +3283 5840000 0 0 9.43801552398515 +1950 7040000 0 0 -0.27841974050552 +8779 5760000 0 0 -1.64585659198784 +5517 5840000 0 0 -2.68556450220527 +7624 6880000 0 0 0.0891522581372151 +7729 5280000 0 0 0.0146892583201774 +8039 5320000 1 2 -2.57837679460885 +11238 5760000 0 0 0.119477926831113 +7808 6600000 0 0 0.0146892583201774 +9249 6240000 0 0 0.343988589646793 +10732 2240000 0 0 -2.11754728482204 +11716 5360000 0 0 -0.83662462309424 +8179 17600000 1 1 12.472637050972 +11583 7e+06 0 0 10.757870987591 +9159 7600000 0 0 0.343988589646793 +9173 5320000 0 0 -1.39604514845467 +10415 7040000 0 0 -0.495372461305743 +9449 9520000 0 0 24.0802989407914 +6748 5240000 1 1 0.206532547999694 +10974 8160000 0 0 7.53720595295246 +9247 8360000 0 0 0.343988589646793 +12084 5600000 1 2 -7.72068673638987 +7663 8960000 0 0 0.0891522581372151 +5862 8920000 0 0 -7.43612968868264 +7980 8840000 0 0 -0.306651691632065 +8238 11040000 0 0 12.472637050972 +11993 10440000 0 0 -0.058992955547077 +12273 6960000 0 0 -6.09945292724748 +6337 5280000 0 0 -3.1698779809611 +11175 8480000 0 0 -0.838323353293413 +11367 7040000 0 0 10.757870987591 +12267 5400000 0 0 -6.09945292724748 +9542 7880000 0 0 
-0.646502369334301 +2449 5920000 0 0 -1.14280405091517 +6087 6400000 0 0 0.038178287067514 +9573 8480000 0 0 -0.221181092903122 +12210 5840000 0 0 5.35656754633371 +11246 6920000 0 0 1.45433391506691 +11425 9240000 0 0 10.757870987591 +5859 5080000 0 0 -0.116244026348644 +6988 6400000 0 0 63.7310840982386 +10657 10560000 0 0 -2.11754728482204 +9673 5200000 0 0 -0.221181092903122 +11951 5600000 0 0 0.125824786481687 +2399 7880000 0 0 1.78196249319118 +9172 5520000 0 0 -1.39604514845467 +10358 6280000 0 0 -1.12226232263806 +7384 9360000 0 0 -1.8722893006199 +1948 6200000 0 0 0.6144911877912 +7182 5480000 0 0 53.8922401893697 +8230 7440000 0 0 1.40746568756009 +11276 11720000 0 0 -0.838323353293413 +6191 12280000 0 0 -3.1698779809611 +11889 9680000 1 2 -0.156716040645066 +7914 6440000 0 0 0.0146892583201774 +10645 7160000 0 0 1.48969312214626 +6484 8200000 0 0 4.94032442818608 +7539 10520000 0 0 -2.08303522148858 +8125 9560000 0 0 1.40746568756009 +6086 6600000 0 0 0.314244744790559 +7994 9920000 0 0 69.3619169237608 +7524 8440000 0 0 -8.54848603133888 +11655 6920000 0 0 -0.292001994159963 +6043 5880000 0 0 -0.551744956313205 +7239 7560000 0 0 53.8922401893697 +4288 8160000 0 0 -8.32287745713091 +5132 7320000 0 0 32.0164527967292 +10640 1.2e+07 0 0 -2.11754728482204 +10879 5360000 0 0 -2.11754728482204 +9638 5400000 0 0 -0.221181092903122 +11746 6480000 0 0 -1.66218771772668 +8289 6720000 0 0 12.472637050972 +6283 5360000 0 0 0.038178287067514 +4048 11160000 0 0 -80.7900108056189 +2013 2120000 0 0 -0.27841974050552 +2220 7960000 0 0 18.3106342000275 +8246 12040000 1 1 12.472637050972 +4986 5600000 0 0 32.0164527967292 +7412 6160000 0 0 -1.8722893006199 +6496 10800000 0 0 4.94032442818608 +5750 10240000 1 1 -7.43612968868264 +7775 6160000 0 0 0.0146892583201774 +7967 9920000 0 0 0.0146892583201774 +11454 5440000 0 0 0.119477926831113 +11275 160000 0 0 0.119477926831113 +6225 7680000 0 0 -3.1698779809611 +11167 9160000 0 0 -7.59660914022173 +6377 8240000 0 0 0.365234432995809 +11902 9920000 1 2 0.125824786481687 +7363 6880000 0 0 -9.83638412809451 +1878 9680000 0 0 0.6144911877912 +10006 6760000 0 0 1.80352436566587 +8982 6e+06 0 0 -1.39604514845467 +3223 8240000 0 0 9.43801552398515 +11191 5720000 0 0 -0.838323353293413 +10893 6320000 1 2 0.119477926831113 +11700 5640000 0 0 -33.5652282911849 +11807 5520000 0 0 -0.758663112569202 +9654 5880000 1 1 1.55966630395357 +12378 10360000 0 0 -28.2106827541909 +6217 7560000 0 0 -28.0437581612806 +5893 7320000 0 0 0.0544989589301492 +7822 5360000 0 0 0.0146892583201774 +5718 10160000 1 1 0.0544989589301492 +12329 11400000 1 1 -28.2106827541909 +3148 9840000 0 0 9.43801552398515 +12132 7240000 0 0 -0.058992955547077 +11027 5120000 0 0 0.119477926831113 +3412 9880000 0 0 -80.7900108056189 +2519 10920000 0 0 -1.14280405091517 +12184 7880000 0 0 -0.095068684640276 +2296 6480000 0 0 1.78196249319118 +1835 8e+06 0 0 0.6144911877912 +4133 8480000 0 0 -8.32287745713091 +6316 6160000 0 0 -0.551744956313205 +10566 7920000 0 0 -1.12226232263806 +6674 5440000 0 0 0.365234432995809 +11315 5680000 0 0 1.45433391506691 +7694 8960000 0 0 0.0146892583201774 +11828 9880000 0 0 -0.83662462309424 +7826 8840000 0 0 0.0891522581372151 +1658 8440000 0 0 1009.49676396777 +12270 7960000 0 0 -6.09945292724748 +6175 6120000 0 0 -0.551744956313205 +6360 8120000 0 0 113.996529786003 +8048 6840000 0 0 -2.57837679460885 +9326 6560000 0 0 0.343988589646793 +12415 8640000 0 0 0.447304402417004 +7867 8280000 0 0 -0.306651691632065 +5565 8840000 0 0 -2.68556450220527 +12129 9640000 1 2 
-7.72068673638987 +1887 9320000 0 0 0.6144911877912 +6470 9680000 0 0 -5.79396041034532 +11180 10760000 0 0 -0.838323353293413 +8316 8200000 0 0 0.706171323303662 +10332 9240000 0 0 -1.50166421549656 +6042 12600000 1 1 -0.546546008422188 +7518 7480000 0 0 -0.643837817130288 +12262 9200000 1 1 -6.09945292724748 +11811 2120000 1 2 -33.5652282911849 +12242 7360000 0 0 -0.140252454417955 +9697 6520000 0 0 0.109093242636216 +11950 11480000 1 2 0.28943560057888 +8203 12600000 0 0 -2.57837679460885 +8186 3320000 0 0 12.472637050972 +6574 8920000 0 0 4.94032442818608 +1760 5480000 1 2 -0.27841974050552 +6154 10200000 0 0 -0.551744956313205 +10533 10960000 0 0 5.17010811668441 +7668 10760000 1 1 0.0891522581372151 +5962 9640000 0 0 -2.56432968687719 +7848 7200000 0 0 0.0891522581372151 +9033 5280000 0 0 -0.0445256204054556 +6106 9640000 0 0 0.314244744790559 +5646 5440000 0 0 -2.68556450220527 +6122 6600000 0 0 0.038178287067514 +8377 9760000 0 0 12.472637050972 +8206 7360000 0 0 12.472637050972 +10201 7400000 0 0 -0.495372461305743 +11649 8400000 0 0 1.45433391506691 +8921 9440000 0 0 -1.39604514845467 +11521 7320000 0 0 -7.59660914022173 +11983 7080000 0 0 -8.74375475292944 +1667 10560000 0 0 1009.49676396777 +11690 5640000 0 0 -0.156716040645066 +11814 1040000 0 0 -1.66218771772668 +2388 9040000 0 0 1.78196249319118 +9810 9920000 0 0 -7.71527447920201 +11785 12040000 0 0 -33.5652282911849 +8020 6280000 0 0 12.472637050972 +10429 6840000 0 0 -1.12226232263806 +7750 7040000 0 0 0.0891522581372151 +9270 11160000 1 1 -1.39604514845467 +12093 7360000 0 0 0.125824786481687 +11453 9960000 0 0 10.757870987591 +5329 10440000 0 0 32.0164527967292 +5824 9440000 0 0 -4.71021048187684 +6824 5680000 0 0 123.662884927066 +6934 5760000 1 2 2.26546410056103 +9327 8560000 0 0 0.343988589646793 +5748 5400000 0 0 -0.116244026348644 +2589 8800000 0 0 -1.14280405091517 +9595 8720000 1 1 24.0802989407914 +6459 8200000 0 0 0.365234432995809 +6338 8e+06 0 0 -3.1698779809611 +7070 6800000 0 0 2.26546410056103 +6376 10920000 0 0 -2.17308231217996 +5567 5440000 0 0 1.73410404624277 +6012 7960000 1 1 0.0544989589301492 +4296 9280000 0 0 -8.32287745713091 +9482 5840000 0 0 24.0802989407914 +11978 10080000 0 0 -33.5652282911849 +8298 6560000 0 0 12.472637050972 +12430 10400000 0 0 0.447304402417004 +8985 5360000 0 0 -1.39604514845467 +10317 6840000 0 0 -1.12226232263806 +12409 6200000 0 0 -1.05283433953274 +8984 6880000 0 0 -0.0445256204054556 +5221 8e+06 0 0 32.0164527967292 +9088 9400000 0 0 0.343988589646793 +10649 7720000 0 0 -1.12226232263806 +7591 8160000 0 0 -8.54848603133888 +11543 5080000 0 0 -0.292001994159963 +6361 8080000 0 0 0.038178287067514 +8034 5560000 0 0 69.3619169237608 +3593 6400000 0 0 -80.7900108056189 +12241 6920000 0 0 0.125824786481687 +6935 10640000 0 0 123.662884927066 +8182 8e+05 0 0 12.472637050972 +5606 6680000 0 0 0.0544989589301492 +8178 480000 0 0 12.472637050972 +1912 6840000 0 0 -0.27841974050552 +8480 5160000 0 0 -1.64585659198784 +6193 8e+06 0 0 -0.551744956313205 +6857 12440000 0 0 63.7310840982386 +1944 11520000 0 0 -0.27841974050552 +7047 6360000 0 0 -9.83638412809451 +8181 6840000 0 0 69.3619169237608 +7227 6e+06 0 0 53.8922401893697 +8558 8040000 1 1 19.9033614698601 +6641 6640000 0 0 0.206532547999694 +9044 6760000 1 1 0.343988589646793 +5717 6920000 0 0 -0.116244026348644 +10157 10480000 0 0 1.80352436566587 +10284 5400000 0 0 -1.50166421549656 +4857 7280000 0 0 32.0164527967292 +7076 9320000 0 0 -9.83638412809451 +7893 8120000 1 2 1.40746568756009 +5666 7200000 0 0 
0.0544989589301492 +6868 10040000 0 0 123.662884927066 +7545 6280000 0 0 0.0891522581372151 +9514 6280000 0 0 -0.221181092903122 +6711 9280000 0 0 123.662884927066 +6494 1760000 0 0 -5.79396041034532 +8614 6480000 0 0 -1.64585659198784 +6051 7560000 0 0 -0.551744956313205 +6219 5400000 0 0 0.038178287067514 +11245 7880000 0 0 0.119477926831113 +6943 8880000 0 0 53.8922401893697 +11834 6720000 0 0 -33.5652282911849 +2541 9e+06 0 0 -1.14280405091517 +5934 8560000 0 0 0.038178287067514 +12115 6840000 1 2 -0.095068684640276 +8129 6760000 0 0 12.472637050972 +6449 6200000 0 0 -5.79396041034532 +6305 5360000 1 1 75.4443389741743 +2306 5840000 0 0 18.3106342000275 +11984 10240000 0 0 -8.74375475292944 +11057 6560000 0 0 0.119477926831113 +7291 5520000 0 0 -1.8722893006199 +6467 6480000 0 0 -2.17308231217996 +1843 7640000 0 0 0.6144911877912 +10094 6440000 0 0 1.80352436566587 +11208 7640000 0 0 -7.59660914022173 +6235 6600000 0 0 -0.551744956313205 +3902 10120000 0 0 -80.7900108056189 +4333 9840000 0 0 602.524698133919 +10007 4680000 0 0 1.80352436566587 +6881 11080000 0 0 123.662884927066 +12412 5840000 0 0 0.447304402417004 +12022 10120000 0 0 0.125824786481687 +9770 5440000 0 0 -7.71527447920201 +7249 9440000 0 0 2.26546410056103 +11408 6480000 0 0 -0.838323353293413 +10029 9880000 0 0 1.80352436566587 +10674 10360000 1 2 1.48969312214626 +12314 6480000 1 1 -0.140252454417955 +8765 6760000 0 0 -1.64585659198784 +1849 6560000 0 0 -0.27841974050552 +11651 12440000 0 0 27.3757584875382 +6957 5520000 1 2 123.662884927066 +10018 2760000 0 0 -7.71527447920201 +11283 7840000 1 1 7.53720595295246 +2420 11440000 0 0 1.78196249319118 +9598 5360000 0 0 -0.646502369334301 +7489 7680000 0 0 -0.643837817130288 +8559 8400000 1 1 -1.64585659198784 +10134 5800000 0 0 5.17010811668441 +6565 7520000 0 0 4.94032442818608 +3682 8480000 0 0 -80.7900108056189 +8055 6480000 0 0 69.3619169237608 +8053 9040000 0 0 -0.306651691632065 +9493 5800000 0 0 -1.39604514845467 +5892 5200000 0 0 0.0544989589301492 +6837 9600000 0 0 123.662884927066 +7111 7760000 0 0 53.8922401893697 +11992 5880000 0 0 -2.97238939679331 +4585 6400000 1 2 602.524698133919 +9535 9920000 0 0 24.0802989407914 +11613 5440000 0 0 -33.5652282911849 +8870 5400000 0 0 -1.39604514845467 +6527 9240000 0 0 4.94032442818608 +8090 8480000 0 0 64.9882329460878 +10757 6400000 1 2 -5.18939898967453 +7587 8480000 0 0 0.0891522581372151 +6256 11640000 0 0 0.038178287067514 +6179 8520000 1 1 0.314244744790559 +6518 6880000 0 0 4.94032442818608 +9951 6560000 0 0 -7.71527447920201 +7347 5200000 0 0 -9.83638412809451 +2486 8440000 0 0 -1.14280405091517 +8594 6280000 0 0 1.16534278118412 +9509 7320000 0 0 24.0802989407914 +10979 5720000 0 0 7.53720595295246 +7301 9560000 0 0 -1.8722893006199 +10552 9200000 0 0 -1.50166421549656 +7963 5520000 0 0 -0.306651691632065 +5944 7120000 0 0 -0.551744956313205 +7396 11400000 0 0 2.26546410056103 +10380 5240000 0 0 -0.495372461305743 +9201 9480000 0 0 -1.39604514845467 +5533 5720000 0 0 -2.68556450220527 +12063 5560000 0 0 -0.095068684640276 +11313 10680000 0 0 10.757870987591 +6844 7480000 0 0 63.7310840982386 +11363 9040000 0 0 10.757870987591 +12039 8120000 1 2 -0.095068684640276 +11942 9080000 0 0 -0.095068684640276 +11321 7880000 0 0 -7.59660914022173 +8041 7760000 0 0 64.9882329460878 +7144 9640000 0 0 2.26546410056103 +12204 80000 0 0 -0.095068684640276 +6454 5080000 0 0 4.94032442818608 +10634 12360000 0 0 -1.12226232263806 +11500 5680000 0 0 -0.292001994159963 +4833 6560000 0 0 32.0164527967292 +6809 9960000 0 0 
123.662884927066 +11261 6560000 0 0 0.119477926831113 +6063 10280000 0 0 -0.551744956313205 +10453 5440000 0 0 -1.12226232263806 +7332 8160000 0 0 53.8922401893697 +6780 10520000 0 0 63.7310840982386 +6073 6e+06 0 0 0.314244744790559 +5987 6280000 0 0 -0.116244026348644 +8236 8200000 0 0 1.40746568756009 +7842 5200000 0 0 0.0891522581372151 +5884 7040000 0 0 -2.56432968687719 +6288 7080000 0 0 -3.1698779809611 +6254 11240000 0 0 0.314244744790559 +11928 10560000 0 0 153.286929621036 +6698 9640000 0 0 0.206532547999694 +5231 10240000 0 0 32.0164527967292 +7641 9840000 1 1 -8.54848603133888 +9027 7560000 0 0 0.343988589646793 +6483 10760000 0 0 -5.79396041034532 +10293 7840000 1 1 -0.495372461305743 +7497 6160000 0 0 -8.54848603133888 +2630 4240000 0 0 -1.14280405091517 +7398 9800000 0 0 -8.54848603133888 +7933 7e+06 0 0 -0.306651691632065 +6558 6320000 0 0 0.206532547999694 +9562 9600000 0 0 -0.221181092903122 +2435 5240000 0 0 -1.14280405091517 +4148 8640000 0 0 -8.32287745713091 +11547 1240000 0 0 -7.59660914022173 +2166 10920000 0 0 1.78196249319118 +7821 6760000 0 0 -0.306651691632065 +11004 8640000 0 0 0.119477926831113 +9882 11840000 0 0 -7.71527447920201 +12120 8080000 0 0 -2.97238939679331 +1867 5720000 0 0 -0.27841974050552 +7353 9440000 0 0 -1.8722893006199 +5788 7880000 0 0 0.0544989589301492 +9515 8840000 0 0 24.0802989407914 +7743 9040000 0 0 0.0891522581372151 +9938 9560000 0 0 0.109093242636216 +2195 6520000 0 0 18.3106342000275 +11262 9600000 0 0 -7.59660914022173 +7589 5120000 0 0 -0.643837817130288 +7576 5760000 0 0 -2.08303522148858 +5965 11440000 0 0 0.0544989589301492 +6402 7e+06 0 0 0.038178287067514 +12256 8280000 1 2 5.35656754633371 +5564 8960000 0 0 -2.68556450220527 +6168 7080000 0 0 -0.551744956313205 +7724 9160000 0 0 0.0891522581372151 +7268 6400000 0 0 -1.8722893006199 +2473 8600000 0 0 -1.14280405091517 +11712 8120000 0 0 -0.156716040645066 +9624 7640000 0 0 24.0802989407914 +8286 6800000 0 0 12.472637050972 +10186 7880000 1 1 1.80352436566587 +11639 7800000 0 0 -1.66218771772668 +11223 9880000 0 0 -7.59660914022173 +11706 11520000 0 0 -1.66218771772668 +10734 7400000 0 0 -2.11754728482204 +3995 6680000 0 0 -80.7900108056189 +6004 9520000 0 0 -7.43612968868264 +11350 5280000 0 0 -7.59660914022173 +2248 9400000 0 0 18.3106342000275 +8021 7080000 1 2 64.9882329460878 +10908 11080000 1 2 -2.11754728482204 +6239 6960000 0 0 -3.1698779809611 +9444 8880000 0 0 24.0802989407914 +11625 9880000 1 2 -1.66218771772668 +10835 9e+06 0 0 1.48969312214626 +4106 7880000 0 0 -8.32287745713091 +11143 7400000 0 0 -0.838323353293413 +11903 5480000 0 0 -2.97238939679331 +12422 6560000 0 0 0.447304402417004 +5654 6480000 0 0 0.0544989589301492 +12275 5560000 0 0 -6.09945292724748 +2559 9160000 0 0 -1.14280405091517 +7485 7480000 0 0 -8.54848603133888 +6967 9360000 0 0 63.7310840982386 +7792 11200000 0 0 0.0146892583201774 +7167 8120000 0 0 63.7310840982386 +9145 6440000 0 0 -1.39604514845467 +11426 11040000 0 0 -7.59660914022173 +2572 8720000 0 0 -1.14280405091517 +10558 6e+06 1 2 -1.12226232263806 +10792 6720000 0 0 -2.11754728482204 +5729 7200000 0 0 -7.43612968868264 +9700 5640000 1 1 -0.646502369334301 +7951 9920000 0 0 0.121805940382025 +10683 8800000 0 0 1.48969312214626 +11635 6920000 0 0 -0.292001994159963 +6281 5320000 0 0 -3.1698779809611 +11136 9560000 0 0 0.119477926831113 +1951 9240000 0 0 -0.27841974050552 +12144 7720000 0 0 -0.058992955547077 +11419 9e+06 0 0 -0.838323353293413 +11887 9280000 1 2 0.28943560057888 +5684 5960000 1 1 1.73410404624277 +6530 6720000 0 0 
-2.17308231217996 +7390 5320000 0 0 -8.54848603133888 +5846 6440000 0 0 -0.116244026348644 +7161 6400000 0 0 53.8922401893697 +2360 9440000 0 0 1.78196249319118 +2459 8480000 0 0 -1.14280405091517 +9168 6880000 0 0 -1.39604514845467 +11819 10360000 1 2 -0.83662462309424 +8450 9760000 0 0 12.472637050972 +8712 8120000 0 0 -1.64585659198784 +7716 8200000 0 0 -0.306651691632065 +8124 7280000 0 0 -2.57837679460885 +2311 5440000 0 0 18.3106342000275 +7146 8920000 0 0 -9.83638412809451 +12410 6280000 0 0 -0.140252454417955 +11316 6800000 0 0 -7.59660914022173 +9458 5840000 0 0 -0.646502369334301 +7397 9640000 0 0 -2.08303522148858 +2552 9880000 0 0 -1.14280405091517 +7258 5360000 0 0 -1.8722893006199 +8225 5880000 0 0 69.3619169237608 +8204 8720000 1 1 12.472637050972 +2540 18040000 0 0 -1.14280405091517 +8054 10960000 0 0 -2.57837679460885 +11320 8800000 0 0 0.119477926831113 +11075 6200000 0 0 0.119477926831113 +11909 10760000 0 0 -0.758663112569202 +10722 6960000 0 0 -2.11754728482204 +12246 5200000 0 0 -6.09945292724748 +9225 9240000 0 0 0.343988589646793 +8700 120000 0 0 1.16534278118412 +10130 5280000 0 0 -0.495372461305743 +3222 5400000 1 2 9.43801552398515 +8849 9560000 0 0 -1.39604514845467 +3909 5120000 0 0 -80.7900108056189 +7577 5360000 0 0 -2.08303522148858 +10697 520000 0 0 5.17010811668441 +8049 7120000 0 0 69.3619169237608 +12028 9760000 0 0 -0.058992955547077 +11782 6880000 0 0 -0.758663112569202 +5738 11040000 0 0 0.0544989589301492 +10537 10880000 0 0 5.17010811668441 +11948 9960000 0 0 -2.97238939679331 +7938 5800000 1 1 0.121805940382025 +1819 9920000 0 0 0.6144911877912 +7061 9360000 0 0 -9.83638412809451 +5968 7560000 0 0 11.9812059514487 +7929 10720000 0 0 69.3619169237608 +7969 7080000 1 2 64.9882329460878 +11416 11960000 0 0 -0.292001994159963 +12126 7800000 0 0 -2.97238939679331 +8056 8040000 0 0 -0.306651691632065 +11980 6360000 0 0 0.125824786481687 +11460 5480000 0 0 -7.59660914022173 +7843 7320000 0 0 -0.306651691632065 +12142 9760000 0 0 -0.095068684640276 +3780 5320000 0 0 -80.7900108056189 +11499 5840000 0 0 10.757870987591 +7222 10800000 0 0 53.8922401893697 +7365 9440000 0 0 2.26546410056103 +7436 7800000 0 0 -8.54848603133888 +11493 5760000 0 0 -0.156716040645066 +7450 6e+06 0 0 -1.8722893006199 +10745 8640000 0 0 -2.11754728482204 +11224 10760000 0 0 -7.59660914022173 +5797 9960000 1 1 -0.116244026348644 +1623 5320000 0 0 0.6144911877912 +7344 8160000 0 0 -8.54848603133888 +1799 8760000 0 0 0.6144911877912 +5659 9360000 0 0 -2.68556450220527 +11938 5360000 0 0 -0.758663112569202 +12006 7800000 1 2 -23.6978149367615 +7484 10120000 1 1 0.0891522581372151 +1774 9440000 0 0 0.6144911877912 +11486 840000 0 0 1.45433391506691 +8104 7320000 0 0 12.472637050972 +6052 8320000 0 0 0.038178287067514 +11349 7720000 0 0 -7.59660914022173 +12073 6880000 0 0 -8.74375475292944 +11368 5880000 0 0 -7.59660914022173 +8804 8840000 1 1 -1.64585659198784 +11390 10200000 0 0 -7.59660914022173 +7759 6560000 0 0 0.0891522581372151 +2440 7240000 0 0 -1.14280405091517 +7156 5160000 0 0 53.8922401893697 +9144 5160000 0 0 -1.39604514845467 +8962 5320000 0 0 -0.0445256204054556 +11944 5760000 0 0 153.286929621036 +11852 9200000 0 0 -8.74375475292944 +4114 6600000 0 0 -8.32287745713091 +6163 5600000 0 0 -0.546546008422188 +10971 7680000 0 0 0.119477926831113 +7339 2e+05 0 0 -8.54848603133888 +2304 8160000 0 0 1.78196249319118 +11961 5240000 1 2 153.286929621036 +11758 8480000 0 0 -1.66218771772668 +7819 5560000 0 0 1.40746568756009 +7863 5520000 0 0 0.0891522581372151 +11644 8320000 1 2 
-0.83662462309424 +12301 8680000 0 0 -0.095068684640276 +7250 5480000 0 0 2.26546410056103 +1617 10200000 0 0 0.6144911877912 +6183 5320000 0 0 -3.1698779809611 +2462 6120000 0 0 1.78196249319118 +7071 10120000 1 2 -1.8722893006199 +6115 6e+05 0 0 -7.43612968868264 +11546 7760000 0 0 1.45433391506691 +11571 7760000 0 0 10.757870987591 +6712 6040000 0 0 0.206532547999694 +7684 4320000 0 0 0.0891522581372151 +8058 5200000 0 0 0.0146892583201774 +5067 5320000 0 0 -2.68556450220527 +6408 9960000 0 0 -2.17308231217996 +11314 5960000 0 0 10.757870987591 +9926 10720000 0 0 -7.71527447920201 +8135 9600000 0 0 12.472637050972 +7302 7360000 0 0 -9.83638412809451 +8550 5520000 0 0 1.16534278118412 +6753 5200000 0 0 0.365234432995809 +4142 7440000 0 0 -8.32287745713091 +8134 5440000 0 0 -0.306651691632065 +10463 7640000 0 0 -1.50166421549656 +11935 8080000 1 1 18.6142102172637 +7469 7800000 0 0 -0.643837817130288 +11823 5200000 0 0 -1.66218771772668 +7787 10800000 1 2 0.0146892583201774 +9443 9800000 1 1 24.0802989407914 +6145 7320000 0 0 -3.1698779809611 +12313 5520000 0 0 -1.05283433953274 +10712 5920000 0 0 -5.18939898967453 +3728 6680000 0 0 -80.7900108056189 +2128 8640000 0 0 18.3106342000275 +8942 8240000 0 0 -0.0445256204054556 +7042 10440000 0 0 53.8922401893697 +5122 6200000 0 0 32.0164527967292 +12106 5280000 1 2 -16.9158143194335 +10022 8280000 0 0 -0.495372461305743 +5442 6840000 0 0 -2.68556450220527 +9619 6920000 0 0 24.0802989407914 +12427 7680000 0 0 0.447304402417004 +12186 9040000 0 0 -8.74375475292944 +8543 9400000 1 1 -1.64585659198784 +9451 8680000 0 0 -1.39604514845467 +8640 5040000 0 0 -1.64585659198784 +7921 10200000 0 0 1.40746568756009 +5230 7120000 0 0 32.0164527967292 +11485 6760000 0 0 10.757870987591 +10457 8120000 0 0 -1.50166421549656 +7877 1.2e+07 0 0 0.0146892583201774 +8651 5480000 0 0 1.16534278118412 +12298 11320000 0 0 -0.140252454417955 +7379 7920000 0 0 -0.643837817130288 diff --git a/modules/hrudelin_1_init.py b/modules/hrudelin_1_init.py index 27b52d997e45281243e49b5d9b8b0ca53e29d95b..538da85b02242a18dd401abbdb2ed754dc80f0bc 100755 --- a/modules/hrudelin_1_init.py +++ b/modules/hrudelin_1_init.py @@ -8,6 +8,8 @@ # AUTHOR(S): adapted from GRASS-HRU (ILMS) - JENA University # by IRSTEA - Christine Barachet, # Julien Veyssier +# Michael Rabotin +# Florent Veillon # PURPOSE: Prepare files for the next steps of HRU Delineation # shapefile : selected gauges # rasters : bounded DEM and others (geology, soils, landuse) @@ -631,6 +633,35 @@ if __name__ == '__main__': print('or if you don\'t have superuser access:\n') print('%spip3 install tqdm%s\n' % (COLOR_GREEN, COLOR_RESET)) sys.exit(1) + try: + from rastertodataframe import raster_to_dataframe + except Exception as e: + print('!!! %s rastertodataframe python3 module not found !!%s\n' % (COLOR_RED, COLOR_RESET)) + sys.exit(1) + try: + import geopandas as gpd + except Exception as e: + print('!!! %s geopandas python3 module not found !!%s\n' % (COLOR_RED, COLOR_RESET)) + print('On Debian/Ubuntu/Linux Mint you can install it with:\n') + print('%ssudo apt install python3-geopandas%s\n' % (COLOR_GREEN, COLOR_RESET)) + print('or if you don\'t have superuser access:\n') + print('%spip3 install geopandas%s\n' % (COLOR_GREEN, COLOR_RESET)) + sys.exit(1) + try: + import rtree + except Exception as e: + print('!!! 
%s rtree python3 module not found !!%s\n' % (COLOR_RED, COLOR_RESET)) + print('On Debian/Ubuntu/Linux Mint you can install it with:\n') + print('%ssudo apt install python3-rtree%s\n' % (COLOR_GREEN, COLOR_RESET)) + print('or if you don\'t have superuser access:\n') + print('%spip3 install rtree%s\n' % (COLOR_GREEN, COLOR_RESET)) + sys.exit(1) + try: + import pygeos + except Exception as e: + print('!!! %s pygeos python3 module not found !!%s\n' % (COLOR_RED, COLOR_RESET)) + print('%spip install pygeos%s\n' % (COLOR_GREEN, COLOR_RESET)) + sys.exit(1) parms_file = 'hrudelin_config.cfg' if len(sys.argv) > 1: @@ -639,7 +670,7 @@ if __name__ == '__main__': main(parms_file) try: - os.system('notify-send "hru-delin step 1 complete"') + os.system('notify-send "hru-delin-6-2 step 1 complete"') except Exception as e: pass else: diff --git a/modules/hrudelin_2_1_env_relocate.py b/modules/hrudelin_2_1_env_relocate.py new file mode 100755 index 0000000000000000000000000000000000000000..c5a9bd4710b19ff4565b1feb03fa125c59e27cfd --- /dev/null +++ b/modules/hrudelin_2_1_env_relocate.py @@ -0,0 +1,356 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + + +############################################################################ +# +# MODULE: hru-delin_basins.py +# AUTHOR(S): adapted from GRASS-HRU (ILMS) - JENA University +# by IRSTEA - Christine Barachet, +# Julien Veyssier +# Michael Rabotin +# Florent Veillon +# PURPOSE: 1. Relocates the gauges on the reaches +# 2. Calculates watersheds at the gauges +# +# +# COPYRIGHT: (C) 2020 UR RIVERLY - INRAE +# +# This program is free software under the GNU General Public +# License (>=v2). Read the file LICENSE that comes with +# HRU-DELIN for details. +# +############################################################################# + + + + +# to keep python2 compatibility +from __future__ import print_function +import string, os, sys, glob, types, time, platform +import numpy as np +try: + import ConfigParser +except Exception as e: + import configparser as ConfigParser +#import grass.script as grass +from grass.script.utils import decode, encode +import struct, math, csv, shutil + +from osgeo import gdal +from osgeo.gdalnumeric import * +from osgeo.gdalconst import * +from osgeo import ogr + +import multiprocessing +from multiprocessing import Pool, cpu_count + +from utils import isint, write_log +from reach import snapping_points_to_reaches, cut_streams_at_points +from reach import updateAttributeTable, processReachStats + +MY_ABS_PATH=os.path.abspath(__file__) +MY_DIR=os.path.dirname(MY_ABS_PATH) + +try: + # Python 3 + from subprocess import DEVNULL +except ImportError: + DEVNULL = open(os.devnull, 'wb') + +import pandas as pd + +''' + + MAIN + +''' +def main(parms_file, nbProc, generator=False): + print("-----------------------------------------------------------------------------------") + print('---------- HRU-delin Step 2-1 started ---------------------------------------------') + print("-----------------------------------------------------------------------------------") + + """OUTPUT files + - gauges_reloc.csv + - dams_reloc.csv + - step2_dams_for_watersheds.shp + - step2_gauges_for_watersheds.shp + """ + + #################### + #GRASS ENVIRONNEMENT + #################### + + configFileDir = os.path.dirname(parms_file) + # create main env + buildGrassEnv(os.path.join(configFileDir, 'grass_db'), 'hru-delin') + os.environ['GISRC'] = os.path.join(configFileDir, 'grass_db', 'grassdata', 'hru-delin', '.grassrc') + # Get parameters from configuration 
file + parms = ConfigParser.ConfigParser(allow_no_value=True) + tmpPath = os.path.join(configFileDir, 'tmp') + if not os.path.isdir(tmpPath): + os.mkdir(tmpPath) + parms.read(parms_file) + directory_out = parms.get('dir_out', 'files') + # manage absolute and relative paths + if not os.path.isabs(directory_out): + directory_out = os.path.join(configFileDir, directory_out) + + + + # test parameters from configuration file + # if auto_relocation == yes, test int value for surface_tolerance_1 and distance_tolerance_1 + if (parms.get('auto_relocation', 'to_do')) == 'yes': + + if not isint(parms.get('auto_relocation', 'surface_tolerance_1')): + sys.exit('------------> ERROR : Surface_tolerance_1 value not provided or is not integer' ) + if not isint(parms.get('auto_relocation', 'distance_tolerance_1')): + sys.exit('------------> ERROR : Distance_tolerance_1 value not provided or is not integer' ) + + + ## test if basin min size is valid + if not isint(parms.get('basin_min_size', 'size')): + sys.exit('------------> ERROR : Basin min size value not provided or is not integer' ) + + + ####### + #GAUGES + ####### + + # Get the shape of gauges + gauges_file = parms.get('gauges', 'relocated_gauges') + if gauges_file == '': + gauges_file = os.path.join(directory_out, 'gauges_selected.shp') + else: + if ogr.Open(gauges_file) is None: + sys.exit('------------> ERROR : Relocated Gauges file not found') + + + gauges_in = ogr.Open(gauges_file) + gauges_lyr = gauges_in.GetLayer() + + # Set the new shape + gauges_reloc_name = 'step2_gauges_for_watersheds' + gauges_reloc_file = os.path.join(directory_out, gauges_reloc_name + '.shp') + driver = ogr.GetDriverByName('ESRI Shapefile') + if os.path.exists(gauges_reloc_file): + driver.DeleteDataSource(gauges_reloc_file) + gauges_reloc_shp = driver.CreateDataSource(gauges_reloc_file) + gauges_reloc_lyr = gauges_reloc_shp.CopyLayer(gauges_lyr, gauges_reloc_name) + + # Relocation of the gauges + if (parms.get('auto_relocation', 'to_do')) == 'yes': + print('---------- HRU-delin Step 2-1 : Relocation of the gauges') + + gauges_area_col_name = parms.get('gauges', 'gauges_area_col_name') + gauges_col_name = parms.get('gauges', 'gauges_col_name') + snapping_points_to_reaches(parms, directory_out,gauges_col_name,gauges_area_col_name,gauges_reloc_lyr,'gauges_reloc.csv','gauges') + + gauges_reloc_shp.ExecuteSQL('REPACK ' + gauges_reloc_lyr.GetName()) + gauges_reloc_shp.Destroy() + + ##### + #DAMS + ##### + + # relocation of dams if provided + dams_reloc_file=0 + if str(parms.get('dams', 'to_do')) == 'yes': + # Get the shape of dams + dams_file = parms.get('dams', 'relocated_dams') + if dams_file == '': + dams_file = os.path.join(directory_out, 'dams_selected.shp') + else: + if ogr.Open(dams_file) is None: + sys.exit('------------> ERROR : Relocated dams file not found') + + + dams_in = ogr.Open(dams_file) + dams_lyr = dams_in.GetLayer() + + # Set the new shape + dams_reloc_name = 'step2_dams_for_watersheds' + dams_reloc_file = os.path.join(directory_out, dams_reloc_name + '.shp') + driver = ogr.GetDriverByName('ESRI Shapefile') + if os.path.exists(dams_reloc_file): + driver.DeleteDataSource(dams_reloc_file) + dams_reloc_shp = driver.CreateDataSource(dams_reloc_file) + dams_reloc_lyr = dams_reloc_shp.CopyLayer(dams_lyr, dams_reloc_name) + + # Relocation of the dams + if (parms.get('auto_relocation', 'to_do')) == 'yes': + print('---------- HRU-delin Step 2-1 : Relocation of the dams') + + dams_area_col_name = parms.get('dams', 'dams_area_col_name') + dams_col_name = 
parms.get('dams', 'dams_col_name') + snapping_points_to_reaches(parms, directory_out,dams_col_name,dams_area_col_name,dams_reloc_lyr,'dams_reloc.csv','dams') + + dams_reloc_shp.ExecuteSQL('REPACK ' + dams_reloc_lyr.GetName()) + dams_reloc_shp.Destroy() + + + + + # Import drain raster + drain_layer = os.path.join(directory_out, 'step1_drain.tif') + drain_wk = 'drain_wk' + grass_run_command('r.in.gdal', flags='o', input=drain_layer, output=drain_wk, overwrite='True', stdout=DEVNULL, stderr=DEVNULL) + grass_run_command('g.proj', flags='p', georef=drain_layer, stdout=DEVNULL, stderr=DEVNULL) + grass_run_command('g.region', flags='sp', raster=drain_wk, stdout=DEVNULL, stderr=DEVNULL) + + # Watersheds derivation + basins = 'basins' + rasters_list = ['basins'] + + # r.cross doesn't accept more than 30 layers + max_rasters = 29 + i = 0 + grass_run_command('r.mapcalc', expression='basins=null()', overwrite=True, stdout=DEVNULL, stderr=DEVNULL) + #gauges + gaugesDs = ogr.Open(gauges_reloc_file) + gauges_reloc_lyr = gaugesDs.GetLayer() + nb_gauges = gauges_reloc_lyr.GetFeatureCount() + #dams + nb_dams=0 + + + list_point=[] + if str(parms.get('dams', 'to_do')) == 'yes': + dams_col_name = parms.get('dams', 'dams_col_name') + damsDs = ogr.Open(dams_reloc_file) + dams_reloc_lyr = damsDs.GetLayer() + nb_dams = dams_reloc_lyr.GetFeatureCount() + + for dam in dams_reloc_lyr: + geom = dam.GetGeometryRef() + dam_x, dam_y = geom.GetX(), geom.GetY() + dam_ID=int(dam.GetField(dams_col_name)) + tuple_dam=(dam_x,dam_y,dam_ID) + list_point.append(tuple_dam) + + nb_points=nb_gauges+nb_dams + + #test if nb_points still has feature + if nb_points == 0: + print("Error on gauges (and dams if provided) relocated layer : 0 features found ") + print("Maybe check basin min size parameter") + sys.exit() + + gauges_col_name = parms.get('gauges', 'gauges_col_name') + for gauge in gauges_reloc_lyr: + geom = gauge.GetGeometryRef() + gauge_x, gauge_y = geom.GetX(), geom.GetY() + gauge_ID=int(gauge.GetField(gauges_col_name)) + tuple_gauge=(gauge_x,gauge_y,gauge_ID) + list_point.append(tuple_gauge) + + #export of list_point + df_list_point = pd.DataFrame(list_point) + df_list_point.to_csv(os.path.join(directory_out,"list_point.csv"),index=False, header=False) + + #TEST EXIST FILES AND FILL FILES + print('---------- HRU-delin Step 2-1 : Test of existing and completed files') + #GAUGES + + #gauges_reloc + gauges_reloc_path = os.path.join(directory_out, 'gauges_reloc.csv') + if (parms.get('auto_relocation', 'to_do')) == 'yes': + if os.stat(gauges_reloc_path).st_size == 0: + print('--------------- gauges_reloc.csv is empty or nonexistent') + else: + pd_gauges_reloc = pd.read_csv(gauges_reloc_path) + len_gauges_reloc = len(pd_gauges_reloc) + if len_gauges_reloc > 0 : + print('--------------- gauges_reloc.csv is created and it has ', len_gauges_reloc, " lines") + else : + print("--------------- gauges_reloc.csv is created but it empty") + + + #gauges_selected + gauges_selected_path = os.path.join(directory_out, 'gauges_selected.shp') + + if os.stat(gauges_selected_path).st_size == 0: + print('--------------- gauges_selected.shp is empty or nonexistent') + else: + datasource_gauges = ogr.Open(gauges_selected_path) + layer_gauges = datasource_gauges.GetLayer() + featureCount_gauges = layer_gauges.GetFeatureCount() + if featureCount_gauges > 0 : + print('--------------- gauges_selected.shp is created and it has ', featureCount_gauges, " features") + else : + print("--------------- gauges_selected.shp is created but it empty") + + + #DAMS + 
if str(parms.get('dams', 'to_do')) == 'yes': + #dams_reloc + dams_reloc_path = os.path.join(directory_out, 'dams_reloc.csv') + + if os.stat(dams_reloc_path).st_size == 0: + print('--------------- dams_reloc.csv is empty or nonexistent') + else: + pd_dams_reloc = pd.read_csv(dams_reloc_path) + len_dams_reloc = len(pd_dams_reloc) + if len_dams_reloc > 0 : + print('--------------- dams_reloc.csv is created and it has ', len_dams_reloc, " lines") + else : + print("--------------- dams_reloc.csv is created but it empty") + + #dams_selected + dams_selected_path = os.path.join(directory_out, 'dams_selected.shp') + + if os.stat(dams_selected_path).st_size == 0: + print('--------------- dams_selected.shp is empty or nonexistent') + else: + datasource_dams = ogr.Open(dams_selected_path) + layer_dams = datasource_dams.GetLayer() + featureCount_dams = layer_dams.GetFeatureCount() + if featureCount_dams > 0 : + print('--------------- dams_selected.shp is created and it has ', featureCount_dams, " features") + else : + print("--------------- dams_selected.shp is created but it empty") + + + print('---------- HRU-delin Step 2-1 ended ---------------------------------------------') + + if generator: + yield 10 + + +#MAIN _PARAM +if __name__ == '__main__': + from grassUtils import buildGrassEnv, buildGrassLocation, exportRasters, importRastersInEnv,\ + grass_run_command, grass_parse_command, grass_feed_command, grass_read_command, grass_pipe_command + from progressColors import * + # check TQDM presence only if we are executed + try: + from tqdm import tqdm + except Exception as e: + print('!! %stqdm module not found%s\n' % (COLOR_RED, COLOR_RESET)) + sys.exit(1) + + parms_file = 'hrudelin_config.cfg' + nbProcArg = '' + if len(sys.argv) > 1: + parms_file = sys.argv[1] + if len(sys.argv) > 2: + nbProcArg = sys.argv[2] + + # determine how many processes we can launch + if str(nbProcArg).isnumeric() and int(nbProcArg) > 0: + nbProc = int(nbProcArg) + else: + nbProc = cpu_count() + + # main is a generator but we don't use it here + for pc in main(parms_file, nbProc, False): + pass + + try: + os.system('notify-send "hru-delin-6-2 step 2-1 complete"') + except Exception as e: + pass +else: + from .grassUtils import buildGrassEnv, buildGrassLocation, exportRasters, importRastersInEnv,\ + grass_run_command, grass_parse_command, grass_feed_command, grass_read_command, grass_pipe_command + from .progressColors import * diff --git a/modules/hrudelin_2_2_derivation_watershed.py b/modules/hrudelin_2_2_derivation_watershed.py new file mode 100755 index 0000000000000000000000000000000000000000..a99ac2bcd3a075b1103f65e762cee3121ce208b2 --- /dev/null +++ b/modules/hrudelin_2_2_derivation_watershed.py @@ -0,0 +1,203 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + + +############################################################################ +# +# MODULE: hru-delin_basins.py +# AUTHOR(S): adapted from GRASS-HRU (ILMS) - JENA University +# by IRSTEA - Christine Barachet, +# Julien Veyssier +# Michael Rabotin +# Florent Veillon +# PURPOSE: 1. Relocates the gauges on the reaches +# 2. Calculates watersheds at the gauges +# +# +# COPYRIGHT: (C) 2020 UR RIVERLY - INRAE +# +# This program is free software under the GNU General Public +# License (>=v2). Read the file LICENSE that comes with +# HRU-DELIN for details. 
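Note on the list_point.csv hand-off: step 2-1 above writes every gauge and dam outlet as an (x, y, id) tuple to list_point.csv with pandas, and the later step modules, including hrudelin_2_2_derivation_watershed.py whose header begins here, read it back with pd.read_csv(..., header=None) and itertuples. The following minimal sketch reproduces that round-trip on its own, outside the patch; the coordinates and IDs shown are hypothetical.

    import pandas as pd

    def write_point_list(points, path):
        # points: list of (x, y, point_id) tuples for gauges and dams,
        # written without header or index, as in hrudelin_2_1_env_relocate.py
        pd.DataFrame(points).to_csv(path, index=False, header=False)

    def read_point_list(path):
        # read back as plain tuples, as done at the top of steps 2-2 to 2-5
        df = pd.read_csv(path, header=None)
        return list(df.itertuples(index=False, name=None))

    if __name__ == '__main__':
        pts = [(842300.0, 6512300.0, 1661), (845100.0, 6507800.0, 9614)]  # hypothetical outlets
        write_point_list(pts, 'list_point.csv')
        print(read_point_list('list_point.csv'))

The column order matters: the later modules use points[0] and points[1] as coordinates and points[2] as the gauge or dam ID.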
+# +############################################################################# + + + + +# to keep python2 compatibility +from __future__ import print_function +import string, os, sys, glob, types, time, platform +import numpy as np +try: + import ConfigParser +except Exception as e: + import configparser as ConfigParser +#import grass.script as grass +from grass.script.utils import decode, encode +import struct, math, csv, shutil + +from osgeo import gdal +from osgeo.gdalnumeric import * +from osgeo.gdalconst import * +from osgeo import ogr + +import multiprocessing +from multiprocessing import Pool, cpu_count + +from utils import isint, write_log +from reach import snapping_points_to_reaches, cut_streams_at_points +from reach import updateAttributeTable, processReachStats + +MY_ABS_PATH=os.path.abspath(__file__) +MY_DIR=os.path.dirname(MY_ABS_PATH) + +try: + # Python 3 + from subprocess import DEVNULL +except ImportError: + DEVNULL = open(os.devnull, 'wb') + +import pandas as pd +from rastertodataframe import raster_to_dataframe + +''' + + MAIN + +''' +def main(parms_file, nbProc, generator=False): + + """OUTPUT files + - basins.tif + """ + print(" ") + print('---------- HRU-delin Step 2-2 started ---------------------------------------------') + print("-----------------------------------------------------------------------------------") + + configFileDir = os.path.dirname(parms_file) + parms = ConfigParser.ConfigParser(allow_no_value=True) + tmpPath = os.path.join(configFileDir, 'tmp') + if not os.path.isdir(tmpPath): + os.mkdir(tmpPath) + parms.read(parms_file) + directory_out = parms.get('dir_out', 'files') + # manage absolute and relative paths + if not os.path.isabs(directory_out): + directory_out = os.path.join(configFileDir, directory_out) + + #open list_point.csv + file = pd.read_csv(os.path.join(directory_out,"list_point.csv"),header=None) + #convert to tuples + list_point = list(file.itertuples(index=False, name=None)) + nb_points = len(list_point) + #Set Grass environnement + os.environ['GISRC'] = os.path.join(configFileDir, 'grass_db', 'grassdata', 'hru-delin', '.grassrc') + # Import drain raster + drain_layer = os.path.join(directory_out, 'step1_drain.tif') + drain_wk = 'drain_wk' + grass_run_command('r.in.gdal', flags='o', input=drain_layer, output=drain_wk, overwrite='True', stdout=DEVNULL, stderr=DEVNULL) + grass_run_command('g.proj', flags='p', georef=drain_layer, stdout=DEVNULL, stderr=DEVNULL) + grass_run_command('g.region', flags='sp', raster=drain_wk, stdout=DEVNULL, stderr=DEVNULL) + + i = 0 + basins = 'basins' + rasters_list = ['basins'] + max_rasters = 29 + + + + print('---------- HRU-delin Step 2-2 : r.water.outlet') + + for points in list_point: + + grass_run_command('r.water.outlet', + input=drain_wk, output='basin_tmp%d' % i, + coordinates='%s,%s' % (points[0], points[1]), + overwrite='True', + stdout=DEVNULL, stderr=DEVNULL + ) + + + rasters_list.append('basin_tmp%d' % i) + i += 1 + + if i % max_rasters == 0 or i == nb_points: + grass_run_command('r.cross', input=','.join(rasters_list), output=basins, overwrite='True', stdout=DEVNULL, stderr=DEVNULL) + grass_run_command('g.remove', flags='f', type='raster', pattern='basin_tmp*', stdout=DEVNULL, stderr=DEVNULL) + rasters_list = ['basins'] + + if generator: + yield 5 + + # with grass7 watershed ids begin at 0 + # empty areas value is already NULL, no need to set it + grass_run_command('r.mapcalc', expression='basins=basins+1', overwrite='True', stdout=DEVNULL, stderr=DEVNULL) + #TODO remove this line + 
grass_run_command('r.out.gdal', input='basins', output=os.path.join(directory_out, 'basins.tif'), + overwrite='True', stdout=DEVNULL, stderr=DEVNULL) + + + + #TEST EXIST FILES AND FILL FILES + print('---------- HRU-delin Step 2-2 : Test of existing and completed files') + + #basins.tif + tif_path = os.path.join(directory_out, 'basins.tif') + + if os.stat(tif_path).st_size == 0: + print('--------------- basins.tif is empty or nonexistent') + else: + tif_gdal = gdal.Open(tif_path) + tif_band = tif_gdal.GetRasterBand(1) + (min_tif,max_tif) = tif_band.ComputeRasterMinMax(True) + + if min_tif > 0 and max_tif <= nb_points: + print('--------------- basins.tif is created and it has',nb_points, "watersheds") + else : + print("--------------- basins.tif is created but it empty") + + print('---------- HRU-delin Step 2-2 ended ---------------------------------------------') + + + + + + + +if __name__ == '__main__': + from grassUtils import buildGrassEnv, buildGrassLocation, exportRasters, importRastersInEnv,\ + grass_run_command, grass_parse_command, grass_feed_command, grass_read_command, grass_pipe_command + from progressColors import * + # check TQDM presence only if we are executed + try: + from tqdm import tqdm + except Exception as e: + print('!! %stqdm module not found%s\n' % (COLOR_RED, COLOR_RESET)) + sys.exit(1) + + parms_file = 'hrudelin_config.cfg' + nbProcArg = '' + if len(sys.argv) > 1: + parms_file = sys.argv[1] + if len(sys.argv) > 2: + nbProcArg = sys.argv[2] + + # determine how many processes we can launch + if str(nbProcArg).isnumeric() and int(nbProcArg) > 0: + nbProc = int(nbProcArg) + else: + nbProc = cpu_count() + + # main is a generator but we don't use it here + for pc in main(parms_file, nbProc, False): + pass + + try: + os.system('notify-send "hru-delin-6-2 step 2-2 complete"') + except Exception as e: + pass +else: + from .grassUtils import buildGrassEnv, buildGrassLocation, exportRasters, importRastersInEnv,\ + grass_run_command, grass_parse_command, grass_feed_command, grass_read_command, grass_pipe_command + from .progressColors import * diff --git a/modules/hrudelin_2_3_watershed_reclassification.py b/modules/hrudelin_2_3_watershed_reclassification.py new file mode 100755 index 0000000000000000000000000000000000000000..3b005f1c1c48b17778067419e2bda6bd31d1c782 --- /dev/null +++ b/modules/hrudelin_2_3_watershed_reclassification.py @@ -0,0 +1,190 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + + +############################################################################ +# +# MODULE: hru-delin_basins.py +# AUTHOR(S): adapted from GRASS-HRU (ILMS) - JENA University +# by IRSTEA - Christine Barachet, +# Julien Veyssier +# Michael Rabotin +# Florent Veillon +# PURPOSE: 1. Relocates the gauges on the reaches +# 2. Calculates watersheds at the gauges +# +# +# COPYRIGHT: (C) 2020 UR RIVERLY - INRAE +# +# This program is free software under the GNU General Public +# License (>=v2). Read the file LICENSE that comes with +# HRU-DELIN for details. 
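Aside on the r.cross batching in step 2-2 above: r.cross does not accept more than 30 input layers, so the loop keeps a running 'basins' raster and crosses it with at most 29 temporary basin_tmp rasters at a time, flushing on every 29th point and again on the last one. The sketch below replays only that accumulate-and-flush bookkeeping, with no GRASS calls, so it can run standalone.

    def batch_rasters(n_points, max_rasters=29):
        # mirrors the flush condition 'i % max_rasters == 0 or i == nb_points'
        # used around r.cross in hrudelin_2_2_derivation_watershed.py
        rasters, batches = ['basins'], []
        for i in range(1, n_points + 1):
            rasters.append('basin_tmp%d' % (i - 1))
            if i % max_rasters == 0 or i == n_points:
                batches.append(list(rasters))   # one r.cross call per batch
                rasters = ['basins']            # keep only the accumulated result
        return batches

    if __name__ == '__main__':
        sizes = [len(b) for b in batch_rasters(70)]
        print(sizes)  # [30, 30, 13]: never more than 30 layers per r.cross call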
+# +############################################################################# + + + + +# to keep python2 compatibility +from __future__ import print_function +import string, os, sys, glob, types, time, platform +import numpy as np +try: + import ConfigParser +except Exception as e: + import configparser as ConfigParser +#import grass.script as grass +from grass.script.utils import decode, encode +import struct, math, csv, shutil + +from osgeo import gdal +from osgeo.gdalnumeric import * +from osgeo.gdalconst import * +from osgeo import ogr + +import multiprocessing +from multiprocessing import Pool, cpu_count + +from utils import isint, write_log +from reach import snapping_points_to_reaches, cut_streams_at_points +from reach import updateAttributeTable, processReachStats + +MY_ABS_PATH=os.path.abspath(__file__) +MY_DIR=os.path.dirname(MY_ABS_PATH) + +try: + # Python 3 + from subprocess import DEVNULL +except ImportError: + DEVNULL = open(os.devnull, 'wb') + +import pandas as pd +from rastertodataframe import raster_to_dataframe + +''' + + MAIN + +''' +def main(parms_file, nbProc, generator=False): + + """OUTPUT files + - step2_watersheds.tif + """ + + print(" ") + print('---------- HRU-delin Step 2-3 started ---------------------------------------------') + print("-----------------------------------------------------------------------------------") + + configFileDir = os.path.dirname(parms_file) + parms = ConfigParser.ConfigParser(allow_no_value=True) + tmpPath = os.path.join(configFileDir, 'tmp') + if not os.path.isdir(tmpPath): + os.mkdir(tmpPath) + parms.read(parms_file) + directory_out = parms.get('dir_out', 'files') + # manage absolute and relative paths + if not os.path.isabs(directory_out): + directory_out = os.path.join(configFileDir, directory_out) + + #open list_point.csv + file = pd.read_csv(os.path.join(directory_out,"list_point.csv"),header=None) + #convert to tuples + list_point = list(file.itertuples(index=False, name=None)) + #Set Grass environnement + os.environ['GISRC'] = os.path.join(configFileDir, 'grass_db', 'grassdata', 'hru-delin', '.grassrc') + # Import drain raster + drain_layer = os.path.join(directory_out, 'step1_drain.tif') + drain_wk = 'drain_wk' + grass_run_command('r.in.gdal', flags='o', input=drain_layer, output=drain_wk, overwrite='True', stdout=DEVNULL, stderr=DEVNULL) + grass_run_command('g.proj', flags='p', georef=drain_layer, stdout=DEVNULL, stderr=DEVNULL) + grass_run_command('g.region', flags='sp', raster=drain_wk, stdout=DEVNULL, stderr=DEVNULL) + # Import basins raster + basins_tif = os.path.join(directory_out, 'basins.tif') + basins = "basins" + + + basins_rcl = 'basins_rcl' + + #create a GRASS basins raster + grass_run_command('r.in.gdal', flags='o', input=basins_tif, output=basins, overwrite='True', stdout=DEVNULL, stderr=DEVNULL) + + pRecode = grass_feed_command('r.recode', input=basins, output=basins_rcl, overwrite='True', rules='-', quiet=True) + + + for points in list_point: + out = decode(grass_read_command('r.what', map=basins, coordinates='%s,%s' % (points[0], points[1]))) + cat_grass = out.rstrip(os.linesep).split('|')[3].strip() + pRecode.stdin.write(encode('%s:%s:%s\n' % (cat_grass, cat_grass, points[2]))) + pRecode.stdin.close() + pRecode.wait() + + grass_run_command('r.out.gdal', + input=basins_rcl, + type='UInt16', + output=os.path.join(directory_out, 'step2_watersheds.tif'), + overwrite='True', stdout=DEVNULL, stderr=DEVNULL) + + + #TEST EXIST FILES AND FILL FILES + print('---------- HRU-delin Step 2-3 : Test of existing and 
completed files') + + #basins.tif + tif_path = os.path.join(directory_out, 'step2_watersheds.tif') + + if os.stat(tif_path).st_size == 0: + print('--------------- step2_watersheds.tif is empty or nonexistent') + else: + tif_gdal = gdal.Open(tif_path) + tif_band = tif_gdal.GetRasterBand(1) + (min_tif,max_tif) = tif_band.ComputeRasterMinMax(True) + + if min_tif > 0 : + print('--------------- step2_watersheds.tif is created') + else : + print("--------------- step2_watersheds.tif is created but it empty") + + + if generator: + yield 10 + + + print('---------- HRU-delin Step 2-3 ended ---------------------------------------------') + + +if __name__ == '__main__': + from grassUtils import buildGrassEnv, buildGrassLocation, exportRasters, importRastersInEnv,\ + grass_run_command, grass_parse_command, grass_feed_command, grass_read_command, grass_pipe_command + from progressColors import * + # check TQDM presence only if we are executed + try: + from tqdm import tqdm + except Exception as e: + print('!! %stqdm module not found%s\n' % (COLOR_RED, COLOR_RESET)) + sys.exit(1) + + parms_file = 'hrudelin_config.cfg' + nbProcArg = '' + if len(sys.argv) > 1: + parms_file = sys.argv[1] + if len(sys.argv) > 2: + nbProcArg = sys.argv[2] + + # determine how many processes we can launch + if str(nbProcArg).isnumeric() and int(nbProcArg) > 0: + nbProc = int(nbProcArg) + else: + nbProc = cpu_count() + + # main is a generator but we don't use it here + for pc in main(parms_file, nbProc, False): + pass + + try: + os.system('notify-send "hru-delin-6-2 step 2-3 complete"') + except Exception as e: + pass +else: + from .grassUtils import buildGrassEnv, buildGrassLocation, exportRasters, importRastersInEnv,\ + grass_run_command, grass_parse_command, grass_feed_command, grass_read_command, grass_pipe_command + from .progressColors import * diff --git a/modules/hrudelin_2_4_mask.py b/modules/hrudelin_2_4_mask.py new file mode 100755 index 0000000000000000000000000000000000000000..889b0f9d71b1820a9d3adbbe69ba9a3d62b98c58 --- /dev/null +++ b/modules/hrudelin_2_4_mask.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + + +############################################################################ +# +# MODULE: hru-delin_basins.py +# AUTHOR(S): adapted from GRASS-HRU (ILMS) - JENA University +# by IRSTEA - Christine Barachet, +# Julien Veyssier +# Michael Rabotin +# Florent Veillon +# PURPOSE: 1. Relocates the gauges on the reaches +# 2. Calculates watersheds at the gauges +# +# +# COPYRIGHT: (C) 2020 UR RIVERLY - INRAE +# +# This program is free software under the GNU General Public +# License (>=v2). Read the file LICENSE that comes with +# HRU-DELIN for details. 
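Aside on the r.recode rules in step 2-3 above: the category that r.cross assigned to each watershed is looked up with r.what at every outlet, and a rule of the form 'old:old:new' is piped to r.recode on stdin so that the output raster carries the gauge or dam ID instead of the arbitrary cross category. The sketch below only builds that rule text; the (category, ID) pairs are hypothetical, and in the module the text is written, after encode(), to the r.recode process handle returned by grass_feed_command before stdin is closed and the process is waited on.

    def recode_rules(cat_to_id):
        # one 'old:old:new' line per watershed, matching the stdin writes
        # made to r.recode (rules='-') in hrudelin_2_3_watershed_reclassification.py
        return ''.join('%s:%s:%s\n' % (cat, cat, new_id) for cat, new_id in cat_to_id)

    if __name__ == '__main__':
        pairs = [(1, 1661), (2, 9614), (3, 10158)]  # hypothetical r.cross categories -> point IDs
        print(recode_rules(pairs), end='')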
+# +############################################################################# + + + + +# to keep python2 compatibility +from __future__ import print_function +import string, os, sys, glob, types, time, platform +import numpy as np +try: + import ConfigParser +except Exception as e: + import configparser as ConfigParser +#import grass.script as grass +from grass.script.utils import decode, encode +import struct, math, csv, shutil + +from osgeo import gdal +from osgeo.gdalnumeric import * +from osgeo.gdalconst import * +from osgeo import ogr + +import multiprocessing +from multiprocessing import Pool, cpu_count + +from utils import isint, write_log +from reach import snapping_points_to_reaches, cut_streams_at_points +from reach import updateAttributeTable, processReachStats + +MY_ABS_PATH=os.path.abspath(__file__) +MY_DIR=os.path.dirname(MY_ABS_PATH) + +try: + # Python 3 + from subprocess import DEVNULL +except ImportError: + DEVNULL = open(os.devnull, 'wb') + +import pandas as pd +from rastertodataframe import raster_to_dataframe + +''' + + MAIN + +''' +def main(parms_file, nbProc, generator=False): + + """OUTPUT files + - step2_mask.tif + """ + print(" ") + print('---------- HRU-delin Step 2-4 started ---------------------------------------------') + print("-----------------------------------------------------------------------------------") + + configFileDir = os.path.dirname(parms_file) + parms = ConfigParser.ConfigParser(allow_no_value=True) + tmpPath = os.path.join(configFileDir, 'tmp') + if not os.path.isdir(tmpPath): + os.mkdir(tmpPath) + parms.read(parms_file) + directory_out = parms.get('dir_out', 'files') + # manage absolute and relative paths + if not os.path.isabs(directory_out): + directory_out = os.path.join(configFileDir, directory_out) + + #Set Grass environnement + os.environ['GISRC'] = os.path.join(configFileDir, 'grass_db', 'grassdata', 'hru-delin', '.grassrc') + # Import drain raster + drain_layer = os.path.join(directory_out, 'step1_drain.tif') + drain_wk = 'drain_wk' + grass_run_command('r.in.gdal', flags='o', input=drain_layer, output=drain_wk, overwrite='True', stdout=DEVNULL, stderr=DEVNULL) + grass_run_command('g.proj', flags='p', georef=drain_layer, stdout=DEVNULL, stderr=DEVNULL) + grass_run_command('g.region', flags='sp', raster=drain_wk, stdout=DEVNULL, stderr=DEVNULL) + + #open list_point.csv + file = pd.read_csv(os.path.join(directory_out,"list_point.csv"),header=None) + #convert to tuples + list_point = list(file.itertuples(index=False, name=None)) + # Get parameters from configuration file + parms = ConfigParser.ConfigParser(allow_no_value=True) + tmpPath = os.path.join(configFileDir, 'tmp') + if not os.path.isdir(tmpPath): + os.mkdir(tmpPath) + parms.read(parms_file) + + basins_rcl_tif = os.path.join(directory_out, 'step2_watersheds.tif') + basins_rcl = "basins_rcl" + + grass_run_command('r.in.gdal', flags='o', input=basins_rcl_tif, output=basins_rcl, overwrite='True', stdout=DEVNULL, stderr=DEVNULL) + + + grass_run_command('r.null', map=basins_rcl, setnull=0, stdout=DEVNULL, stderr=DEVNULL) + mask_tmp = 'mask_tmp' + pReclass = grass_feed_command('r.reclass', input=basins_rcl, output=mask_tmp, overwrite='True', rules='-') + pReclass.stdin.write(encode('0 thru 10000000 = 1\n')) + pReclass.stdin.close() + pReclass.wait() + + # TODO check that (was removed before) + #grass_run_command('r.null', map=mask_tmp, null=0, setnull=1, stdout=DEVNULL, stderr=DEVNULL) + grass_run_command('r.out.gdal', input=mask_tmp, type='UInt16', 
output=os.path.join(directory_out, 'step2_mask.tif'), + overwrite='True', stdout=DEVNULL, stderr=DEVNULL) + + # exit if mask is too small and reloc is disabled + inGd = gdal.Open(os.path.join(directory_out, 'step2_mask.tif')) + inband1 = inGd.GetRasterBand(1) + indata1 = BandReadAsArray(inband1) + inNodata = inband1.GetNoDataValue() + + nbGoodValues = np.count_nonzero(indata1 != inNodata) + xl = inGd.RasterXSize + yl = inGd.RasterYSize + # mask represents less than 0.1 % of area + maskProportion = nbGoodValues / (xl * yl) * 100 + maskIsTooSmall = maskProportion < 0.1 + if parms.get('auto_relocation', 'to_do') != 'yes' and maskIsTooSmall: + sys.exit('!!! Relocation is disabled and you are getting a very small mask. Please consider enabling gauge relocation.') + + # Cutting the streams at gauges + listReachID=cut_streams_at_points(directory_out,list_point,'step1_streams.tif','step2_streams_new.tif') + + #update attribute table with ReachID + gauges_col_name = parms.get('gauges', 'gauges_col_name') + updateAttributeTable(directory_out,listReachID,'step2_gauges_for_watersheds.shp',gauges_col_name) + + if str(parms.get('dams', 'to_do')) == 'yes': + dams_col_name = parms.get('dams', 'dams_col_name') + updateAttributeTable(directory_out,listReachID,'step2_dams_for_watersheds.shp',dams_col_name) + + if generator: + yield 20 + + print('---------- HRU-delin Step 2-4 ended ---------------------------------------------') + + if generator: + yield 15 + + + + + + +if __name__ == '__main__': + from grassUtils import buildGrassEnv, buildGrassLocation, exportRasters, importRastersInEnv,\ + grass_run_command, grass_parse_command, grass_feed_command, grass_read_command, grass_pipe_command + from progressColors import * + # check TQDM presence only if we are executed + try: + from tqdm import tqdm + except Exception as e: + print('!! %stqdm module not found%s\n' % (COLOR_RED, COLOR_RESET)) + sys.exit(1) + + parms_file = 'hrudelin_config.cfg' + nbProcArg = '' + if len(sys.argv) > 1: + parms_file = sys.argv[1] + if len(sys.argv) > 2: + nbProcArg = sys.argv[2] + + # determine how many processes we can launch + if str(nbProcArg).isnumeric() and int(nbProcArg) > 0: + nbProc = int(nbProcArg) + else: + nbProc = cpu_count() + + # main is a generator but we don't use it here + for pc in main(parms_file, nbProc, False): + pass + + try: + os.system('notify-send "hru-delin-6-2 step 2-4 complete"') + except Exception as e: + pass +else: + from .grassUtils import buildGrassEnv, buildGrassLocation, exportRasters, importRastersInEnv,\ + grass_run_command, grass_parse_command, grass_feed_command, grass_read_command, grass_pipe_command + from .progressColors import * diff --git a/modules/hrudelin_2_5_cut_stream_subbassins.py b/modules/hrudelin_2_5_cut_stream_subbassins.py new file mode 100755 index 0000000000000000000000000000000000000000..c175b2644b0a1af12bfe2376c7abdf66770e2893 --- /dev/null +++ b/modules/hrudelin_2_5_cut_stream_subbassins.py @@ -0,0 +1,162 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + + +############################################################################ +# +# MODULE: hru-delin_basins.py +# AUTHOR(S): adapted from GRASS-HRU (ILMS) - JENA University +# by IRSTEA - Christine Barachet, +# Julien Veyssier +# Michael Rabotin +# Florent Veillon +# PURPOSE: 1. Relocates the gauges on the reaches +# 2. Calculates watersheds at the gauges +# +# +# COPYRIGHT: (C) 2020 UR RIVERLY - INRAE +# +# This program is free software under the GNU General Public +# License (>=v2). 
Read the file LICENSE that comes with +# HRU-DELIN for details. +# +############################################################################# + + + + +# to keep python2 compatibility +from __future__ import print_function +import string, os, sys, glob, types, time, platform +import numpy as np +try: + import ConfigParser +except Exception as e: + import configparser as ConfigParser +#import grass.script as grass +from grass.script.utils import decode, encode +import struct, math, csv, shutil + +from osgeo import gdal +from osgeo.gdalnumeric import * +from osgeo.gdalconst import * +from osgeo import ogr + +import multiprocessing +from multiprocessing import Pool, cpu_count + +from utils import isint, write_log +from reach import snapping_points_to_reaches, cut_streams_at_points +from reach import updateAttributeTable, processReachStats + +MY_ABS_PATH=os.path.abspath(__file__) +MY_DIR=os.path.dirname(MY_ABS_PATH) + +try: + # Python 3 + from subprocess import DEVNULL +except ImportError: + DEVNULL = open(os.devnull, 'wb') + +import pandas as pd +from rastertodataframe import raster_to_dataframe + +''' + + MAIN + +''' +def main(parms_file, nbProc, generator=False): + + """OUTPUT files + + """ + print(" ") + print('---------- HRU-delin Step 2-5 started ---------------------------------------------') + print("-----------------------------------------------------------------------------------") + + configFileDir = os.path.dirname(parms_file) + parms = ConfigParser.ConfigParser(allow_no_value=True) + tmpPath = os.path.join(configFileDir, 'tmp') + if not os.path.isdir(tmpPath): + os.mkdir(tmpPath) + parms.read(parms_file) + directory_out = parms.get('dir_out', 'files') + # manage absolute and relative paths + if not os.path.isabs(directory_out): + directory_out = os.path.join(configFileDir, directory_out) + #Set Grass environnement + os.environ['GISRC'] = os.path.join(configFileDir, 'grass_db', 'grassdata', 'hru-delin', '.grassrc') + + # Import drain raster + drain_layer = os.path.join(directory_out, 'step1_drain.tif') + drain_wk = 'drain_wk' + grass_run_command('r.in.gdal', flags='o', input=drain_layer, output=drain_wk, overwrite='True', stdout=DEVNULL, stderr=DEVNULL) + grass_run_command('g.proj', flags='p', georef=drain_layer, stdout=DEVNULL, stderr=DEVNULL) + grass_run_command('g.region', flags='sp', raster=drain_wk, stdout=DEVNULL, stderr=DEVNULL) + + #open list_point.csv + file = pd.read_csv(os.path.join(directory_out,"list_point.csv"),header=None) + #convert to tuples + list_point = list(file.itertuples(index=False, name=None)) + # Get parameters from configuration file + parms = ConfigParser.ConfigParser(allow_no_value=True) + tmpPath = os.path.join(configFileDir, 'tmp') + if not os.path.isdir(tmpPath): + os.mkdir(tmpPath) + parms.read(parms_file) + + # Cutting the streams at gauges + listReachID=cut_streams_at_points(directory_out,list_point,'step1_streams.tif','step2_streams_new.tif') + + #update attribute table with ReachID + gauges_col_name = parms.get('gauges', 'gauges_col_name') + updateAttributeTable(directory_out,listReachID,'step2_gauges_for_watersheds.shp',gauges_col_name) + + if str(parms.get('dams', 'to_do')) == 'yes': + dams_col_name = parms.get('dams', 'dams_col_name') + updateAttributeTable(directory_out,listReachID,'step2_dams_for_watersheds.shp',dams_col_name) + + if generator: + yield 20 + + print('---------- HRU-delin Step 2-5 ended ---------------------------------------------') + + + +if __name__ == '__main__': + from grassUtils import buildGrassEnv, 
buildGrassLocation, exportRasters, importRastersInEnv,\ + grass_run_command, grass_parse_command, grass_feed_command, grass_read_command, grass_pipe_command + from progressColors import * + # check TQDM presence only if we are executed + try: + from tqdm import tqdm + except Exception as e: + print('!! %stqdm module not found%s\n' % (COLOR_RED, COLOR_RESET)) + sys.exit(1) + + parms_file = 'hrudelin_config.cfg' + nbProcArg = '' + if len(sys.argv) > 1: + parms_file = sys.argv[1] + if len(sys.argv) > 2: + nbProcArg = sys.argv[2] + + # determine how many processes we can launch + if str(nbProcArg).isnumeric() and int(nbProcArg) > 0: + nbProc = int(nbProcArg) + else: + nbProc = cpu_count() + + # main is a generator but we don't use it here + for pc in main(parms_file, nbProc, False): + pass + + try: + os.system('notify-send "hru-delin-6-2 step 2-5 complete"') + except Exception as e: + pass +else: + from .grassUtils import buildGrassEnv, buildGrassLocation, exportRasters, importRastersInEnv,\ + grass_run_command, grass_parse_command, grass_feed_command, grass_read_command, grass_pipe_command + from .progressColors import * diff --git a/modules/hrudelin_2_6_parrallele.py b/modules/hrudelin_2_6_parrallele.py new file mode 100755 index 0000000000000000000000000000000000000000..79dc97e0866fdd1cd669094fa53738148b246bd1 --- /dev/null +++ b/modules/hrudelin_2_6_parrallele.py @@ -0,0 +1,374 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + + +############################################################################ +# +# MODULE: hru-delin_basins.py +# AUTHOR(S): adapted from GRASS-HRU (ILMS) - JENA University +# by IRSTEA - Christine Barachet, +# Julien Veyssier +# Michael Rabotin +# Florent Veillon +# PURPOSE: 1. Relocates the gauges on the reaches +# 2. Calculates watersheds at the gauges +# +# +# COPYRIGHT: (C) 2020 UR RIVERLY - INRAE +# +# This program is free software under the GNU General Public +# License (>=v2). Read the file LICENSE that comes with +# HRU-DELIN for details. 
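# A short sketch of how the relocated gauge points are loaded in steps 2-4 and 2-5 above:
# list_point.csv (written by an earlier step, with no header row) is read with pandas and
# converted into plain tuples, one per point, before being passed to cut_streams_at_points().
import os
import pandas as pd

def load_point_list(directory_out):
    df = pd.read_csv(os.path.join(directory_out, 'list_point.csv'), header=None)
    # name=None makes itertuples yield ordinary tuples instead of namedtuples
    return list(df.itertuples(index=False, name=None))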
+# +############################################################################# + + + + +# to keep python2 compatibility +from __future__ import print_function +import string, os, sys, glob, types, time, platform +import numpy as np +try: + import ConfigParser +except Exception as e: + import configparser as ConfigParser +#import grass.script as grass +from grass.script.utils import decode, encode +import struct, math, csv, shutil + +from osgeo import gdal +from osgeo.gdalnumeric import * +from osgeo.gdalconst import * +from osgeo import ogr + +import multiprocessing +from multiprocessing import Pool, cpu_count + +from utils import isint, write_log +from reach import snapping_points_to_reaches, cut_streams_at_points +from reach import updateAttributeTable, processReachStats + +MY_ABS_PATH=os.path.abspath(__file__) +MY_DIR=os.path.dirname(MY_ABS_PATH) + +try: + # Python 3 + from subprocess import DEVNULL +except ImportError: + DEVNULL = open(os.devnull, 'wb') + +import pandas as pd +from rastertodataframe import raster_to_dataframe + +''' + + MAIN + +''' +def main(parms_file, nbProc, generator=False): + + """OUTPUT files + - step2_mask.tif + """ + print(" ") + print('---------- HRU-delin Step 2-6 started ---------------------------------------------') + print("-----------------------------------------------------------------------------------") + + configFileDir = os.path.dirname(parms_file) + parms = ConfigParser.ConfigParser(allow_no_value=True) + tmpPath = os.path.join(configFileDir, 'tmp') + if not os.path.isdir(tmpPath): + os.mkdir(tmpPath) + parms.read(parms_file) + directory_out = parms.get('dir_out', 'files') + # manage absolute and relative paths + if not os.path.isabs(directory_out): + directory_out = os.path.join(configFileDir, directory_out) + + #Set Grass environnement + os.environ['GISRC'] = os.path.join(configFileDir, 'grass_db', 'grassdata', 'hru-delin', '.grassrc') + + #open list_point.csv + #file = pd.read_csv(os.path.join(directory_out,"list_point.csv")) + #convert to tuples + #list_point = list(file.itertuples(index=False, name=None)) + # Get parameters from configuration file + #parms = ConfigParser.ConfigParser(allow_no_value=True) + tmpPath = os.path.join(configFileDir, 'tmp') +# if not os.path.isdir(tmpPath): +# os.mkdir(tmpPath) +# parms.read(parms_file) + + # Import drain raster + print('---------- Importing raster \'step1_drain.tif\'') + drain_layer = os.path.join(directory_out, 'step1_drain.tif') + drain_wk = 'drain_wk' + grass_run_command('r.in.gdal', flags='o', input=drain_layer, output=drain_wk, overwrite='True', stdout=DEVNULL, stderr=DEVNULL) + + + print('---------- Importing raster \'step1_dem_reclass.tif\'') + dem_recl = os.path.join(directory_out, 'step1_dem_reclass.tif') + grass_run_command('g.proj', flags='c', georef=dem_recl, stdout=DEVNULL, stderr=DEVNULL) + + print('---------- Importing raster \'step1_subbasins.tif\'') + subbasins = os.path.join(directory_out, 'step1_subbasins.tif') + grass_run_command('r.in.gdal', flags='o', input=subbasins, output='subbasins', overwrite='True', stdout=DEVNULL, stderr=DEVNULL) + + print('---------- Importing raster \'step2_watersheds.tif\'') + basins = os.path.join(directory_out, 'step2_watersheds.tif') + grass_run_command('r.in.gdal', flags='o', input=basins, output='watersheds', overwrite='True', stdout=DEVNULL, stderr=DEVNULL) + + print('---------- Importing raster \'step2_streams_new.tif\'') + reachraster = os.path.join(directory_out, 'step2_streams_new.tif') + grass_run_command('r.in.gdal', flags='o', 
input=reachraster, output='reachraster', overwrite='True', stdout=DEVNULL, stderr=DEVNULL) + + print('---------- Compute cross product \'watersheds*100000+subbasins\'') + grass_run_command('r.mapcalc', expression='cross1=watersheds*100000+subbasins', overwrite='True', stdout=DEVNULL, stderr=DEVNULL) + + print('---------- Compute cross product cross1*reachraster') + grass_run_command('r.mapcalc', expression='cross2=if(reachraster!=0, cross1, 0)', overwrite='True', stdout=DEVNULL, stderr=DEVNULL) +# + if generator: + yield 20 + + print('---------- Setting nulls in \'cross1*reachraster\' and \'reachraster\'') + grass_run_command('r.null', map='cross2', setnull=0, stdout=DEVNULL, stderr=DEVNULL) + grass_run_command('r.null', map='reachraster', setnull=0, stdout=DEVNULL, stderr=DEVNULL) + + print('---------- Saving \'step2_subbasins.tif\' (watersheds*100000+subbasins)') + grass_run_command('r.out.gdal', input='cross1', output=os.path.join(directory_out, 'step2_subbasins.tif'), + overwrite='True', stdout=DEVNULL, stderr=DEVNULL) + + print('---------- Computing links between (watersheds*100000+subbasins) and reach ids') + reach_ids = decode(grass_read_command('r.stats', quiet=True, flags='nN', input='reachraster')).rstrip(os.linesep).split(os.linesep) + reach_ids_cleaned = [] + subbasins_cleaned = [] + n_reach = len(reach_ids) + print('') + # main loop + + # export rasters that are necessary for parallel environments + rastersForWorkers = { + 'reachraster': os.path.join(tmpPath, 'step2_reachraster.tif'), + 'cross1': os.path.join(tmpPath, 'step2_cross1.tif'), + } + exportRasters(rastersForWorkers) + + # save main grass env which is being overriden later + MAIN_GISRC = os.environ['GISRC'] + + # build the environments and load exported rasters in each of them + grassDbPath = os.path.join(configFileDir, 'grass_db') + for i in range(nbProc): + location = 'hru-delin_%s' % (i+1) + buildGrassLocation(grassDbPath, location) + # set projection + # TODO test with a raster we want to pass to // + os.environ['GISRC'] = os.path.join(grassDbPath, 'grassdata', location, '.grassrc') + dem_recl = os.path.join(directory_out, 'step1_dem_reclass.tif') + grass_run_command('g.proj', flags='c', georef=dem_recl, stdout=DEVNULL, stderr=DEVNULL) + + importRastersInEnv(rastersForWorkers, grassDbPath, location) + + nbReachs = len(reach_ids) + if generator: + print('Starting reach loop with %s process' % nbProc) + with Pool(nbProc) as p: + params = [(id, configFileDir, nbProc) for (i, id) in enumerate(reach_ids)] + results = [] + for i, _ in enumerate(p.imap_unordered(processReachStats, params), 1): + results.append(_) + loopProgress = i/nbReachs*100 + globalProgress = 25 + (loopProgress/100*60) + yield globalProgress + else: + # this is the interesting part, launching N processes in parallel to process basins + # the locks are here to prevent concurrent terminal tqdm writing + with Pool(nbProc, initializer=tqdm.set_lock, initargs=(tqdm.get_lock(),)) as p: + params = [(id, configFileDir, nbProc) for (i, id) in enumerate(reach_ids)] + results = list(tqdm(p.imap_unordered(processReachStats, params), + desc='[main process] get reach id => subbasins id [%s process] ' % nbProc, + total=nbReachs, + unit='reach', + bar_format=bar_format1 + )) + + # merge results + for r in results: + reach_ids_cleaned.append(r[0]) + subbasins_cleaned.append(r[1]) + + # restore main grass env + os.environ['GISRC'] = MAIN_GISRC + + print('') + grass_run_command('g.remove', flags='f', type='raster', name='MASK', stdout=DEVNULL, stderr=DEVNULL) + + 
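# A reduced sketch of the parallel pattern used just above: reach ids are distributed to
# worker processes with Pool.imap_unordered, and tqdm's shared lock keeps the workers'
# progress bars from interleaving on the terminal. processReachStats is the real worker
# used above; process_one below is only a stand-in that echoes its input.
from multiprocessing import Pool
from tqdm import tqdm

def process_one(param):
    reach_id, config_dir, nproc = param
    return (reach_id, reach_id)  # placeholder for (reach id, matching subbasin id)

if __name__ == '__main__':
    params = [(i, '.', 4) for i in range(100)]
    with Pool(4, initializer=tqdm.set_lock, initargs=(tqdm.get_lock(),)) as pool:
        results = list(tqdm(pool.imap_unordered(process_one, params),
                            total=len(params), unit='reach'))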
print('---------- Reclassifying (watersheds*100000+subbasins)') + pReclass = grass_feed_command('r.reclass', input='cross1', output='cross1_reclassed', rules='-', overwrite='True') + for k in range(0, len(reach_ids_cleaned)): + pReclass.stdin.write(encode('%s=%s\n' % (subbasins_cleaned[k], reach_ids_cleaned[k]))) + pReclass.stdin.close() + pReclass.wait() + + + if generator: + yield 90 + + print('---------- Saving \'step2_subbasins_2.tif\'') + try: + grass_run_command('r.out.gdal', input='cross1_reclassed', output=os.path.join(directory_out, 'step2_subbasins_2.tif'), + overwrite='True', stdout=DEVNULL, stderr=DEVNULL) + except: + sys.exit('------------> ERROR : Too many reclass category; check the value of ID of gauges and/or dams') + + print('---------- Creating vector layers from raster layers ... ') + grass_run_command('r.to.vect', flags='v', quiet=True, input='subbasins', output='subbasins_vector', type='area', overwrite='True') + grass_run_command('r.to.vect', flags='v', quiet=True, input='watersheds', output='watersheds_vector', type='area', overwrite='True') + ## we could clean the vector data in case isolated pixels in edges produce false small areas + ## (with no category because it comes from an isolated nodata pixel in watershed.tif) + #rasterWsheds = gdal.Open(basins) + #ulx, xres, xskew, uly, yskew, yres = rasterWsheds.GetGeoTransform() + #pixelArea = abs(xres) * abs(yres) + #grass_run_command('v.clean', + # #flags='v', + # quiet=True, + # input='watersheds_vector', + # type='area', + # tool='rmarea', + # threshold=pixelArea, + # output='watersheds_vector_clean', + # overwrite='True') + grass_run_command('r.to.vect', flags='v', quiet=True, input='cross1', output='cross1_vector', type='area', overwrite='True') + grass_run_command('r.to.vect', flags='v', quiet=True, input='cross1_reclassed', output='cross1_reclassed_vector', type='area', overwrite='True') + + if generator: + yield 95 + + # to make sure there is a projection in exported files (grass78 complains) + grass_run_command('g.proj', flags='c', georef=drain_layer, stdout=DEVNULL, stderr=DEVNULL) + grass_run_command('v.out.ogr', + #flags='c', + quiet=True, + overwrite='True', + input='subbasins_vector', type='area', format='ESRI_Shapefile', output=os.path.join(directory_out, 'step2_step1_subbasins.shp')) + # we avoid c flag to skip areas with no category (result of vectorisation error) + grass_run_command('v.out.ogr', + #flags='c', + quiet=True, + overwrite='True', + input='watersheds_vector', type='area', format='ESRI_Shapefile', output=os.path.join(directory_out, 'step2_step2_watersheds.shp')) + grass_run_command('v.out.ogr', + #flags='c', + quiet=True, + overwrite='True', + input='cross1_vector', type='area', format='ESRI_Shapefile', output=os.path.join(directory_out, 'step2_step2_subbasins.shp')) + grass_run_command('v.out.ogr', + #flags='c', + quiet=True, + overwrite='True', + input='cross1_reclassed_vector', type='area', format='ESRI_Shapefile', output=os.path.join(directory_out, 'step2_step2_subbasins_2.shp')) + + + #TEST EXIST FILES AND FILL FILES + print('---------- HRU-delin Step 2-2 : Test of existing and completed files') + + #step2_step1_subbasins.shp + shp_subb_path = os.path.join(directory_out, 'step2_step1_subbasins.shp') + + if os.stat(shp_subb_path).st_size == 0: + print('--------------- step2_step1_subbasins.shp is empty or nonexistent') + else: + datasource_subb1 = ogr.Open(shp_subb_path) + layer_datasource_subb1 = datasource_subb1.GetLayer() + featureCount_datasource_subb1 = 
layer_datasource_subb1.GetFeatureCount() + if featureCount_datasource_subb1 > 0 : + print('--------------- step2_step1_subbasins.shp is created and it has ', featureCount_datasource_subb1, " features") + else : + print("--------------- step2_step1_subbasins.shp is created but it is empty") + + #step2_step2_watersheds.shp + shp_watershed_path = os.path.join(directory_out, 'step2_step2_watersheds.shp') + + if os.stat(shp_watershed_path).st_size == 0: + print('--------------- step2_step2_watersheds.shp is empty or nonexistent') + else: + datasource_watershed2 = ogr.Open(shp_watershed_path) + layer_datasource_watershed2 = datasource_watershed2.GetLayer() + featureCount_datasource_watershed2 = layer_datasource_watershed2.GetFeatureCount() + if featureCount_datasource_watershed2 > 0 : + print('--------------- step2_step2_watersheds.shp is created and it has ', featureCount_datasource_watershed2, " features") + else : + print("--------------- step2_step2_watersheds.shp is created but it is empty") + + #step2_step2_subbasins.shp + shp_subb2_path = os.path.join(directory_out, 'step2_step2_subbasins.shp') + + if os.stat(shp_subb2_path).st_size == 0: + print('--------------- step2_step2_subbasins.shp is empty or nonexistent') + else: + datasource_subb2 = ogr.Open(shp_subb2_path) + layer_datasource_subb2 = datasource_subb2.GetLayer() + featureCount_datasource_subb2 = layer_datasource_subb2.GetFeatureCount() + if featureCount_datasource_subb2 > 0 : + print('--------------- step2_step2_subbasins.shp is created and it has ', featureCount_datasource_subb2, " features") + else : + print("--------------- step2_step2_subbasins.shp is created but it is empty") + + #step2_step2_subbasins_2.shp + shp_subb2_2_path = os.path.join(directory_out, 'step2_step2_subbasins_2.shp') + + if os.stat(shp_subb2_2_path).st_size == 0: + print('--------------- step2_step2_subbasins_2.shp is empty or nonexistent') + else: + datasource_subb2_2 = ogr.Open(shp_subb2_2_path) + layer_datasource_subb2_2 = datasource_subb2_2.GetLayer() + featureCount_datasource_subb2_2 = layer_datasource_subb2_2.GetFeatureCount() + if featureCount_datasource_subb2_2 > 0 : + print('--------------- step2_step2_subbasins_2.shp is created and it has ', featureCount_datasource_subb2_2, " features") + else : + print("--------------- step2_step2_subbasins_2.shp is created but it is empty") + + + + print('---------- HRU-delin Step 2 ended ---------------------------------------------') + + + +if __name__ == '__main__': + from grassUtils import buildGrassEnv, buildGrassLocation, exportRasters, importRastersInEnv,\ + grass_run_command, grass_parse_command, grass_feed_command, grass_read_command, grass_pipe_command + from progressColors import * + # check TQDM presence only if we are executed + try: + from tqdm import tqdm + except Exception as e: + print('!! 
%stqdm module not found%s\n' % (COLOR_RED, COLOR_RESET)) + sys.exit(1) + + parms_file = 'hrudelin_config.cfg' + nbProcArg = '' + if len(sys.argv) > 1: + parms_file = sys.argv[1] + if len(sys.argv) > 2: + nbProcArg = sys.argv[2] + + # determine how many processes we can launch + if str(nbProcArg).isnumeric() and int(nbProcArg) > 0: + nbProc = int(nbProcArg) + else: + nbProc = cpu_count() + + # main is a generator but we don't use it here + for pc in main(parms_file, nbProc, False): + pass + + try: + os.system('notify-send "hru-delin-6-2 step 2-6 complete"') + except Exception as e: + pass +else: + from .grassUtils import buildGrassEnv, buildGrassLocation, exportRasters, importRastersInEnv,\ + grass_run_command, grass_parse_command, grass_feed_command, grass_read_command, grass_pipe_command + from .progressColors import * diff --git a/modules/hrudelin_2_7_isolate_pixel.py b/modules/hrudelin_2_7_isolate_pixel.py new file mode 100755 index 0000000000000000000000000000000000000000..f3837933a204f7eb0f66b9afd13885d3bb7baae9 --- /dev/null +++ b/modules/hrudelin_2_7_isolate_pixel.py @@ -0,0 +1,236 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + + +############################################################################ +# +# MODULE: hru-delin_basins.py +# AUTHOR(S): adapted from GRASS-HRU (ILMS) - JENA University +# by IRSTEA - Christine Barachet, +# Julien Veyssier +# Michael Rabotin +# Florent Veillon +# PURPOSE: 1. Relocates the gauges on the reaches +# 2. Calculates watersheds at the gauges +# +# +# COPYRIGHT: (C) 2020 UR RIVERLY - INRAE +# +# This program is free software under the GNU General Public +# License (>=v2). Read the file LICENSE that comes with +# HRU-DELIN for details. +# +############################################################################# + + + + +# to keep python2 compatibility +from __future__ import print_function +import string, os, sys, glob, types, time, platform +import numpy as np +try: + import ConfigParser +except Exception as e: + import configparser as ConfigParser +#import grass.script as grass +from grass.script.utils import decode, encode +import struct, math, csv, shutil + +from osgeo import gdal +from osgeo.gdalnumeric import * +from osgeo.gdalconst import * +from osgeo import ogr + +import multiprocessing +from multiprocessing import Pool, cpu_count + +from utils import isint, write_log +from reach import snapping_points_to_reaches, cut_streams_at_points +from reach import updateAttributeTable, processReachStats + +MY_ABS_PATH=os.path.abspath(__file__) +MY_DIR=os.path.dirname(MY_ABS_PATH) + +try: + # Python 3 + from subprocess import DEVNULL +except ImportError: + DEVNULL = open(os.devnull, 'wb') + +import pandas as pd +#pd.options.mode.chained_assignment = None +import geopandas as gpd +from rastertodataframe import raster_to_dataframe +import rtree +import pygeos + +import warnings +warnings.simplefilter(action='ignore', category=FutureWarning) + + +''' + + MAIN + +''' +def main(parms_file, nbProc, generator=False): + + """OUTPUT files + + """ + print(" ") + print('---------- HRU-delin Step 2-7 started ---------------------------------------------') + print("-----------------------------------------------------------------------------------") + + configFileDir = os.path.dirname(parms_file) + parms = ConfigParser.ConfigParser(allow_no_value=True) + tmpPath = os.path.join(configFileDir, 'tmp') + if not os.path.isdir(tmpPath): + os.mkdir(tmpPath) + parms.read(parms_file) + directory_out = parms.get('dir_out', 'files') + # manage absolute and 
relative paths + if not os.path.isabs(directory_out): + directory_out = os.path.join(configFileDir, directory_out) + #Set Grass environnement + os.environ['GISRC'] = os.path.join(configFileDir, 'grass_db', 'grassdata', 'hru-delin', '.grassrc') + + # Import step2_subbasins_2.tif + step2_subbasins_layer = os.path.join(directory_out, 'step2_subbasins_2.tif') + step2_subbasins_wk = 'step2_subbasins' + grass_run_command('r.in.gdal', flags='o', input=step2_subbasins_layer, output=step2_subbasins_wk, overwrite='True', stdout=DEVNULL, stderr=DEVNULL) + grass_run_command('g.proj', flags='p', georef=step2_subbasins_layer, stdout=DEVNULL, stderr=DEVNULL) + grass_run_command('g.region', flags='sp', raster=step2_subbasins_wk, stdout=DEVNULL, stderr=DEVNULL) + + #STEP 1 : Raster to vector + print('---------- Creating vector layers from raster layers ... ') + grass_run_command('r.to.vect', flags='v', quiet=True, input='step2_subbasins', output='step2_subbasins_vector', type='area', overwrite='True') + grass_run_command('v.out.ogr',quiet=True,input='step2_subbasins_vector', type='area', format='ESRI_Shapefile', output=os.path.join(directory_out, 'step2_subbasins_2_vector.shp'), overwrite='True') + + #STEP 2 : Search shape with same ID + print('---------- Search shape with same ID ... ') + subbasins_vector = gpd.read_file(os.path.join(directory_out, 'step2_subbasins_2_vector.shp')) + subbasins_vector_sameID = subbasins_vector[subbasins_vector.groupby(['cat'])['geometry'].transform('nunique') > 1] + print(subbasins_vector_sameID) + #subbasins_vector_sameID['index1'] = subbasins_vector_sameID.index + subbasins_vector_sameID = subbasins_vector_sameID.rename_axis('index1').reset_index() + + + print(subbasins_vector_sameID) + + #STEP 3 : Area calculation + print('---------- Area calculation ... ') + #create new column with area of each shape + subbasins_vector_sameID = subbasins_vector_sameID.assign(area=subbasins_vector_sameID.area) + print(subbasins_vector_sameID) + + #STEP 4 : Identification of smaller layer for each ID + print('---------- Identification of smaller layer for each ID ... ') + #Extraction of ID and row line of layer with smaller area + pixel_sameID = subbasins_vector_sameID.groupby('cat', as_index=False)['area'].idxmin() + #Set new column with correspondance or not (True/False) + subbasins_vector_sameID["single"] = subbasins_vector_sameID.index1.isin(pixel_sameID.area) + #Subset df with isolate pixel + single_pixels = subbasins_vector_sameID.loc[subbasins_vector_sameID['single'] == True,:] + #print(single_pixels) + #Export single pixels to .shp + single_pixels.to_file(os.path.join(directory_out,"step2_single_pixels.shp")) + + #STEP 5 : Make buffer around single pixels + print('---------- Make buffer ... ') + single_pixels_shp = gpd.read_file(os.path.join(directory_out, 'step2_single_pixels.shp')) + buffer_pixels = single_pixels_shp.buffer(10) + #Export buffer pixels to .shp + buffer_pixels.to_file(os.path.join(directory_out,"step2_buffer_pixels.shp")) + + #STEP 6 : Intersection between buffer pixel and subbasins_vector + print('---------- Make intersection ... ') + #buffer pixel + buffer_pixels_shp = gpd.read_file(os.path.join(directory_out, 'step2_buffer_pixels.shp')) + #subbasin vector + subbasins_vector_shp = subbasins_vector + intersection = gpd.overlay(subbasins_vector_shp, buffer_pixels_shp, how='intersection') + #print(intersection) + + #STEP 7 : identification of single error pixel + print('---------- Make identification ... 
') + #group by FID (1 FID for part of buffer) + test = intersection.groupby('FID') + + #set empty geopandasdataframe + df_error_pixel = gpd.GeoDataFrame() + #group by iteration + for name, group in test: + lenght_same_ID = len(group) + lenght_unique = len(group['cat'].unique()) +# print(lenght_same_ID ) +# print(lenght_unique ) + #if the buffer intersect only one shape with the same id + if lenght_same_ID ==2 and lenght_unique == 1: + group = group.assign(area=group.area) + #print(group) + group = group[group.area == group.area.max()] + df_error_pixel = df_error_pixel.append(group) + + #df_error_pixel["ID_gauges"] = np.nan + #print(df_error_pixel) + + print('---------- Isolate pixels ? ... ') + if len(df_error_pixel) >=1: + print('---------------- Yes : ') + for index, row in df_error_pixel.iterrows(): + print("---------------- ID of subbassins with isolate pixel : ", row["cat"]) + df_error_pixel.to_file(os.path.join(directory_out,"step2_error_pixels_subbasins.shp")) + else : + print('---------------- No') + +# #STEP 8 : Research for the nearest gauges +# print('---------- Research for the nearest gauges ... ') +# gauges_selected_newID = gpd.read_file(os.path.join(directory_out, 'gauges_selected.shp')) +# #Recherche du plus proche voisin +# #ATTENTION : formatage du fichier d'origine gauge : mettre ID +# test = gpd.sjoin_nearest(df_error_pixel, gauges_selected_newID) +# print(test.gid) + + + print('---------- HRU-delin Step 2-7 ended ---------------------------------------------') + + + +if __name__ == '__main__': + from grassUtils import buildGrassEnv, buildGrassLocation, exportRasters, importRastersInEnv,\ + grass_run_command, grass_parse_command, grass_feed_command, grass_read_command, grass_pipe_command + from progressColors import * + # check TQDM presence only if we are executed + try: + from tqdm import tqdm + except Exception as e: + print('!! 
%stqdm module not found%s\n' % (COLOR_RED, COLOR_RESET)) + sys.exit(1) + + parms_file = 'hrudelin_config.cfg' + nbProcArg = '' + if len(sys.argv) > 1: + parms_file = sys.argv[1] + if len(sys.argv) > 2: + nbProcArg = sys.argv[2] + + # determine how many processes we can launch + if str(nbProcArg).isnumeric() and int(nbProcArg) > 0: + nbProc = int(nbProcArg) + else: + nbProc = cpu_count() + + # main is a generator but we don't use it here + # for pc in main(parms_file, nbProc, False): + # pass + + try: + os.system('notify-send "hru-delin-6-2 step 2-7 complete"') + except Exception as e: + pass +else: + from .grassUtils import buildGrassEnv, buildGrassLocation, exportRasters, importRastersInEnv,\ + grass_run_command, grass_parse_command, grass_feed_command, grass_read_command, grass_pipe_command + from .progressColors import * diff --git a/modules/hrudelin_3_hrugen.py b/modules/hrudelin_3_hrugen.py index a92591aa8a9ca27e860d3c5931f71f8029073850..e5cb17c5b332e936de86eced26e3e78df5132d7a 100755 --- a/modules/hrudelin_3_hrugen.py +++ b/modules/hrudelin_3_hrugen.py @@ -7,6 +7,8 @@ # AUTHOR(S): adapted from GRASS-HRU (ILMS) - JENA University # by IRSTEA - Christine Barachet, # Julien Veyssier +# Michael Rabotin +# Florent Veillon # # PURPOSE: overlay of all selected layers # construction of HRUs @@ -160,7 +162,7 @@ def processSubbasin(params): # LOOP THAT ELIMINATES PIXELS ONLY sizes = range(2, min_area + 1) - print("ligne 163") + if generator: iterable2 = sizes else: @@ -175,10 +177,10 @@ def processSubbasin(params): ) iterable2.set_description('[process %s] basin [%s/%s] id %s single pixels %s' % (processN, padLeft(str(iRun), len(str(nbBasins))), nbBasins, pad(id.strip(), 6), pad('', 1))) - print("ligne 178") + initmap = 'clumps' counter_old = 0 - print("ligne 181") + while (True): counter = count_only(initmap, 1) @@ -209,7 +211,7 @@ def processSubbasin(params): grass_run_command('r.buffer', input='newmap_sz1', output='buffer', distances='%d'%buffer_distance, overwrite='True',stdout=DEVNULL, stderr=DEVNULL) - print("ligne 212") + grass_run_command('r.mapcalc', expression='newmap2=if((buffer==2),newmap,null())', overwrite='True',stdout=DEVNULL, stderr=DEVNULL) # there were sort/awk system calls here @@ -229,7 +231,7 @@ def processSubbasin(params): else: df3=df1 - print("ligne 232") + #out3Path = os.path.join(tmpPath, 'out3_proc%s' % processN) pReclass = grass_feed_command('r.reclass', input='newmap', output='test', rules='-', overwrite='True') @@ -252,7 +254,7 @@ def processSubbasin(params): pReclass.stdin.write(encode('2 = 2 9999999\n')) pReclass.stdin.close() pReclass.wait() - print("ligne 255") + grass_run_command('r.patch', input='buf_mask_new,test', output='sum_buf', overwrite='True', stdout=DEVNULL, stderr=DEVNULL) grass_run_command('r.mapcalc', @@ -263,12 +265,12 @@ def processSubbasin(params): grass_run_command('r.mask', raster='subbasins', maskcats=id.rstrip('\n'), overwrite='True', stdout=DEVNULL, stderr=DEVNULL) for sz in iterable2: - print("ligne 266") + if not generator: iterable2.set_description('[process %s] basin [%s/%s] id %s group size %s' % (processN, padLeft(str(iRun), len(str(nbBasins))), nbBasins, pad(id.strip(), 6), pad(str(sz-1), 4)) ) - print("ligne 271") + if sz not in getAreasUpTo('newmap3', min_area): pass # I should create a MASK here to prevent the MASK removal to send an error later in the code... 
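# A condensed sketch of the isolated-pixel detection performed in step 2-7 above, reduced to
# its core geopandas operations: among polygons that share the same 'cat' id, the smallest
# polygon of each group is the suspected isolated pixel. The buffering/overlay refinement
# done afterwards in step 2-7 is omitted here.
import geopandas as gpd

def suspected_isolated_pixels(shp_path):
    gdf = gpd.read_file(shp_path)
    # keep only ids that are split across more than one polygon
    dup = gdf[gdf.groupby('cat')['geometry'].transform('nunique') > 1].copy()
    dup['area'] = dup.area
    # per id, the polygon with the smallest area is the candidate isolated pixel
    return dup.loc[dup.groupby('cat')['area'].idxmin()]

# e.g. suspected_isolated_pixels(os.path.join(directory_out, 'step2_subbasins_2_vector.shp'))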
@@ -289,19 +291,23 @@ def processSubbasin(params): expression='map_d=if((not(isnull(base))),eval(a=base[0,-1],b=base[-1,0],c=base[0,1],d=base[1,0],i=base,max(a,b,c,d,i)))', overwrite='True', stdout=DEVNULL, stderr=DEVNULL) grass_run_command('r.statistics', base='base', cover='map_d', method='mode', output='map1', overwrite='True', stdout=DEVNULL, stderr=DEVNULL) - print("ligne 292") + #os.system("r.stats -lnN input=map1 | tail -n +2 | awk '{print $1 \" = \" $2}' | r.reclass --o input=base output=base_new rules=-") p = grass_pipe_command('r.stats', quiet=True, flags='lnN', input='map1') pReclass = grass_feed_command('r.reclass', overwrite=True, input='base', output='base_new', rules='-') # skip first line "0 0" - l = p.stdout.readline() + + # l = p.stdout.readline() + + + for l in p.stdout: lSpl = decode(l).rstrip(os.linesep).split() pReclass.stdin.write(encode('%s = %s\n' % (lSpl[0], lSpl[1]))) p.wait() pReclass.stdin.close() pReclass.wait() - print("ligne 304") + grass_run_command('r.patch', input='base_new,newmap3', output='newout', overwrite='True', stdout=DEVNULL, stderr=DEVNULL) # TODO there was a mapcalc here in v4 which was removed in v5...why? @@ -513,6 +519,7 @@ def main(parms_file, nbProc, generator=True): data_list.append('asp') mask_list.append('asp_msk') + # read sub-basin raster created in step 1 print('----------------------------- Importing raster \'step2_subbasins_2.tif\'') # new raster I. H. @@ -784,7 +791,7 @@ if __name__ == '__main__': res = list(main(parms_file, nbProc, False)) try: - os.system('notify-send "hru-delin step 3 complete"') + os.system('notify-send "hru-delin-6-2 step 3 complete"') except Exception as e: pass else: diff --git a/modules/hrudelin_parms_J2000.py b/modules/hrudelin_parms_J2000.py index 4176dfcbbc876deb974cd5ea740377b18c6a11ad..13a804dd3022c2f1262465394c80982ef79612ab 100755 --- a/modules/hrudelin_parms_J2000.py +++ b/modules/hrudelin_parms_J2000.py @@ -8,6 +8,8 @@ # AUTHOR(S): adapted from GRASS-HRU (ILMS) - JENA University # by IRSTEA - Christine Barachet, # Julien Veyssier +# Michael Rabotin +# Florent Veillon # # PURPOSE: calculates the topology # generates parameters files for J2000 @@ -30,11 +32,12 @@ from __future__ import print_function import string, os, sys, time, types, shutil import zipfile +import pythonGate as pyGate try: import ConfigParser except Exception as e: import configparser as ConfigParser -#import grass.script as grass +# import grass.script as grass from grass.script.utils import decode, encode import numpy as np @@ -822,7 +825,6 @@ def main(parms_file, nbProc, generator=False): hrus_shp = os.path.join(dir_results, 'hru.shp') grass_run_command('v.out.ogr', flags='c', input='hrus_v_cat2', type='area', format='ESRI_Shapefile', output=hrus_shp) - #if irrigation, select surface point and find the nearest reach #if irrigation, select gw point and find the nearest HRU gauges_col_name = parms.get('gauges', 'gauges_col_name') @@ -943,9 +945,9 @@ def main(parms_file, nbProc, generator=False): if (parms.get('topology', 'hru_no_topology_log')) == 'yes': - print('------------- Export HRU with no topology in hru_with_no_topology.par -------------') + print('------------- Export HRU with no topology in hru_with_no_topology.par -------------') # export in csv the hru with no topology (no downstream reach and hru) - notopology_file = os.path.join(dir_results, 'hru_with_no_topology.par') + notopology_file = os.path.join(dir_results, 'hru_with_no_topology.par') grass_run_command('v.db.select', flags='c', map='hrus_c_pnt', columns='cat', 
where='to_reach IS NULL AND to_poly IS NULL', file=notopology_file, overwrite=True) @@ -1057,6 +1059,19 @@ def main(parms_file, nbProc, generator=False): #os.system('cp "%s" "%s" ' % (parms_file, save_config)) shutil.copyfile(parms_file, save_config) + ## running irrigation programs in R using pyGate by Theo L. + + # execFile = pyGate.ExecFile("/home/michael.rabotin/1_HYBV/HRU_DELIN/hru-delin-dev/rScript/", "MDR_areaselect_irrigated_HRUs.r", "Rscript ") + #sndStub = pyGate.SenderStub(execFile) + # dictionaire = pyGate.Dictionary("irrigation_files") + # dictionaire.addParameter("output_dir", dir_results) + # dictionaire.addParameter("hruFile", parms.get("irrigation_analysis", "HRU_file")) #passe le chemin depuis le fichier de configuration du fichier HRU.dbf + # dictionaire.addParameter("cantonFile", parms.get("irrigation_analysis", "cantons_file")) #passe le chemin depuis le fichier de configuration du fichier cantons.dbf + + # sndStub.dictionaries.append(dictionaire) + # sndStub.run("/home/michael.rabotin/1_HYBV/HRU_DELIN/hru-delin-dev/gateway-main/gateway/build/", "gate.exe") + + if not generator: iterable3.update(1) iterable3.close() @@ -1097,7 +1112,7 @@ if __name__ == '__main__': res = list(main(parms_file, nbProc, False)) try: - os.system('notify-send "hru-delin step 4 complete"') + os.system('notify-send "hru-delin-6-2 step 4 complete"') except Exception as e: pass else: @@ -1107,3 +1122,5 @@ else: from .circleKill import circleKill from .reach import buildReachPar from .awk import * + + diff --git a/modules/pyGate/__init__.py b/modules/pyGate/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d00e18d84fd70840a4e23ac61310303356262df0 --- /dev/null +++ b/modules/pyGate/__init__.py @@ -0,0 +1,4 @@ +from .argument import Dictionary, Parameter +from .file import File, ExecFile, OutputFile +from .stub import SenderStub, ReceiverStub + diff --git a/modules/pyGate/__pycache__/__init__.cpython-38.pyc b/modules/pyGate/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..97285a0549216e244ae8b0cace0b1f969625150b Binary files /dev/null and b/modules/pyGate/__pycache__/__init__.cpython-38.pyc differ diff --git a/modules/pyGate/__pycache__/argument.cpython-38.pyc b/modules/pyGate/__pycache__/argument.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..211865e96becb5558fd0e9a45d4c80f1ae36ab9e Binary files /dev/null and b/modules/pyGate/__pycache__/argument.cpython-38.pyc differ diff --git a/modules/pyGate/__pycache__/file.cpython-38.pyc b/modules/pyGate/__pycache__/file.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1ef57213d57abc91e5f6a3854bd27e63acd851fe Binary files /dev/null and b/modules/pyGate/__pycache__/file.cpython-38.pyc differ diff --git a/modules/pyGate/__pycache__/serializable.cpython-38.pyc b/modules/pyGate/__pycache__/serializable.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4f12a3073d81f8ebfd6e84f4121653384898716e Binary files /dev/null and b/modules/pyGate/__pycache__/serializable.cpython-38.pyc differ diff --git a/modules/pyGate/__pycache__/stub.cpython-38.pyc b/modules/pyGate/__pycache__/stub.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4f76fe69f685fb1cf7b1907a0fbfe0a5984251e2 Binary files /dev/null and b/modules/pyGate/__pycache__/stub.cpython-38.pyc differ diff --git a/modules/pyGate/argument.py b/modules/pyGate/argument.py new file mode 100644 index 
0000000000000000000000000000000000000000..b108acee27d438015a321a34b9a3394ed2195d67 --- /dev/null +++ b/modules/pyGate/argument.py @@ -0,0 +1,177 @@ +import json +from abc import abstractmethod +from .serializable import Serializable + + +class Argument(Serializable): + def __init__(self, name): + self.name = name + self.value = None + + @abstractmethod + def serialize(self) -> dict: + pass + + def display(self): + pass + + +class Parameter(Argument): + def __init__(self, name, value): + super().__init__(name) + self.value = value + + def display(self) -> str: + json_line = json.dumps(self.serialize(), indent=2) + print(json_line) + return json_line + + def getArgument(self, name: str) -> "Argument | None": + if self.name == name: + return self + return None + + def getValueAsInt(self): + return int(self.value) + + def getValueAsFloat(self): + return float(self.value) + + def getValueAsList(self): + return list(json.loads(self.value)) + + def serialize(self) -> dict: + return { + "Parameter": { + "name": self.name, + "value": self.value + } + } + + +class Dictionary(Argument): + def __init__(self, name): + super().__init__(name) + self.value = [] + + def addParameter(self, name: str, value): + """ + This function will add a parameter to the dictionary. + + Parameters + ---------- + name : str + The name of the parameter + + value + The value of the parameter + """ + self.addArgument(Parameter(name, str(value))) + + def addArgument(self, argument: Argument) -> None: + """ + This function will add an argument to the Dictionary. + + Parameters + ---------- + argument : Argument + The argument to add + + Raises + ------ + TypeError + If the given parameter is not of Argument type + """ + if isinstance(argument, Argument): + self.value.append(argument) + else: + raise TypeError("parameter needs to inherits from Argument") + + def getArgument(self, name: str) -> "Argument | None": + """This function will return an argument if it exists with the given name. + If no argument exists with the given name, it will return None. + + Parameters + ---------- + name : str + The name of the argument + + Returns + ------- + Argument, None + The argument with the right name, or nothing + """ + if self.name == name: + return self + + for argument in self.value: + arg = argument.getArgument(name) + if arg is not None: + return arg + + return None + + def getParameter(self, name: str) -> "Parameter | None": + """ + This function will return a parameter if it exists with the given name. + If no parameter exists with the given name, it will return None. + + Parameters + ---------- + name : str + The name of the parameter + + Returns + ------- + Parameter, None + The parameter with the right name, or nothing + """ + for argument in self.value: + arg = argument.getArgument(name) + if arg is not None: + if type(arg) is Parameter: + return arg + + return None + + def display(self) -> str: + """ + This function will print and return a string representation of the dictionary. 
+ + Returns + ------- + str + The string representation of the dictionary + """ + json_line = json.dumps(self.serialize(), indent=2) + print(json_line) + return json_line + + def serialize(self) -> dict: + dico = { + "Dictionary": { + "name": self.name + } + } + value = [] + + for val in self.value: + value.append(val.serialize()) + dico["Dictionary"]["value"] = value + + return dico + + def deserialize(self, dico: dict) -> None: + self.name = dico["name"] + + value_json = dico["value"] + + for val in value_json: + if "Dictionary" in val: + dictionary = Dictionary(val["Dictionary"]["name"]) + dictionary.deserialize(val["Dictionary"]) + + self.value.append(dictionary) + elif "Parameter" in val: + parameter = Parameter(val["Parameter"]["name"], val["Parameter"]["value"]) + self.value.append(parameter) diff --git a/modules/pyGate/file.py b/modules/pyGate/file.py new file mode 100644 index 0000000000000000000000000000000000000000..dc11c6d2685cb2d089d1df4c66b6f268139d4dc0 --- /dev/null +++ b/modules/pyGate/file.py @@ -0,0 +1,132 @@ +from .argument import * + + +class File(Serializable): + def __init__(self, path: str, name: str): + self.name = name + self.path = path + self.actif = True + + def serialize(self) -> dict: + if self.actif: + return { + "File": { + "name": self.name, + "path": self.path + } + } + return {} + + def display(self) -> str: + if self.actif: + json_line = json.dumps(self.serialize(), indent=2) + print(json_line) + return json_line + return "" + + +class ExecFile(File): + def __init__(self, path: str, name: str, cmd: str): + super().__init__(path, name) + self.cmd = cmd + + def serialize(self) -> dict: + if self.actif: + dico = super().serialize() + dico["ExecFile"] = dico.pop("File") + dico["ExecFile"]["cmd"] = self.cmd + + return dico + return {} + + def display(self) -> str: + if self.actif: + json_line = json.dumps(self.serialize(), indent=2) + print(json_line) + return json_line + return "" + + +class OutputFile(File): + def __init__(self, path: str, name: str): + super().__init__(path, name) + + def displayContent(self): + if self.actif: + json_line = json.dumps(self.read(), indent=2) + print(json_line) + return json_line + return "" + + def readAsDictionary(self) -> Dictionary: + if self.actif: + dictionary_dict = Dictionary("outputFile") + + file = self.read() + output_dico = Dictionary("Outputs") + for output in file["Outputs"]: + dico = Dictionary(output["Dictionary"]["name"]) + dico.deserialize(output["Dictionary"]) + output_dico.addArgument(dico) + + dictionary_dict.addArgument(output_dico) + + error_dico = Dictionary("Error") + error_dico.addParameter("code", file["Error"]["code"]) + error_dico.addParameter("description", file["Error"]["description"]) + error_dico.addParameter("traceback", file["Error"]["traceback"]) + + dictionary_dict.addArgument(error_dico) + + check_dico = Dictionary("Check") + for dataFile in file["Check"]: + dataFile_dico = Dictionary(dataFile["DataFile"]["fileName"]) + dataFile_dico.addParameter("name", dataFile["DataFile"]["fileName"]) + dataFile_dico.addParameter("nbMissing", dataFile["DataFile"]["nbMissing"]) + dataFile_dico.addParameter("missing", dataFile["DataFile"]["missing"]) + dataFile_dico.addParameter("nbIncorrect", dataFile["DataFile"]["nbIncorrect"]) + dataFile_dico.addParameter("incorrect", dataFile["DataFile"]["incorrect"]) + + check_dico.addArgument(dataFile_dico) + + dictionary_dict.addArgument(check_dico) + + return dictionary_dict + return Dictionary("") + + def read(self) -> dict: + if self.actif: + file = 
open(self.path + self.name, 'r') + lines = file.readlines() + file.close() + + json_file = "\n".join(lines) + return json.loads(json_file) + return {} + + def writeOutput(self, dictionary: Dictionary): + if self.actif: + file = self.read() + outputs = file["Outputs"] + outputs.append(dictionary.serialize()) + file["Outputs"] = outputs + + file_json = json.dumps(file, indent=2) + file = open(self.path + self.name, 'w') + file.write(file_json) + file.close() + + def serialize(self) -> dict: + if self.actif: + dico = super().serialize() + dico["OutputFile"] = dico.pop("File") + + return dico + return {} + + def display(self) -> str: + if self.actif: + json_line = json.dumps(self.serialize(), indent=2) + print(json_line) + return json_line + return "" diff --git a/modules/pyGate/gate.exe b/modules/pyGate/gate.exe new file mode 100644 index 0000000000000000000000000000000000000000..5645c63f1e16bc0e4b79860ddd147a58ebd0c916 Binary files /dev/null and b/modules/pyGate/gate.exe differ diff --git a/modules/pyGate/serializable.py b/modules/pyGate/serializable.py new file mode 100644 index 0000000000000000000000000000000000000000..e6901a9d1f7d00868e53bf6932a0476e72751c04 --- /dev/null +++ b/modules/pyGate/serializable.py @@ -0,0 +1,7 @@ +from abc import ABC, abstractmethod + + +class Serializable(ABC): + @abstractmethod + def serialize(self) -> dict: + return {} diff --git a/modules/pyGate/stub.py b/modules/pyGate/stub.py new file mode 100644 index 0000000000000000000000000000000000000000..31b9256dbef9277029d71f9cdd3ea1c4b3273237 --- /dev/null +++ b/modules/pyGate/stub.py @@ -0,0 +1,130 @@ +import subprocess +from typing import List +import sys + +from .file import * + + +class Stub: + def __init__(self, outputFile: "OutputFile | None"): + if outputFile is None: + outputFile = OutputFile("", "") + + self.actif = True + self.outputFile: "OutputFile" = outputFile + self.dictionaries: List[Dictionary] = [] + + def findArgumentWithName(self, name: str): + if self.actif: + dico = None + for dictionary in self.dictionaries: + dico = dictionary.getArgument(name) + if dico is not None: + break + + return dico + return None + + def getArgument(self, name: str): + if self.actif: + return self.findArgumentWithName(name) + return None + + def findDictionaryWithName(self, name: str): + if self.actif: + for dico in self.dictionaries: + if dico.name == name: + return dico + + return None + + def displayDictionaries(self) -> str: + if self.actif: + dictionaries = [] + dico = {} + + for dictionary in self.dictionaries: + dictionaries.append(dictionary.serialize()) + + dico["Dictionaries"] = dictionaries + json_line = json.dumps(dico, indent=2, sort_keys=True) + + print(json_line) + return json_line + return "" + + def displayOutputFile(self) -> str: + if self.actif: + json_line = json.dumps(self.outputFile.serialize(), indent=2, sort_keys=True) + + print(json_line) + return json_line + return "" + + def displayAll(self) -> str: + if self.actif: + return self.displayOutputFile() + self.displayDictionaries() + return "" + + +class SenderStub(Stub): + def __init__(self, execFile=None, outputFile=None): + super().__init__(outputFile) + self.execFile: ExecFile = execFile + + def run(self, gatePath: str, gateName: str): + print(" =============== Running gateway =============== ") + subprocess.run(gatePath + gateName + " '" + self.serialize() + "'", shell=True) + print(" =============== Gateway ending ================ ") + + def serialize(self) -> str: + dico = { + "ExecFile": self.execFile.serialize()["ExecFile"], + 
"OutputFile": self.outputFile.serialize()["OutputFile"] + } + + dictionaries = [] + + for dictionary in self.dictionaries: + dictionaries.append(dictionary.serialize()) + + dico["Dictionaries"] = dictionaries + + return json.dumps(dico) + + def displayExecFile(self) -> str: + json_line = json.dumps(self.execFile.serialize(), indent=2, sort_keys=True) + + print(json_line) + return json_line + + def displayAll(self) -> str: + return self.displayExecFile() + super().displayAll() + + +class ReceiverStub(Stub): + def __init__(self): + super().__init__(None) + self.actif = True + + self.deserialize(self.readArguments()) + + def readArguments(self) -> str: + if len(sys.argv) > 1: + return str(sys.argv[1]) + self.actif = False + self.outputFile.actif = False + + def deserialize(self, json_line: str) -> None: + if self.actif: + data_line = json.loads(json_line) + + self.outputFile = OutputFile(data_line["OutputFile"]["path"], data_line["OutputFile"]["name"]) + + dictionaries_json = data_line["Dictionaries"] + + for dictionary_json in dictionaries_json: + dictionary = Dictionary(dictionary_json["Dictionary"]["name"]) + dictionary.deserialize(dictionary_json["Dictionary"]) + + self.dictionaries.append(dictionary) diff --git a/modules/testerTL.py b/modules/testerTL.py new file mode 100644 index 0000000000000000000000000000000000000000..ed9366bd385fee997d0a2b4ddf530f3298c460d5 --- /dev/null +++ b/modules/testerTL.py @@ -0,0 +1,7 @@ +import pyGate + +execFile = pyGate.ExecFile("../rScript/", "MDR_areaselect_irrigated_HRUs.r", "Rscript ") +sndStub = pyGate.SenderStub(execFile) +# TODO add files to send + +sndStub.run("pyGate/", "gate.exe") diff --git a/rScript/MDR_areaselect_irrigated_HRUs.r b/rScript/MDR_areaselect_irrigated_HRUs.r new file mode 100644 index 0000000000000000000000000000000000000000..cde94822bbe076f7f92306a5a1f1287827e6d13c --- /dev/null +++ b/rScript/MDR_areaselect_irrigated_HRUs.r @@ -0,0 +1,239 @@ +#~****************************************************************************** +#~* Selects the HRUs where irrigation is to be deployed, based on their area +#~* , canton location (irrigated or not) and comparison to SAU_irr_in_canton +#~* +#~* The area of diverse combinations of HRUs is compared to the SAU_irr_in_canton, +#~* starting with combinations of only 1 HRU in the canton, and increasing till +#~* being over the SAU_irr_in_canton. +#~* If the single-element alreading brings the HRU irrigated area above the SAU_irr_in_canton +#~* , the smallest HRU in the canton is irrigated and all others are not. +#~* The eligible combinations are tested in 3 passes with increasing tolerance to +#~* error in the total HRU irrigated area : 10 %, 30 % and 100 %. +#~* +#~* The results are +#~* * a vector irrigated (value : 0 or 1) with length: N_hrus_total +#~* * a vector irrig_type (value : 0, 1 or 2) with length: N_hrus_total. +#~* 1 = asp ; 2 = gag +#~* * a vector area_error indicating the % of error in surface committed with the new HRUirr +#~* +#~* - Le programme calcule les indices des HRUs qui irriguent un canton, en utilisant l'algorithme de combinaison de nombres afin d'additionner les surfaces des HRUs pour trouver la surface la plus proche possible de la surface totale du canton. +#~* +#~* - Le programme calcule également la différence en pourcentage entre la surface totale du canton et la surface totale des HRUs irriguant le canton. +#~* +#~* - Le programme écrit les résultats dans un fichier externe. +# +#~* - Le fichier externe contient 5 colonnes. 
La première est le numéro du HRU, la deuxième est sa surface, la troisième indique si le HRU irrigue le canton (1) ou non (0), la quatrième donne le type d'irrigation du HRU et la cinquième donne la différence en pourcentage entre la surface totale du canton et la surface totale des HRUs irriguant le canton. +# %). + +#~****************************************************************************** +#~* PROGRAMMER: Isabelle GOUTTEVIN (Irstea Lyon) +#~****************************************************************************** +#~* CREATED/MODIFIED: +# Created 2015-12-09 by Isabelle GOUTTEVIN (Irstea Lyon) +# Modified 2022-05-04 by Theo L (INRAE Lyon) +#~****************************************************************************** +setwd("/home/tlabrosse/Bureau/maestro/hru-delin-master/rScript/") +library(combinat) +library(foreign) +source("lib/rgate/Stub.R") + + +rcvStub = ReceiverStub$new() + + + +# *** FONCTIONS *** + +# ------------------------------------------------------------------------ +# index_of_nearest <- function(x, number){ +# Finds the index of the element in x that is closest to number. +# Args: +# x: A vector of numbers +# number: A number +# Returns: +# The index of the element in x that is closest to number +# ------------------------------------------------------------------------ +index_of_nearest <- function(x, number){ + return (which(abs(x-number)==min(abs(x-number))))} + + + + +# ------------------------------------------------------------------------ +#value_of_nearest(c(5,2,1),6) +# value_of_nearest <- function(x, number) +# Finds the value of the element in x that is closest to number. +# Args: +# x: A vector of numbers +# number: A number +# Returns: +# The value of the element in x that is closest to number +# ------------------------------------------------------------------------ +value_of_nearest <- function(x, number){ + return (x[which(abs(x-number)==min(abs(x-number)))])} + + + + +# ------------------------------------------------------------------------ +# try_combination <- function(n, S_HRUs, S_irr_Canton, tolerance) +# Tries to find the combination of HRUs that best fits the given irrigation area. +# Args: +# n: The current number of HRUs to be added to the combination +# S_HRUs: A vector of HRUs' surface +# S_irr_Canton: The target irrigation area +# tolerance: The maximum error tolerated by the user +# Returns: +# The index of the HRUs that best fit the target irrigation area, +# " continue " if the current combination does not work but a smaller combination might, +# " non convergence " if the current combination does not work and neither does a smaller combination. +# ------------------------------------------------------------------------ +try_combination <- function(n, S_HRUs, S_irr_Canton, tolerance){ + + if (n < length(S_HRUs)){ + combi <- combn(S_HRUs, n) + } else { + combi <- t(t(S_HRUs)) + } + sumcombi <- apply(combi, 2, sum) + + + nearestarea <- value_of_nearest(sumcombi, S_irr_Canton) + error_nearest <- abs(1-nearestarea/S_irr_Canton)*100. 
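# error_nearest is the relative gap, in percent, between the total area of the best candidate
# combination and the canton's irrigated area (SAU_irr_in_canton); the combination is only
# accepted below when this gap stays under the tolerance of the current pass (10 %, 30 %, then 100 %).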
+
+  if (error_nearest[1] < tolerance){
+
+    combi_selected <- index_of_nearest(sumcombi, S_irr_Canton)
+    index_selected <- NULL
+    for (i in 1:n){
+      index_selected <- c(index_selected, which(S_HRUs==combi[, combi_selected][i]))
+    }
+    return (index_selected)
+
+  } else if (min(sumcombi) > S_irr_Canton){
+
+    if (n==1){
+      return(which(sumcombi==min(sumcombi)))
+    } else {
+      return ("non convergence")
+    }
+  } else {
+    return ("continue")
+  }
+}
+
+# ------------------------------------------------------------------------
+# main <- function(hrus_irrig_cantons_filePath, cantons_irrigues_filePath)
+# Main function of the irrigation assignment process.
+# Args:
+#   hrus_irrig_cantons_filePath: The path to the .dbf of candidate irrigated HRUs (with their canton code)
+#   cantons_irrigues_filePath: The path to the .dbf of irrigated cantons (with SAU_IRR)
+# Returns:
+#   A file with the irrigation status of every HRU
+# ------------------------------------------------------------------------
+main <- function(hrus_irrig_cantons_filePath, cantons_irrigues_filePath) {
+  hrus_irrig_cantons <- read.dbf(hrus_irrig_cantons_filePath)
+  cantons_irrigues <- read.dbf(cantons_irrigues_filePath)
+
+  # shuffle the rows so that the result is truly random
+  hrus_irrig_cantons <- hrus_irrig_cantons[sample(seq_len(nrow(hrus_irrig_cantons))), ]
+  cantons_irrigues <- cantons_irrigues[sample(seq_len(nrow(cantons_irrigues))), ]
+
+  N_hru <- dim(hrus_irrig_cantons)[1]
+
+  # creates two vectors with one entry per HRU of the input file
+  irrigated <- rep(0, N_hru)
+  area_error <- rep(0, N_hru)
+
+  # creates a vector with one entry per irrigated canton
+  canton_traite <- rep(0, dim(cantons_irrigues)[1])
+
+  tolerances <- c(10, 30, 100)
+
+  for(tolerance in tolerances) {
+    for (numcanton in cantons_irrigues$CODE_CAN_1[which(canton_traite==0)]){
+
+      indice_canton <- which(cantons_irrigues$CODE_CAN_1==numcanton)
+
+      # Find the HRUs of the current canton
+      hrus <- hrus_irrig_cantons[which(hrus_irrig_cantons$CODE_CAN_1==numcanton), ]
+
+      if (dim(hrus)[1]<=0){
+        canton_traite[indice_canton] <- 1
+      } else {
+        indices <- which(hrus_irrig_cantons$CODE_CAN_1==numcanton) # indices of the HRU(s) attached to the canton under study
+        S_HRUs <- hrus$AREA # area of the HRU(s) in m2
+
+        S_irr_Canton <- cantons_irrigues[which(cantons_irrigues$CODE_CAN_1==numcanton), ]$SAU_IRR*100. # the "*100" is because the RGA data are in ares = 100 m2
+
+        index_of_HRUs <- "continue"
+        n_elements_combi <- 1
+        while ((index_of_HRUs=="continue") && (n_elements_combi <= length(S_HRUs))){
+          index_of_HRUs <- try_combination(n_elements_combi, S_HRUs, S_irr_Canton, tolerance)
+          n_elements_combi <- n_elements_combi+1
+        }
+
+        if (index_of_HRUs=="non convergence" || index_of_HRUs=="continue") {
+          irrigated[indices] <- NA
+        } else {
+          irrigated[indices] <- 0
+          irrigated[indices[index_of_HRUs]] <- 1
+          area_error[indices] <- (sum(S_HRUs[index_of_HRUs])/S_irr_Canton-1)*100.
+
+          canton_traite[indice_canton] <- 1
+        }
+      }
+    }
+  }
+
+  irrig_type <- rep(0, N_hru)
+
+  # is this putting a whole vector into each cell of the vector here? (no: the next line copies IRRIG_TYPE element-wise for the irrigated HRUs)
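+  # For example, with irrigated = c(1, 0, 1) and hrus_irrig_cantons$IRRIG_TYPE = c(2, 1, 3),
+  # the assignment below gives irrig_type = c(2, 0, 3): each irrigated HRU receives its own
+  # IRRIG_TYPE value, one scalar per position.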
+  irrig_type[which(irrigated >0)] <- hrus_irrig_cantons$IRRIG_TYPE[which(irrigated >0)]
+
+  file = "/home/tlabrosse/Bureau/maestro/irrigation-R-codes/resultats OUT/Irrigated_AleatoireHRUselect.csv"
+  write.table(cbind(hrus_irrig_cantons$CAT, hrus_irrig_cantons$AREA, irrigated, irrig_type, area_error), file, append=F, sep="\t", row.names=FALSE, col.names=c('HRUnum', 'HRUarea', 'irrigated', 'irrig_type', 'area_error'))
+
+  dico = Dictionary$new("files")
+  dico$addParameter("file", file)
+  # rcvStub$outputFile$writeOutput(dico)
+}
+
+# *** MAIN CODE ***
+# -----------------
+
+# fetch the two input paths sent through the gateway
+hruFile = rcvStub$getArgument("hruFile")$value
+cantonFile = rcvStub$getArgument("cantonFile")$value
+
+if(is.null(cantonFile) || is.null(hruFile)) {
+  cantonFile = "/home/tlabrosse/Bureau/maestro/irrigation-R-codes/Irrigation/Shapes/Cantons_irrigues.dbf"
+  hruFile = "/home/tlabrosse/Bureau/maestro/irrigation-R-codes/Irrigation/Shapes/hrus_irriguees_sur_Rhone.dbf"
+}
+
+main(hruFile,
+     cantonFile
+)
+
+
+# TODO what is this ? ->
+# Appendix: creation of the table of modelled irrigated areas per canton
+
+# library(foreign) # there is no need to load this lib twice, right?
+
+# that is probably not the right file
+# HRU_Aleatoir <- read.dbf('~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig/hrus_irriguees_Aleatoires.dbf')
+# SHRUirr_can <- NULL
+# for (un_canton in sort(unique(HRU_Aleatoir$CODE_CAN_1))){
+#   SHRUirr_can <- c(SHRUirr_can, sum(HRU_Aleatoir[which(HRU_Aleatoir$CODE_CAN_1 == un_canton),]$AREA/100)) #ares
+# }
+# # TODO put it in the right folder
+# write.table(cbind(sort(unique(HRU_Aleatoir$CODE_CAN_1)),SHRUirr_can),'/home/tlabrosse/Bureau/maestro/irrigation-R-codes/resultats OUT/Bilan_HRU_Aleatoir.txt',append=F, sep="\t", row.names=FALSE, col.names=c('canton', 'HRUirrig_area'))
+
diff --git "a/rScript/MDR_select_culture_irrigu\303\251e.r" "b/rScript/MDR_select_culture_irrigu\303\251e.r"
new file mode 100644
index 0000000000000000000000000000000000000000..22a118c829bf70d5a5108b00f242f09352f8e1bd
--- /dev/null
+++ "b/rScript/MDR_select_culture_irrigu\303\251e.r"
@@ -0,0 +1,203 @@
+#***** SCRIPT determining the dominant crop to assign to an HRU *******
+#  ***  based on the RGA irrigated-crop data and                  ***
+#   *   the theoretical water requirement of each crop present     *
+
+
+# author : IG
+# date : 15-12-2015
+
+# --------------------------------------------------------------------------
+library(gdata)
+library(lubridate)
+library(foreign)
+
+source('lib/utilitaire_irrigation.R')
+
+
+# 1. Gather the different data sources
+# ************************************************
+
+# RGA
+#--------------
+
+RGA <- read.xls('~/Documents/MDR/irrigation/RGACultures2010_Cantons_BVRhone_sanssecret_20131001.xlsx', sheet = 3)
+cantons <- RGA[, 1]
+cultures <- c('Vigne', 'Mais', 'Tournesol', 'Blé dur', 'maraichage', 'PdT', 'Vergers', 'Prairies', 'Protéagineux', 'Riz', "Jachère", "Divers", "Industrielles")
+RGA <- RGA[, c(19:26, 28, 30, 31, 29, 27)] # irrigated columns, 13 crop types. WARNING !!! the columns are not in the same order as in J2000 !!
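+# A cheap guard against silent column-order mistakes (commented out; the column selection
+# above is assumed correct): the selection must yield exactly one column per crop.
+# stopifnot(ncol(RGA) == length(cultures))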
+rownames(RGA) <- cantons +colnames(RGA) <- cultures + + +# param de J2000 +#----------------- + +# nom abrégé et numéro des cultures +# cette manière de faire est fragile, si les données venait à changer, il faudrait changer le code +numJ2000_cultures <- 19:31 +numnomcultures <- rbind(cultures, numJ2000_cultures) + +# Kc mensuels par cultures +luparam <- Chargement_param('~/JAMS/modeldata/J2K_Rhone_Irrigation/parameter/', 'landuse.par') +kc <- luparam[19:31, 3:14] +colnames(kc) <- 1:12 +rownames(kc) <- cultures + +# Période d'irrigation, transformées en fraction mensuelles +irrigstart <- luparam[19:31, 36] +irrigend <- luparam[19:31, 37] + # on met des valeurs réalistes là où pas d'info de base +# on dirait que cette partie de code à été fait pour des données très précise, pas sur que ça marche avec autre chose +irrigstart[9] <- irrigstart[6]; irrigend[9] <- irrigend[6] # prot==pdt +irrigstart[10] <- 100; irrigend[10] <- 250 # riz +irrigstart[11] <- irrigstart[2]; irrigend[11] <- irrigend[2] # jach et autres = mais +irrigstart[12] <- irrigstart[7]; irrigend[12] <- irrigend[7] # jardins et autres == vergers +irrigstart[13] <- irrigstart[7]; irrigend[13] <- irrigend[7] # industrielles == prairies + +date1 <- ymd_hms("2000/01/01 00:00:00") +irrigperiod <- interval(as.Date(irrigstart, date1), as.Date(irrigend, date1)) +debutmois <- c(date1, date1+months(1:11)) +finmois <- date1+months(1:12) +monthsperiods <- interval(debutmois, finmois) + +wheightedperiod <- NULL +for (cult in seq_along(cultures)){ + wheightedperiod <- rbind(wheightedperiod, as.period(intersect(monthsperiods, irrigperiod[cult]))/months(1)) +} +wheightedperiod[which(is.na(wheightedperiod))] <- 0 # rmqs : le calcul n'est pas tout à fait exact en raison de la conversion imprécise JulianDay -> Date + # pour la période d'irrigation ==> à améliorer. + + +# ETO mensuelle interannuelle par hru irriguée +# -------------------------------------------- +# This part of the program reads in a file of data on irrigated HRUs and loops through each HRU to calculate the monthly reference evapotranspiration. +# It outputs a file called "HRULoop.dat" with the monthly reference evapotranspiration for each HRU. +# It also assigns a variable to each HRU's monthly reference evapotranspiration, with the variable name being "refET_" followed by the HRU number. +# So, for example, if HRU 1 had a monthly reference evapotranspiration of 3 mm/month, the program would output a file with a single column and 12 rows, and would also create a variable called "refET_1" with the value 3. +# If HRU 2 had a monthly reference evapotranspiration of 4 mm/month, the program would output a file with a single column and 12 rows, and would also create a variable called "refET_2" with the value 4. +# And so on. +# The program does not produce any visual output. +# It is important to note that this program requires the source('lib/aggregateZoo_functions.r') in order to run properly. +# +# - liste des HRUs irriguées +hrus_all <- read.csv('~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig/Irrigated_AleatoireHRUselect.csv') +irrigated <- hrus_all[which(hrus_all$irrigated ==1),] + +HRULoop <- ReadLoopDaily('~/JAMS/modeldata/J2K_Rhone_Irrigation/output/refET/',"HRULoop.dat",TRUE) +Dates <- HRULoop$dates + +# - ET0 mensuelles interannuelles +for (myhru in irrigated[,1]){ + myrefET <- HRULoop$Data[which(HRULoop$Data[, 1]==myhru), which(colnames(HRULoop$Data)=='refET')] + myrefET <- aggregateZoo(zoo(myrefET, Dates), 'm', 'mean') + assign(paste0('refET_', myhru), myrefET) +} + +# 2. 
Comparaison des besoins théoriques sur chaque HRU et affectation du type de culture irrigué +# *********************************************************************************************** + +hrus_et_cantons <- read.dbf('~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig/AleatoirIrrig_hrus_decoupees.dbf') +culture_finale <- NULL +for (hrus in hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)]){ + + un_canton <- hrus_et_cantons$CODE_CAN_1[which(hrus_et_cantons$CAT==hrus)] + sommeprod <- (as.matrix(kc) *as.matrix(wheightedperiod)) %*% as.vector(get(paste0('refET_', hrus))) # une valeur par culture + refETmoyyear_ponderee <- t(sommeprod)*RGA[as.character(un_canton), 1:13] # .. pondérée par la surface en culture sur le canton. + culture_retenue <- cultures[which(refETmoyyear_ponderee==max(refETmoyyear_ponderee))] + numculture_retenue <- numJ2000_cultures[which(refETmoyyear_ponderee==max(refETmoyyear_ponderee))] + culture_finale <- c(culture_finale, numculture_retenue) +} + + + +# 3. modifications du fichiers hrus.par et des .dbf +# *************************************************** + +# hrus.par +#------------- +hruparam <- Chargement_param('~/JAMS/modeldata/J2K_Rhone_Irrigation/parameter/', 'hrus.par') +culture_init <- hruparam[which(hruparam$V1 %in% hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)] ), 11] +#culture_finale[which(culture_init!=culture_finale)] +corresp_irrigtype <- c(2, 1, 1, 1, 2, 1, 2, 1, 2, 3, 1, 1, 2) +irrigtype <- NULL +for (cult in culture_finale){ + ind <- which(numJ2000_cultures==cult) + irrigtype <- c(irrigtype, corresp_irrigtype[ind]) +} +# 11: landuseID +hruparam[which(hruparam$V1 %in% hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)] ), 11] <-culture_finale +# 15: irrigated +hruparam[which(hruparam$V1 %in% hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)] ), 15] <- 1 +hruparam[which(!(hruparam$V1 %in% hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)] )), 15] <- 0 +# 16: irrig_type +hruparam[which(hruparam$V1 %in% hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)] ), 16] <- irrigtype +hruparam[which(!(hruparam$V1 %in% hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)] )), 16] <- 0 + + +# on remet à 4 les landuseID agricoles des HRUs qui ne sont plus irriguées maintenant (proposition pour plus tard : on met la culture dominante non-irriguée, pour prendre en compte des Kc améliorés) +# c'est un peu compliqué car on n'a plus le hrus.par de référence sans irrigation.... 
+hruparam[which(!(hruparam$V1 %in% hrus_et_cantons$CAT[order(hrus_et_cantons$CAT)] ) & (hruparam$V11>18) ), 11] <-4 +# step 1: Montagne (V4 : slope ; V11: landuseID) +indices <- which((hruparam$V4 > 10) & (hruparam$V11== 4)) +if (length(indices !=0)){ + hruparam[indices,11] <-12} + +# step 2: Dombes +Dombes_Chalaronne <- 6832 +Dombes_Veyle <- 6800 + +reach <- Chargement_param ('~/JAMS/modeldata/J2K_Rhone_Irrigation/parameter/','reach.par') +indID <- 1 +indLand <- 11 +indSub <- 9 + +brins_chala <- Topologie(Dombes_Chalaronne, reach) +brins_veyle <- Topologie(Dombes_Veyle, reach) + +Total_hru_Chala <- NULL +for (k in brins_chala){ + Total_hru_Chala <- c (Total_hru_Chala,hruparam[hruparam[,indSub] == k,indID])} +Total_hru_Veyle <- NULL +for (k in brins_veyle){ + Total_hru_Veyle <- c (Total_hru_Veyle,hruparam[hruparam[,indSub] == k,indID])} + +for (k in Total_hru_Chala){ + if(length(which(k == hruparam[which(hruparam[,indLand] == 4 ), indID ])) != 0) {hruparam[which(k == hruparam[,indID]),indLand] <- 18} +} +for (k in Total_hru_Veyle){ + if(length(which(k == hruparam[which(hruparam[,indLand] == 4 ), indID ]))!= 0) {hruparam[which(k == hruparam[,indID]),indLand] <- 18} +} +#test : which(hruparam$V11 == 18) +write_new_paramfile('~/JAMS/modeldata/J2K_Rhone_Irrigation/parameter/hrus.par', hruparam, '~/JAMS/modeldata/J2K_Rhone_Irrigation/parameter/hrus_AleatoirIrrig_NewCult.par') + + + +# .dbf +#------------- + +# dbf decoupee sur irrig +hrus_et_cantons$LANDUSEID[order(hrus_et_cantons$CAT)]<-culture_finale +hrus_et_cantons$IRRIG_TYPE[order(hrus_et_cantons$CAT)]<-irrigtype + +# dbf de toutes les hrus +hrus_irrigation_all <- read.dbf('~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig/AleatoirIrrig_hrus.dbf') +for (hrus in hrus_et_cantons$CAT){ + hrus_irrigation_all$LANDUSEID[which(hrus_irrigation_all$CAT == hrus)] <- hrus_et_cantons$LANDUSEID[which(hrus_et_cantons$CAT == hrus)] + hrus_irrigation_all$IRRIG_TYPE[which(hrus_irrigation_all$CAT == hrus)] <- hrus_et_cantons$IRRIG_TYPE[which(hrus_et_cantons$CAT == hrus)] +} +hrus_irrigation_all$IRRIGATED[which(hrus_irrigation_all$CAT %in% hrus_et_cantons$CAT)] <-1 +hrus_irrigation_all$IRRIGATED[which(!(hrus_irrigation_all$CAT %in% hrus_et_cantons$CAT))] <-0 + +# 4/1/2015 correction ex-post pour rétablir 4, 12 ou 18 selon agri plaine, montagne, dombes: +hruparam <- Chargement_param('~/JAMS/modeldata/J2K_Rhone_Irrigation/parameter/','hrus_AleatoirIrrig_NewCult.par') +for (hrus in hruparam$V1){ + hrus_irrigation_all$LANDUSEID[which(hrus_irrigation_all$CAT == hrus)] <- hruparam[which(hruparam$V1==hrus),11] + hrus_irrigation_all$IRRIGATED[which(hrus_irrigation_all$CAT == hrus)] <- hruparam[which(hruparam$V1==hrus),15] + hrus_irrigation_all$IRRIG_TYPE[which(hrus_irrigation_all$CAT == hrus)] <- hruparam[which(hruparam$V1==hrus),16] +} +write.dbf(hrus_irrigation_all, '~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig_CultureNew/AleatoirIrrig_CN_hrus_corr.dbf') + + +# écriture +write.dbf(hrus_et_cantons, '~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig_CultureNew/AleatoirIrrig_CN_hrus_decoupees.dbf') +write.dbf(hrus_irrigation_all, '~/DATA/SIG_MDR/irrigation/shape_AleatoirIrrig_CultureNew/AleatoirIrrig_CN_hrus.dbf') + diff --git a/rScript/lib/rgate.R b/rScript/lib/rgate.R new file mode 100644 index 0000000000000000000000000000000000000000..77ec77682daf3cb0c03e6ed141b20c6aa8165b05 --- /dev/null +++ b/rScript/lib/rgate.R @@ -0,0 +1 @@ +source("lib/rgate/Stub.R") diff --git a/rScript/lib/rgate/Argument.R b/rScript/lib/rgate/Argument.R new file mode 100644 index 
0000000000000000000000000000000000000000..54e8decabeebb887694a4f2447705d31c04f4068 --- /dev/null +++ b/rScript/lib/rgate/Argument.R @@ -0,0 +1,148 @@ +library(R6) + +# class Argument avec R6 +Argument <- R6Class("Argument", + list( # attributs et méthods public + name = "", + value = NULL, + + initialize = function(name) { + stopifnot(is.character(name), length(name) == 1) + + self$name <- name + } + ) +) + +# class Parameter avec R6 +Parameter <- R6Class("Parameter", inherit = Argument, +public = list( + initialize = function(name, value) { + stopifnot(is.character(value), length(value) == 1) + + self$value <- value + super$initialize(name = name) + }, + + display = function() { + json_line = toJSON(self$serialize()) + print(json_line) + return(json_line) + }, + + getArgument = function(name) { + if(self$name == name) + return(self) + return(NULL) + }, + + getValueAsNumeric = function() { + return(as.numeric(self$value)) + }, + + getValueAsList = function() { + return(as.list(fromJSON(self$value))) + }, + + serialize = function() { + return(list( + "Parameter" = list( + "name" = self$name, + "value" = self$value + ) + )) + } +)) + + + +# class Dictionary avec R6 +Dictionary <- R6Class("Dictionary", inherit = Argument, +public = list( + value = list(), + initialize = function(name) { + self$value = list() + + super$initialize(name) + }, + + display = function() { + json_line = toJSON(self$serialize()) + print(json_line) + return(json_line) + }, + + addArgument = function(argument) { + self$value = append(self$value, argument) + invisible(self) + }, + + addParameter = function(name, value) { + self$addArgument(Parameter$new(name, value)) + }, + + getArgument = function(name) { + if(self$name == name) + return(self) + + for(argument in self$value) { + arg = argument$getArgument(name) + if(!is.null(arg)) + return(arg) + } + + return(NULL) + }, + + getParameter = function(name) { + for(argument in self$value) { + arg = argument$getArgument(name) + if(!is.null(arg)) + if(class(arg) == "Parameter") + return(arg) + } + }, + + serialize = function() { + dico <- list( + "Dictionary" = list( + "name" = self$name + ) + ) + + value = list() + for(val in self$value) { + value = append(value, val$serialize()) + } + dico$Dictionary$value = value + + return(dico) + }, + + deserialize = function(dico) { + value_json = dico + + # Dictionary + for(i in seq_along(value_json$Dictionary$name)) { + + if(!is.null(value_json$Dictionary$name[[i]]) && !is.null(value_json$Dictionary$value[[i]])) { + dictionary = Dictionary$new(value_json$Dictionary$name[[i]]) + dictionary = dictionary$deserialize(value_json$Dictionary$value[[i]]) + + self$value = append(self$value, dictionary) + } + } + + # Paramater + for(i in seq_along(value_json$Parameter$name)) { + + if(!is.null(value_json$Parameter$name[[i]]) && !is.null(value_json$Parameter$value[[i]])) { + parameter = Parameter$new(value_json$Parameter$name[[i]], value_json$Parameter$value[[i]]) + + self$value = append(self$value, parameter) + } + } + + invisible(self) + } +)) diff --git a/rScript/lib/rgate/File.R b/rScript/lib/rgate/File.R new file mode 100644 index 0000000000000000000000000000000000000000..c91f8c0bd036438cce1c8af779c7d5dd3100b039 --- /dev/null +++ b/rScript/lib/rgate/File.R @@ -0,0 +1,180 @@ +library(R6) + +File <- R6Class("File", +public=list( + name = "", + path = "", + actif = TRUE, + + initialize = function(path, name) { + stopifnot(is.character(name), length(name) == 1) + stopifnot(is.character(path), length(path) == 1) + + self$actif=TRUE + + self$name 
= name + self$path = path + }, + + display = function() { + if(self$actif){ + json_line = toJSON(self$serialize()) + print(json_line) + return(json_line) + } + return("") + }, + + serialize = function() { + if(self$actif){ + return(list( + "File" = list( + "name" = self$name, + "path" = self$path + ) + )) + } + return(list()) + } +)) + + + +ExecFile <- R6Class("ExecFile", inherit = File, +public = list( + cmd = "", + initialize = function(path, name, cmd) { + stopifnot(is.character(cmd), length(cmd) == 1) + + self$cmd = cmd + super$initialize(path, name) + }, + + display = function() { + if(self$actif){ + json_line = toJSON(self$serialize()) + print(json_line) + return(json_line) + } + return("") + }, + + serialize = function() { + if(self$actif){ + dico = super$serialize() + + dico$ExecFile = dico$File + dico$File = NULL + + dico$ExecFile$cmd = self$cmd + + return(dico) + } + return(list()) + } +)) + + + +OutputFile <- R6Class("OutputFile", inherit = File, +public = list( + initialize = function(path, name) { + super$initialize(path, name) + }, + + display = function() { + if(self$actif){ + json_line = toJSON(self$serialize()) + print(json_line) + return(json_line) + } + return("") + }, + + serialize = function() { + if(self$actif){ + dico = super$serialize() + + dico$OutputFile = dico$File + dico$File = NULL + + return(dico) + } + return(list()) + }, + + read = function() { + if(self$actif){ + json_line = paste0(readLines(paste0(self$path, self$name))) + line = fromJSON(json_line) + + return(line) + } + return("") + }, + + readAsDictionary = function() { + if(self$actif){ + dictionary_dict = Dictionary$new("outputFile") + + file = self$read() + output_dico = Dictionary$new("Outputs") + for(i in seq_along(file$Outputs)) { + dico = Dictionary$new(file$Outputs$Dictionary$name[[i]]) + dico$deserialize(file$Outputs$Dictionary$value[[i]]) + + output_dico$addArgument(dico) + } + dictionary_dict$addArgument(output_dico) + + error_dico = Dictionary$new("Error") + error_dico$addParameter("code", file$Error$code) + error_dico$addParameter("description", file$Error$description) + error_dico$addParameter("traceback", file$Error$traceback) + + dictionary_dict$addArgument(error_dico) + + check_dico = Dictionary$new("Check") + for(i in seq_along(file$Check)) { + dataFile_dico = Dictionary$new(file$Check$DataFile$fileName[[i]]) + dataFile_dico$addParameter("name", file$Check$DataFile$fileName[[i]]) + + dataFile_dico$addParameter("nbMissing", file$Check$DataFile$nbMissing[[i]]) + if(!length( file$Check$DataFile$missing[[i]]) == 0) + dataFile_dico$addParameter("missing", file$Check$DataFile$missing[[i]]) + + dataFile_dico$addParameter("nbIncorrect", file$Check$DataFile$nbIncorrect[[i]]) + if(!length( file$Check$DataFile$incorrect[[i]]) == 0) + dataFile_dico$addParameter("missing", file$Check$DataFile$incorrect[[i]]) + + check_dico$addArgument(dataFile_dico) + } + + dictionary_dict$addArgument(check_dico) + return(dictionary_dict) + } + return(Dictionary$new("")) + }, + + displayContent = function() { + if(self$actif){ + json_line = prettify(toJSON(self$read())) + print(json_line) + return(json_line) + } + return("") + }, + + writeOutput = function(dictionary) { + if(self$actif){ + file = toJSON(self$read()) + + to_add = paste0('"Outputs":[', improveSerialize(toJSON(dictionary$serialize())), ',') + file = paste0(append(strsplit(file, '"Outputs":\\[')[[1]], to_add, after=1)) + + write(prettify(file), paste0(self$path, self$name)) + } + invisible(self) + } + +)) diff --git a/rScript/lib/rgate/Stub.R 
b/rScript/lib/rgate/Stub.R new file mode 100644 index 0000000000000000000000000000000000000000..1acb4e9ba813a66dcbb71c7da838763072daa8b2 --- /dev/null +++ b/rScript/lib/rgate/Stub.R @@ -0,0 +1,211 @@ +library(R6) +library(jsonlite) + +source("lib/rgate/File.R") +source("lib/rgate/Argument.R") + +improveSerialize = function(serializedLine) { + serializedLine = gsub(':\\["', ':"', serializedLine) + serializedLine = gsub('"]', '"', serializedLine) + serializedLine = gsub('"]]', '"]', serializedLine) + serializedLine = gsub('"]}', '"}', serializedLine) + serializedLine = gsub( '"],','",', serializedLine) + return(serializedLine) + } +improveDeSerialize = function(serializedLine) { + serializedLine = gsub( ':"', ':\\["', serializedLine) + serializedLine = gsub( ': "', ':\\["', serializedLine) + serializedLine = gsub( '"]', '"]]', serializedLine) + serializedLine = gsub( '"}', '"]}', serializedLine) + serializedLine = gsub( '",', '"],', serializedLine) + return(serializedLine) + } + +Stub <- R6Class("Stub", +public = list( + outputFile = NA, + dictionaries = list(), + actif = TRUE, + + initialize = function(outputFile = NA) { + if(is.na(outputFile)) + outputFile = OutputFile$new("","") + + self$actif=TRUE + + self$outputFile = outputFile + }, + + displayDictionaries = function() { + if(self$actif) { + dictionaries = list() + dico = list() + + for(dictionary in self$dictionaries) + dictionaries = append(dictionaries, dictionary$serialize()) + + dico$Dictionaries = dictionaries + json_line = toJSON(dico) + + print(json_line) + + invisible(json_line) + } + invisible("") + }, + + displayOutputFile = function() { + if(self$actif) { + json_line = toJSON(self$outputFile$serialize()) + + print(json_line) + + invisible(json_line) + } + invisible("") + }, + + displayAll = function() { + if(self$actif) { + invisible(paste0(self$displayOutputFile(), self$displayDictionaries())) + } + invisible("") + }, + + getArgument = function(name) { + if(self$actif) { + return(self$findArgumentWithName(name)) + } + return(NULL) + }, + + findArgumentWithName = function(name) { + if(self$actif) { + dico = NULL + for(dictionary in self$dictionaries) { + dico = dictionary$getArgument(name) + if(!is.null(dico)) + break + } + + return(dico) + } + return(NULL) + }, + + findDictionaryWithName = function(name) { + if(self$actif) {- + for(dico in self$dictionaries){ + if(dico$name == name) + return(dico) + } + } + return(NULL) + } +)) + + +SenderStub <- R6Class("SenderStub", inherit = Stub, +public = list( + execFile = NA, + initialize = function(execFile=NA, outputFile=NA) { + super$initialize(outputFile = outputFile) + self$execFile = execFile + }, + + run = function(gatePath, gateName) { + + print(" =============== Running gateway =============== ") + + system(paste0(gatePath , gateName , " '" , self$serialize() , "'")) + print(" =============== Gateway ending ================ ") + + invisible(self) + }, + + displayExecFile = function() { + + json_line = toJSON(self$execFile$serialize()) + + print(json_line) + return(json_line) + + return("") + }, + + displayAll = function() { + + invisible(paste0(self$displayExecFile(), super$displayAll())) + + invisible("") + }, + + + serialize = function() { + + dico = list( + "ExecFile" = self$execFile$serialize()$ExecFile, + "OutputFile" = self$outputFile$serialize()$OutputFile + ) + + dictionaries = list() + + for(dictionary in self$dictionaries) { + dictionaries = append(dictionaries, dictionary$serialize()) + } + + dico$Dictionaries = dictionaries + + 
return(improveSerialize(toJSON(dico))) + + return("") + } +)) + + +ReceiverStub <-R6Class("ReceiverStub", inherit = Stub, +public = list( + + initialize = function() { + super$initialize(NA) + self$deserialize(self$readArguments()) + }, + + readArguments = function() { + if(length(commandArgs(TRUE)) > 0) + return(paste(commandArgs(TRUE),collapse = ' ')) + + self$actif = FALSE + self$outputFile$actif = FALSE + + for(dico in self$dictionaries) { + dico$actif = FALSE + } + + return(-1) + }, + + deserialize = function(json_line) { + if(self$actif) { + if(json_line != -1) { + + json_line = improveDeSerialize(json_line) + data_line = fromJSON(json_line) + + self$outputFile = OutputFile$new(data_line$OutputFile$path[[1]], data_line$OutputFile$name[[1]]) + + dictionaries_json = data_line$Dictionaries + + for (i in seq_along(dictionaries_json)) { + dictionary = Dictionary$new(dictionaries_json[[i]]$name[[1]]) + dictionary$deserialize(dictionaries_json[[i]]$value[[1]]) + + self$dictionaries = append(self$dictionaries, dictionary) + } + } + } + + invisible(self) + } + +)) diff --git a/rScript/lib/utilitaire_irrigation.R b/rScript/lib/utilitaire_irrigation.R new file mode 100644 index 0000000000000000000000000000000000000000..bb249835e43ad61269626a7c0bec441040c0d48e --- /dev/null +++ b/rScript/lib/utilitaire_irrigation.R @@ -0,0 +1,455 @@ +# ============================================================================== +# By Theo L. intern at INRAE +# CREATED on May 16, 2022 +# +# +# ------------------------------------------------------------------------------ +# the objective is to regroup the essential function of the MDR_irrigated project +# in an easy to maintain and well documented file +# +# most of the functions that are in this file are comming from another R source +# file, to keep track of those the "From" boxes indicate the name of the original +# file +# ============================================================================== + +library(zoo) +library(xts) + + + +# ========================== +# ** From MDR_utilitaires ** +# ========================== + + +# -------------------------------add_param-------------------------------------- +# **** add an extra parameter to reach.par + +# add_param <- function(inputdir, oldreachfile, newreachfile, newparamName, newparamVal, newparamUnit) +# Adds a new parameter to the given reach file, with the given value and unit. +# Args: +# inputdir: The input directory +# oldreachfile: The old reach file +# newreachfile: The new reach file +# newparamName: The name of the new parameter +# newparamVal: The value of the new parameter +# newparamUnit: The unit of the new parameter + +# The point of this function is to add a new parameter to an oldreachfile and create a newreachfile with the new parameter included. +# The function takes the inputdir (input directory), oldreachfile, newreachfile, newparamName, newparamVal, and newparamUnit as arguments. +# it then starts by finding the number of lines in the oldreachfile, then reads the header line and finds the line where the ID is located. +# Then the function reads in the oldreachfile, adds the new paramName to the file, and creates a new Min, Max, and Unit file with the new parameter included. +# Finally, the function writes the newreachfile with the new parameter included. 
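+# Hypothetical example (parameter name, value and unit are placeholders):
+#   add_param('parameter/', 'reach.par', 'reach_new.par', 'newParam', 1, 'm3')
+# appends a column "newParam" with the constant value 1 to every reach and writes the
+# result to parameter/reach_new.par.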
+# ------------------------------------------------------------------------------ +add_param <- function(inputdir, oldreachfile, newreachfile, newparamName, newparamVal, newparamUnit) { + + nbLines <- skip_lines(inputdir,oldreachfile) + headerReach <- readLines(paste0(inputdir, oldreachfile), n = nbLines) + LinesNames <- which(substr(headerReach,1,2)=="ID") + Names <- read.table(paste0(inputdir, oldreachfile), nr=1, skip=LinesNames-1) + Names <- cbind(Names,newparamName) + Min <- read.table(paste0(inputdir, oldreachfile), nr=1, skip=LinesNames) + Min <- cbind(Min,0,0) + Max <- read.table(paste0(inputdir, oldreachfile), nr=1, skip=LinesNames+1) + Max <- cbind(Max,9999999,9999999) + Unit <- read.table(paste0(inputdir, oldreachfile), nr=1, skip=LinesNames+2) + Unit <- cbind(Unit,newparamUnit) + reach <- Chargement_param(inputdir,oldreachfile) + reach <- cbind(reach,newparamVal) + + write.table (Names, paste0(inputdir, newreachfile), col.names=F, row.names=F, quote=F, sep='\t', append=F) + write.table (Min, paste0(inputdir, newreachfile), col.names=F, row.names=F, quote=F, sep='\t', append=T) + write.table (Max, paste0(inputdir, newreachfile), col.names=F, row.names=F, quote=F, sep='\t', append=T) + write.table (Unit, paste0(inputdir, newreachfile), col.names=F, row.names=F, quote=F, sep='\t', append=T) + write.table (reach, paste0(inputdir, newreachfile), col.names=F, row.names=F, quote=F, sep='\t', append=T) +} + + +# -------------------------------Chargement_param------------------------------- +# Chargement_param <- function(chemin,Name) +# Loads all parameters from a given file. +# Args: +# chemin: The path to the file +# Name: The name of the file +# Returns: +# The parameters as a data frame + +# - The code is able to identify the line with the first values and skip the initial text lines +# Caveats: it may not work for files with less than 3 lines of data. +# ------------------------------------------------------------------------------ +Chargement_param <- function(chemin, Name) { + # initialization + k <- 0 + obj <- NULL; obj2 <- NULL; obj3 <- NULL + + # loop until we find a line with 3 numeric value + while(length(na.omit(obj))==0 | length(na.omit(obj2))==0 | length(na.omit(obj3))==0) { + + obj <- as.numeric(read.table(paste0(chemin, Name), nrow=1, skip=k, colClasses="character"))[1] + obj2 <- as.numeric(read.table(paste0(chemin, Name), nrow=1, skip=k+1, colClasses="character"))[1] + obj3 <- as.numeric(read.table(paste0(chemin, Name), nrow=1, skip=k+2, colClasses="character"))[1] + k <- k+1 + } + + # get the number of line to skip to get the data + nbLines <- k - 1 + + # load the data + data <- read.table(paste0(chemin, Name), skip=nbLines) + mycolnames <- apply(read.table(paste0(chemin, Name), nrow=1)[1,], 1, as.character) + colnames(data) <- mycolnames + + return(data) +} + + +# -------------------------------write_new_paramfile---------------------------- +# write_new_paramfile=<-(oldfile, newvalues ,newfile) +# Writes the new combination of HRUs to the original parameter file. 
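+# (in practice: reads the header lines from oldfile and writes them to newfile followed by
+# newvalues; oldfile itself is not modified)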
+# Args:
+#   oldfile: The original parameter file
+#   newvalues: A vector of the HRUs to write to the new parameter file
+#   newfile: The new parameter file
+
+# write a modified params file using the header of the old one
+# ------------------------------------------------------------------------------
+write_new_paramfile <- function(oldfile, newvalues, newfile) {
+  # get the header
+  nb_lines <- skip_lines(oldfile)
+  header <- readLines(oldfile, n = nb_lines)
+
+  # write
+  write.table(header, newfile, sep = '\t', col.names = F, row.names = F, quote = F)
+  write.table(newvalues, newfile, col.names = F, row.names = F, quote = F, append = TRUE, sep = '\t')
+}
+
+
+# -------------------------------skip_lines-------------------------------------
+# skip_lines <- function(chemin, Name = "")
+# Finds the number of lines to skip before the data starts in a file.
+# Args:
+#   chemin: The path to the file (or the full file path when Name is omitted)
+#   Name: The name of the file, appended to chemin (optional, defaults to "")
+# Returns:
+#   The number of lines to skip before the data starts in the file
+# ------------------------------------------------------------------------------
+# Note: a single definition (with an optional Name) serves both call styles used in this
+# file, skip_lines(oldfile) and skip_lines(inputdir, oldreachfile); two separate functions
+# with the same name would mask each other.
+skip_lines <- function(chemin, Name = ""){
+  file <- paste0(chemin, Name)
+  k <- 0
+  obj <- NULL; obj2 <- NULL; obj3 <- NULL
+  while (length(na.omit(obj)) == 0 | length(na.omit(obj2)) == 0 | length(na.omit(obj3)) == 0) {
+
+    obj <- as.numeric(read.table(file, nrow = 1, skip = k, colClasses = "character"))[1]
+    obj2 <- as.numeric(read.table(file, nrow = 1, skip = k + 1, colClasses = "character"))[1]
+    obj3 <- as.numeric(read.table(file, nrow = 1, skip = k + 2, colClasses = "character"))[1]
+
+    k <- k + 1
+  }
+  return(k - 1)
+}
+
+
+# -------------------------------luid2cult--------------------------------------
+# luid2cult <- function(vect_luid)
+# Converts a vector of J2000 culture codes to their corresponding names.
+# Args:
+#   vect_luid: A vector of J2000 culture codes
+# Returns:
+#   A vector containing the corresponding names
+
+# The point of this function is to match a vector of numbers with a vector of strings.
+# ------------------------------------------------------------------------------
+luid2cult <- function(vect_luid) {
+  cultures <- c('Vigne', 'Mais', 'Tournesol', 'Blé dur', 'Maraichage', 'PdT', 'Vergers', 'Prairies', 'Protéagineux', 'Riz', "Jachère", "Divers", "Industrielles")
+  numJ2000_cultures <- 19:31
+  res <- apply(as.matrix(vect_luid), 2, function(X){cultures[match(X, numJ2000_cultures)]})
+  return(as.vector(res)) # Not sure this is necessary...
+}
+
+# quick manual check (18 has no J2000 match and maps to NA):
+# vec <- luid2cult(18:25)
+
+
+# ===================================
+# ** From readwritefunctions_J2000 **
+# ===================================
+
+# -------------------------------ReadLoopDaily----------------------------------
+# ReadLoopDaily <- function(folder, file, filtre)
+# Reads a "daily file" and returns the data and the corresponding dates.
+# Args:
+#   folder: The folder where the file is located
+#   file: The name of the file
+#   filtre: A logical indicating whether to filter the data or not
+# Returns:
+#   A list containing the dates and the data
+
+# 1. it starts by reading the length of the first data block and the number of blocks in the file
+# 2.
then it reads the file's headers (column names) +# 3. finally it reads the data block by block, adding a day to the date at each block, until the end of the file +# ------------------------------------------------------------------------------ +ReadLoopDaily <- function(folder, file, filtre) { + # Open the file + con <- file(paste0(folder, file)) + open(con) + + # Be careful as we remain in the same connection we must count the lines from the current line read (not from the beginning of the file) + + # Read the nb of elements (HRUs or Reaches) (length of the blocks) + Lblocks <- read.table(con, nrows = 1, sep = "\t", skip = 1) + Lblocks <- Lblocks[,3] + + # Get the nb of time steps fo the simulation (nb of blocks of the file) + Nblocks <- read.table(con, nrows = 1, sep = "\t", skip = 1) + Nblocks <- Nblocks[,3] + + # Get the col names (names of the simulated variables) + if (filtre == T) {Colnames <- scan(con, what = "", nlines = 1, sep = "", skip = 3)} else {Colnames <- scan(con, what = "", nlines = 1, sep = "", skip = 2)} + #Colnames <- scan(con, what = "", nlines = 1, sep = "", skip = 2) + + # Get the start date of the simulation (we consider only the date -> daily time step) + Datestart <- read.table(con, as.is = TRUE, nrows = 1, sep = "", skip = 3) + Datestart <- Datestart[,2] + if (filtre == T) { + read.table(con, nrows = 1, sep = "\t") + count <- length(Colnames)+1 + compt <- 0 + while (count == (length(Colnames)+1)) { + obj <- read.table(con, nrows = 1, sep = "\t") + count <- dim(obj)[2] + compt <- compt + 1 + } + Lblocks <- compt-1 + con<-file(paste0(folder, file)) + open(con) + read.table(con, nrows = 1, sep = "\t", skip = 1) + read.table(con, nrows = 1, sep = "\t", skip = 1) + Colnames <- scan(con, what = "", nlines = 1, sep = "", skip = 3) + # Get the start date of the simulation (we consider only the date -> daily time step) + read.table(con, as.is = TRUE, nrows = 1, sep = "", skip = 3) + } + + # Read the data + # Initialisation of a matrix of the correct size to store the data + # nrow = nb of time steps * nb of elts + # ncol = nb of simulated variables + ncol <-length(Colnames) + Data <- matrix(nrow=Nblocks*Lblocks,ncol=ncol) + # Loop on the nb of blocks + for (i in 0:(Nblocks -1)) + { + # Read the block of data + # if i=0 (first block skip only 1 line) + if(i==0) + Datatemp <- read.table(con, nrows = Lblocks, sep = "", skip = 1, colClasses="numeric") + # else skip 3 lines + else + Datatemp <- read.table(con, nrows = Lblocks, sep = "", skip = 3, colClasses="numeric") + + # Add the values to the matrix + Data[(i*Lblocks+1):((i+1)*Lblocks),1:ncol] <- as.matrix(Datatemp) + } + + # close the file + close(con) + + # Add the colnames + colnames(Data) <- Colnames + # Create the corresponding vector of dates + dates <- as.character(seq(from = as.Date(Datestart), length.out = Nblocks, by = "day")) + + # Return the vector of dates and the data as a list + list(dates=dates, Data=Data) +} + + + +# ======================== +# ** From zoo_functions ** +# ======================== + + +# -------------------------------aggregateZoo----------------------------------- +# aggregateZoo <- function (z, timeStep, sumOrMeanFunction) +# Aggregates the given zoo object over the given time step. 
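+# For example, aggregateZoo(z, "m", mean) returns one value per calendar month (the
+# inter-annual monthly mean), while aggregateZoo(z, "my", mean) returns one value per
+# year-month of the series.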
+# Args:
+#   z: A zoo object
+#   timeStep: The time step over which to aggregate the zoo object
+#             Possible values: ["dmy","my","y","m","sy","s"]
+#   sumOrMeanFunction: The function to apply to the aggregated zoo object
+#                      Possible values: [sum,mean]
+# Returns:
+#   The aggregated zoo object
+
+# The point of this R function is to aggregate data over different time steps.
+# The supported time steps are "dmy", "my", "y", "m", "sy" and "s".
+# For each time step, the function either takes the sum or the mean of the data.
+# ------------------------------------------------------------------------------
+aggregateZoo <- function (z, timeStep, sumOrMeanFunction) {
+  # Returns a new zoo object aggregated over the time step timeStep
+  # (["dmy","my","y","m","sy","s"]) using either the sum or the mean ([sum,mean])
+
+  if(timeStep == "dmy"){
+    return (aggregate(z, time(z) - as.numeric(time(z)) %% 1, sumOrMeanFunction, na.rm = TRUE))
+  }
+  if(timeStep == "my"){
+    return (aggregate(z, as.Date(as.yearmon(time(z))), sumOrMeanFunction, na.rm = TRUE))
+  }
+  if(timeStep == "y"){
+    return (aggregate(z, format(as.Date(index(z)), '%y'), sumOrMeanFunction, na.rm = TRUE))
+  }
+  if(timeStep == "m"){
+    return (aggregate(z, format(as.Date(index(z)), '%m'), sumOrMeanFunction, na.rm = TRUE))
+  }
+  if(timeStep == "sy"){
+    return (aggregate(z, as.Date(as.yearqtr(time(z))), sumOrMeanFunction, na.rm = TRUE))
+  }
+  if(timeStep == "s"){
+    return (aggregate(z, quarters(time(z)), sumOrMeanFunction, na.rm = TRUE))
+  }
+
+  print("Type not understood")
+}
+
+
+
+# ===================================
+# ** From functions_post_treatment **
+# ===================================
+
+
+# -------------------------------Topologie--------------------------------------
+# Topologie <- function (brin, reach)
+# Finds all reaches upstream of a given reach in the river network.
+# Args:
+#   brin: The ID of the chosen reach
+#   reach: The reach.par parameter table loaded with Chargement_param (column 1: reach ID, column 2: downstream reach ID)
+# Returns:
+#   A vector containing the ID of the chosen reach and of all reaches upstream of it
+
+# starts from the chosen reach and walks up the network, collecting every reach that eventually drains into it.
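+# For example (toy table, not project data), with column 1 holding the reach ID and
+# column 2 its downstream reach:
+#   reach_toy <- cbind(c(5, 4, 1), c(4, 1, 9999))  # 5 flows into 4, 4 flows into 1
+#   Topologie(1, reach_toy)                        # returns c(1, 4, 5)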
+
+# Walks upstream from the chosen reach to the headwaters of the basin
+# ------------------------------------------------------------------------------
+Topologie <- function (brin, reach) {
+  IDs <- NULL
+  Brin0 <- brin
+
+  for (indice in 1:1000){
+    assign(paste0('Brin', indice), NULL)
+  }
+  k <- 0
+
+  while (length(get(paste0('Brin', k)))!=0){
+    for (i in seq_along(get(paste0('Brin', k)))){
+      assign(paste0('Brin', k + 1), c(get(paste0('Brin', k + 1)), reach[which(reach[, 2]== get(paste0('Brin', k))[i]), 1]))
+    }
+    k <- k+1
+  }
+  Total <- brin
+  for (l in 1:k){
+    Total <- unique(c(Total,get(paste0('Brin', l))))
+  }
+  Total
+}
+
+
+
+# ==========================
+# ** From MDR_AERMCprelev **
+# ==========================
+
+
+# -------------------------------Prelev82_1987_2007-----------------------------
+# Prelev82_1987_2007 <- function()
+# Calculates the mean annual water withdrawal over 1987-2007 for each canton
+# Args:
+#   None
+# Returns:
+#   A table with the canton and the corresponding mean annual withdrawal (m3/yr)
+# ------------------------------------------------------------------------------
+Prelev82_1987_2007 <- function() { # m3/yr
+  Prelev <- read.table('~/Documents/MDR/irrigation/Chronique_PrelevRMC_Cantons.txt',header=T)
+
+  Prelev82 <- NULL
+  for (cant in Prelev$Canton[order(unique(Prelev$Canton))]){
+    Prelev82_ann <- Prelev[which(Prelev[, 1] == cant), 5] * 1000 #(m3)
+    Prelev_date <- as.Date(as.character(Prelev[which(Prelev[, 1] == cant), 2]), format="%Y")
+    Prelev82_ann <- xts(Prelev82_ann, Prelev_date)
+    Prelev82 <- rbind(Prelev82, mean(Prelev82_ann["1987/2007"])) # mean annual withdrawal over 1987-2007
+  }
+  Prelev82data <- cbind(Prelev$Canton[order(unique(Prelev$Canton))], Prelev82)
+  colnames(Prelev82data) <- c('canton', 'Prelev82')
+
+  return(Prelev82data)
+}
+
+
+# -------------------------------Prelev8182_1987_2007---------------------------
+# Prelev8182_1987_2007 <- function()
+# Calculates, for each canton, the combined mean annual withdrawal of the two series (81 and 82) over 1987-2007.
+# Args:
+#   None
+# Returns:
+#   A table with the canton in the first column and the combined mean annual withdrawal in the second column
+# ------------------------------------------------------------------------------
+Prelev8182_1987_2007 <- function(){ # m3/yr
+  Prelev <- read.table('~/Documents/MDR/irrigation/Chronique_PrelevRMC_Cantons.txt',header=T)
+
+  Prelev8182 <- NULL
+  for (cant in Prelev$Canton[order(unique(Prelev$Canton))]){
+    Prelev82_ann <- Prelev[which(Prelev[, 1] == cant), 5] * 1000 #(m3)
+    Prelev81_ann <- Prelev[which(Prelev[, 1] == cant), 4] * 1000 #(m3)
+    Prelev_date <- as.Date(as.character(Prelev[which(Prelev[, 1] == cant), 2]), format="%Y")
+
+    Prelev82_ann <- xts(Prelev82_ann, Prelev_date)
+    Prelev81_ann <- xts(Prelev81_ann, Prelev_date)
+
+    Prelev8182 <- rbind(Prelev8182, mean(Prelev81_ann["1987/2007"])+mean(Prelev82_ann["1987/2007"])) # mean annual withdrawal over 1987-2007
+  }
+  Prelev8182data <- cbind(Prelev$Canton[order(unique(Prelev$Canton))], Prelev8182)
+  colnames(Prelev8182data) <- c('canton', 'Prelev8182')
+
+  return(Prelev8182data)
+}
+
+
+# -------------------------------Prelev8182_2008_2012---------------------------
+# Prelev8182_2008_2012 <- function()
+# Finds, for each canton, the combined average annual withdrawal (series 81 + 82) over 2008-2012.
+# Args: +# None +# Returns: +# A dataframe containing the canton and the corresponding average annual water withdrawals +# ------------------------------------------------------------------------------ +# m3/yr +Prelev8182_2008_2012 <- function(){ + Prelev <- read.table('~/Documents/MDR/irrigation/Chronique_PrelevRMC_Cantons.txt',header=T) # create a data.frame from the .txt file + + Prelev8182 <- NULL + for (cant in Prelev$Canton[order(unique(Prelev$Canton))]){ + Prelev82_ann <- Prelev[which(Prelev[,1] == cant),5] * 1000 #(m3) create a vector with all the annual data of the first canal for the canton cant + Prelev81_ann <- Prelev[which(Prelev[,1] == cant),4] * 1000 #(m3) same but with the second canal + Prelev_date <- as.Date(as.character(Prelev[which(Prelev[,1] == cant),2]), format="%Y") # create a vector with the date of all the data + + Prelev82_ann <- xts(Prelev82_ann, Prelev_date) # create a time series with the data of the first canal and their date + Prelev81_ann <- xts(Prelev81_ann, Prelev_date) # same with the second canal + + Prelev8182 <- rbind(Prelev8182,mean(Prelev81_ann["2008/2012"])+mean(Prelev82_ann["2008/2012"])) # add to a vector the mean of the time series between 2008 and 2012 (5 years) + } + Prelev8182data <- cbind(Prelev$Canton[order(unique(Prelev$Canton))], Prelev8182) # create a data.frame with the canton and the mean of 5 years + colnames(Prelev8182data) <- c('canton','Prelev8182') # give a name to the columns + + return(Prelev8182data) +} +
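+
+# Hypothetical usage of the withdrawal functions (variable names and the merge step are
+# placeholders for illustration only):
+# prelev_82   <- Prelev82_1987_2007()      # per-canton mean annual withdrawal, 1987-2007
+# prelev_8182 <- Prelev8182_2008_2012()    # per-canton mean annual withdrawal (81 + 82), 2008-2012
+# both <- merge(as.data.frame(prelev_82), as.data.frame(prelev_8182), by = 'canton')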