%matplotlib inline
import pandas as pd
import socket
# Fully-qualified hostname of the current node; passed to load.set_control
# below to pick host-specific cluster/paths configuration.
host = socket.getfqdn()
from core import load, zoom, calc, save,plots,monitor
# Reload the helper modules so edits to ./core/*.py take effect without
# restarting the notebook kernel.
import importlib
importlib.reload(load)
importlib.reload(zoom)
importlib.reload(calc)
importlib.reload(save)
importlib.reload(plots)
importlib.reload(monitor)
<module 'core.monitor' from '/ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py'>
If you submit the job with a job scheduler, below is a list of environment variables one can pass.
local : if True, run a dask local cluster; if not true, use the number of workers set in 'local'. If no 'local' is given, local will be set automatically to 'True'.
%env ychunk='2', #%env tchunk='2'
Controls chunking. 'False' keeps the original netcdf file's chunking unmodified.
ychunk=10 will group the original netcdf files 10 by 10.
tchunk=1 will chunk the time coordinate one by one.
%env file_exp=
'file_exp': Which 'experiment' name is it? This corresponds to the intake catalog name without path and .yaml.
#%env year=
For Validation, this corresponds to the 'year' in path/year/month for monitoring; it maps to 'date'. Setting it as 0[0-9] & 1[0-9] & [2-3][0-9] means processing all files in the monitoring directory, so the job can be separated into three lots. For the DELTA experiment, year corresponds to a real 'year'.
%env month=
For monitoring, this corresponds to the file path path-XIOS.{month}/.
For the DELTA experiment, this corresponds to a real 'month'.
Proceed with saving? True or False. Default is set to True.
Proceed with plotting? True or False. Default is set to True.
Proceed with computation, or just load a computed result? True or False. Default is set to True.
save output file used for plotting
Using a kerchunked file -> False, not using kerchunk -> True.
name of control file to be used for computation/plots/save/ We have number of M_xxx.csv
Monitor.sh calls M_MLD_2D
and AWTD.sh, Fluxnet.sh, Siconc.sh, IceClim.sh, FWC_SSH.sh, Integrals.sh , Sections.sh
M_AWTMD
M_Fluxnet
M_Ice_quantities
M_IceClim M_IceConce M_IceThick
M_FWC_2D M_FWC_integrals M_FWC_SSH M_SSH_anomaly
M_Mean_temp_velo M_Mooring
M_Sectionx M_Sectiony
%%time
# 'savefig': Do we save output in html? or not. keep it true.
savefig=True
# Resolve the run configuration from host-specific settings: dask client and
# cluster handles, control-file name, intake catalog URL, the date being
# monitored, and the dask-report / results output directories.
client,cluster,control,catalog_url,month,year,daskreport,outputpath = load.set_control(host)
# Make sure both output directories exist before anything writes to them.
!mkdir -p $outputpath
!mkdir -p $daskreport
# Display the dask client summary (dashboard link, worker count, memory).
client
local True using host= irene4765.c-irene.mg1.tgcc.ccc.cea.fr starting dask cluster on local= True workers 16 10000000000 rome local cluster starting This code is running on irene4765.c-irene.mg1.tgcc.ccc.cea.fr using SEDNA_DELTA_MONITOR file experiment, read from ../lib/SEDNA_DELTA_MONITOR.yaml on year= 2012 on month= 06 outputpath= ../results/SEDNA_DELTA_MONITOR/ daskreport= ../results/dask/6476183irene4765.c-irene.mg1.tgcc.ccc.cea.fr_SEDNA_DELTA_MONITOR_06M_Fluxnet/ CPU times: user 530 ms, sys: 130 ms, total: 660 ms Wall time: 18.7 s
Client-6976a108-1970-11ed-b6df-080038b93421
Connection method: Cluster object | Cluster type: distributed.LocalCluster |
Dashboard: http://127.0.0.1:8787/status |
29db7c92
Dashboard: http://127.0.0.1:8787/status | Workers: 16 |
Total threads: 128 | Total memory: 251.06 GiB |
Status: running | Using processes: True |
Scheduler-c61f1fa9-b2bc-4ed7-9b8d-18e67137283e
Comm: tcp://127.0.0.1:34064 | Workers: 16 |
Dashboard: http://127.0.0.1:8787/status | Total threads: 128 |
Started: Just now | Total memory: 251.06 GiB |
Comm: tcp://127.0.0.1:46768 | Total threads: 8 |
Dashboard: http://127.0.0.1:42283/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:45782 | |
Local directory: /tmp/dask-worker-space/worker-xt1j_cv6 |
Comm: tcp://127.0.0.1:40241 | Total threads: 8 |
Dashboard: http://127.0.0.1:42473/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:39346 | |
Local directory: /tmp/dask-worker-space/worker-_k9ns9h_ |
Comm: tcp://127.0.0.1:33583 | Total threads: 8 |
Dashboard: http://127.0.0.1:46791/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:41592 | |
Local directory: /tmp/dask-worker-space/worker-nzon8cic |
Comm: tcp://127.0.0.1:45267 | Total threads: 8 |
Dashboard: http://127.0.0.1:33398/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:45705 | |
Local directory: /tmp/dask-worker-space/worker-_l9u07xe |
Comm: tcp://127.0.0.1:37173 | Total threads: 8 |
Dashboard: http://127.0.0.1:43087/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:38105 | |
Local directory: /tmp/dask-worker-space/worker-l_xxao3g |
Comm: tcp://127.0.0.1:34780 | Total threads: 8 |
Dashboard: http://127.0.0.1:41100/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:46580 | |
Local directory: /tmp/dask-worker-space/worker-gq3o1y_i |
Comm: tcp://127.0.0.1:40036 | Total threads: 8 |
Dashboard: http://127.0.0.1:38316/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:39522 | |
Local directory: /tmp/dask-worker-space/worker-npmxaibp |
Comm: tcp://127.0.0.1:34764 | Total threads: 8 |
Dashboard: http://127.0.0.1:42676/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:33579 | |
Local directory: /tmp/dask-worker-space/worker-vlhfh7gw |
Comm: tcp://127.0.0.1:40065 | Total threads: 8 |
Dashboard: http://127.0.0.1:40498/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:44093 | |
Local directory: /tmp/dask-worker-space/worker-qqt9ai8o |
Comm: tcp://127.0.0.1:36516 | Total threads: 8 |
Dashboard: http://127.0.0.1:34247/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:33412 | |
Local directory: /tmp/dask-worker-space/worker-vch9vl47 |
Comm: tcp://127.0.0.1:33372 | Total threads: 8 |
Dashboard: http://127.0.0.1:37899/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:43246 | |
Local directory: /tmp/dask-worker-space/worker-8mkie26t |
Comm: tcp://127.0.0.1:38367 | Total threads: 8 |
Dashboard: http://127.0.0.1:41179/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:46396 | |
Local directory: /tmp/dask-worker-space/worker-yl6osose |
Comm: tcp://127.0.0.1:38870 | Total threads: 8 |
Dashboard: http://127.0.0.1:46668/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:43586 | |
Local directory: /tmp/dask-worker-space/worker-0ilwda1m |
Comm: tcp://127.0.0.1:43933 | Total threads: 8 |
Dashboard: http://127.0.0.1:34982/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:40090 | |
Local directory: /tmp/dask-worker-space/worker-u7fa5ewr |
Comm: tcp://127.0.0.1:41361 | Total threads: 8 |
Dashboard: http://127.0.0.1:45578/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:40840 | |
Local directory: /tmp/dask-worker-space/worker-ayd_9o1t |
Comm: tcp://127.0.0.1:42002 | Total threads: 8 |
Dashboard: http://127.0.0.1:39999/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:35127 | |
Local directory: /tmp/dask-worker-space/worker-g0lp0hm9 |
# Load the control table: one row per computation (Value, Inputs, Equation,
# Zone, Plot, Colourmap, MinMax, Unit, ...) driving the monitoring pipeline.
df=load.controlfile(control)
#Take out 'later' tagged computations
#df=df[~df['Value'].str.contains('later')]
# Display the control table for inspection.
df
Value | Inputs | Equation | Zone | Plot | Colourmap | MinMax | Unit | Oldname | Unnamed: 10 | |
---|---|---|---|---|---|---|---|---|---|---|
Fluxnet | gridV.vomecrty,param.e3v_0,param.e1v,param.mas... | calc.Fluxnet(data) | FramS_All | Fluxnet_integrals | None | ((-10,10),(-10,50) ,(-150,50),(-25,5) ) | (Sv,TW, mSv,10^-2 Sv) | I-6 | ||
Fluxnet | gridV.vomecrty,param.e3v_0,param.e1v,param.mas... | calc.Fluxnet(data) | Davis | Fluxnet_integrals | None | ((-5.0,5.0),(-25,27) ,(-200,50),(-9,5) ) | (Sv,TW, mSv,10^-2 Sv) | I-6 | ||
Fluxnet | gridV.vomecrty,param.e3v_0,param.e1v,param.mas... | calc.Fluxnet(data) | Bering | Fluxnet_integrals | None | ((-2,2),(-10,50) ,(-150,50),(-2,4) ) | (Sv,TW, mSv,10^-2 Sv) | I-6 |
Each computation consists of
%%time
import os
calcswitch=os.environ.get('calc', 'True')
lazy=os.environ.get('lazy','False' )
loaddata=((df.Inputs != '').any())
print('calcswitch=',calcswitch,'df.Inputs != nothing',loaddata, 'lazy=',lazy)
data = load.datas(catalog_url,df.Inputs,month,year,daskreport,lazy=lazy) if ((calcswitch=='True' )*loaddata) else 0
data
calcswitch= False df.Inputs != nothing True lazy= False CPU times: user 378 µs, sys: 58 µs, total: 436 µs Wall time: 411 µs
0
%%time
monitor.auto(df,data,savefig,daskreport,outputpath,file_exp='SEDNA'
)
#calc= False #save= False #plot= True Value='Fluxnet' Zone='FramS_All' Plot='Fluxnet_integrals' cmap='None' clabel='(Sv,TW, mSv,10^-2 Sv)' clim= ((-10, 10), (-10, 50), (-150, 50), (-25, 5)) outputpath='../results/SEDNA_DELTA_MONITOR/' nc_outputpath='../nc_results/SEDNA_DELTA_MONITOR/' filename='SEDNA_Fluxnet_integrals_FramS_All_Fluxnet' #3 no computing , loading starts data=save.load_data(plot=Plot,path=nc_outputpath,filename=filename) start loading data load 1Dnc file from ../nc_results/SEDNA_DELTA_MONITOR/../*/SEDNA_Fluxnet_integrals_FramS_All_Fluxnet*.nc
--------------------------------------------------------------------------- KeyError Traceback (most recent call last) File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/core/dataset.py:1317, in Dataset._construct_dataarray(self, name) 1316 try: -> 1317 variable = self._variables[name] 1318 except KeyError: KeyError: 'time_centered' During handling of the above exception, another exception occurred: KeyError Traceback (most recent call last) File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/core/concat.py:556, in _dataset_concat(datasets, dim, data_vars, coords, compat, positions, fill_value, join, combine_attrs) 555 try: --> 556 vars = ensure_common_dims([ds[name].variable for ds in datasets]) 557 except KeyError: File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/core/concat.py:556, in <listcomp>(.0) 555 try: --> 556 vars = ensure_common_dims([ds[name].variable for ds in datasets]) 557 except KeyError: File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/core/dataset.py:1410, in Dataset.__getitem__(self, key) 1409 if utils.hashable(key): -> 1410 return self._construct_dataarray(key) 1411 if utils.iterable_of_hashable(key): File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/core/dataset.py:1319, in Dataset._construct_dataarray(self, name) 1318 except KeyError: -> 1319 _, name, variable = _get_virtual_variable(self._variables, name, self.dims) 1321 needed_dims = set(variable.dims) File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/core/dataset.py:175, in _get_virtual_variable(variables, key, dim_sizes) 174 if len(split_key) != 2: --> 175 raise KeyError(key) 177 ref_name, var_name = split_key KeyError: 'time_centered' During handling of the above exception, another exception occurred: ValueError Traceback (most recent call last) File <timed eval>:1, in <module> File 
/ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py:79, in auto(df, val, savefig, daskreport, outputpath, file_exp) 77 print('data=save.load_data(plot=Plot,path=nc_outputpath,filename=filename)' ) 78 with performance_report(filename=daskreport+"_calc_"+step.Value+".html"): ---> 79 data=save.load_data(plot=Plot,path=nc_outputpath,filename=filename) 80 #saveswitch=False 82 display(data) File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/save.py:32, in load_data(plot, path, filename) 30 print('start loading data') 31 if 'int' in plot: ---> 32 data=load_integral(path,filename) 33 elif 'Mooring' in plot: 34 data=load_integral(path,filename) File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/save.py:74, in load_integral(path, filename) 72 filesave=path+'../*/'+filename+'*.nc' 73 print('load 1Dnc file from',filesave) ---> 74 return xr.open_mfdataset(filesave 75 ,compat='override' 76 ,data_vars='minimal' 77 ,concat_dim=('t') 78 ,combine='nested' 79 ,coords='minimal') File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/api.py:987, in open_mfdataset(paths, chunks, concat_dim, compat, preprocess, engine, data_vars, coords, combine, parallel, join, attrs_file, combine_attrs, **kwargs) 983 try: 984 if combine == "nested": 985 # Combined nested list by successive concat and merge operations 986 # along each dimension, using structure given by "ids" --> 987 combined = _nested_combine( 988 datasets, 989 concat_dims=concat_dim, 990 compat=compat, 991 data_vars=data_vars, 992 coords=coords, 993 ids=ids, 994 join=join, 995 combine_attrs=combine_attrs, 996 ) 997 elif combine == "by_coords": 998 # Redo ordering from coordinates, ignoring how they were ordered 999 # previously 1000 combined = combine_by_coords( 1001 datasets, 1002 compat=compat, (...) 
1006 combine_attrs=combine_attrs, 1007 ) File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/core/combine.py:365, in _nested_combine(datasets, concat_dims, compat, data_vars, coords, ids, fill_value, join, combine_attrs) 362 _check_shape_tile_ids(combined_ids) 364 # Apply series of concatenate or merge operations along each dimension --> 365 combined = _combine_nd( 366 combined_ids, 367 concat_dims, 368 compat=compat, 369 data_vars=data_vars, 370 coords=coords, 371 fill_value=fill_value, 372 join=join, 373 combine_attrs=combine_attrs, 374 ) 375 return combined File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/core/combine.py:239, in _combine_nd(combined_ids, concat_dims, data_vars, coords, compat, fill_value, join, combine_attrs) 235 # Each iteration of this loop reduces the length of the tile_ids tuples 236 # by one. It always combines along the first dimension, removing the first 237 # element of the tuple 238 for concat_dim in concat_dims: --> 239 combined_ids = _combine_all_along_first_dim( 240 combined_ids, 241 dim=concat_dim, 242 data_vars=data_vars, 243 coords=coords, 244 compat=compat, 245 fill_value=fill_value, 246 join=join, 247 combine_attrs=combine_attrs, 248 ) 249 (combined_ds,) = combined_ids.values() 250 return combined_ds File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/core/combine.py:275, in _combine_all_along_first_dim(combined_ids, dim, data_vars, coords, compat, fill_value, join, combine_attrs) 273 combined_ids = dict(sorted(group)) 274 datasets = combined_ids.values() --> 275 new_combined_ids[new_id] = _combine_1d( 276 datasets, dim, compat, data_vars, coords, fill_value, join, combine_attrs 277 ) 278 return new_combined_ids File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/core/combine.py:298, in _combine_1d(datasets, concat_dim, compat, data_vars, coords, fill_value, join, combine_attrs) 296 if concat_dim is not None: 
297 try: --> 298 combined = concat( 299 datasets, 300 dim=concat_dim, 301 data_vars=data_vars, 302 coords=coords, 303 compat=compat, 304 fill_value=fill_value, 305 join=join, 306 combine_attrs=combine_attrs, 307 ) 308 except ValueError as err: 309 if "encountered unexpected variable" in str(err): File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/core/concat.py:243, in concat(objs, dim, data_vars, coords, compat, positions, fill_value, join, combine_attrs) 231 return _dataarray_concat( 232 objs, 233 dim=dim, (...) 240 combine_attrs=combine_attrs, 241 ) 242 elif isinstance(first_obj, Dataset): --> 243 return _dataset_concat( 244 objs, 245 dim=dim, 246 data_vars=data_vars, 247 coords=coords, 248 compat=compat, 249 positions=positions, 250 fill_value=fill_value, 251 join=join, 252 combine_attrs=combine_attrs, 253 ) 254 else: 255 raise TypeError( 256 "can only concatenate xarray Dataset and DataArray " 257 f"objects, got {type(first_obj)}" 258 ) File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/core/concat.py:558, in _dataset_concat(datasets, dim, data_vars, coords, compat, positions, fill_value, join, combine_attrs) 556 vars = ensure_common_dims([ds[name].variable for ds in datasets]) 557 except KeyError: --> 558 raise ValueError(f"{name!r} is not present in all datasets.") 560 # Try concatenate the indexes, concatenate the variables when no index 561 # is found on all datasets. 562 indexes: list[Index] = list(get_indexes(name)) ValueError: 'time_centered' is not present in all datasets.