%matplotlib inline
import pandas as pd
import socket
host = socket.getfqdn()
from core import load, zoom, calc, save, plots, monitor
#reload funcs after updating ./core/*.py
import importlib
importlib.reload(load)
importlib.reload(zoom)
importlib.reload(calc)
importlib.reload(save)
importlib.reload(plots)
importlib.reload(monitor)
<module 'core.monitor' from '/ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py'>
If you submit the job with a job scheduler, the environment variables listed below can be passed.
local : if True, run a local dask cluster; otherwise the value is taken as the number of workers. If no 'local' is given, it defaults to 'True'.
%env ychunk='2'  #%env tchunk='2'
These control chunking. 'False' keeps the chunking of the original netcdf files unchanged.
ychunk=10 groups the original netcdf files' chunks 10 by 10 along y.
tchunk=1 chunks the time coordinate one step at a time, as sketched below.
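For orientation, a minimal sketch of how such chunk sizes could be passed to xarray; the file name and the dimension names ('y', 'time_counter') are assumptions, not the notebook's actual open call:

import xarray as xr
# Illustrative only: open one file with explicit dask chunks along y and time.
ds = xr.open_dataset('gridT.nc', chunks={'y': 10, 'time_counter': 1})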
%env file_exp=
'file_exp': which 'experiment' is it? This corresponds to the intake catalog name, without the path and the .yaml extension.
#%env year=
For Validation, this corresponds to the 'year' of path/year/month. For monitoring, it corresponds to a 'date' pattern: setting it to 0[0-9], 1[0-9] and [2-3][0-9] means processing all files in the monitoring directory, so the job can be split into three lots. For a DELTA experiment, 'year' really is the year.
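A minimal sketch of resolving 'file_exp' to its intake catalog; the path follows the load.set_control output further down ("read from ../lib/SEDNA_DELTA_MONITOR.yaml"), and listing the entries is illustrative:

import intake
# Open the experiment's catalog and list the data sources it defines.
cat = intake.open_catalog('../lib/SEDNA_DELTA_MONITOR.yaml')
print(list(cat))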
%env month=
For monitoring, this corresponds to the file path path-XIOS.{month}/.
For a DELTA experiment, 'month' really is the month.
'save': proceed with saving? True or False; the default is True.
'plot': proceed with plotting? True or False; the default is True.
'calc': proceed with the computation, or just load the computed result? True or False; the default is True.
Saves the output file used for plotting.
Using a kerchunked file -> False; not using kerchunk -> True (see the sketch below).
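For reference, a minimal sketch of opening a kerchunked dataset through xarray's zarr engine; the reference JSON name and protocol are assumptions, not this notebook's actual configuration:

import xarray as xr
# 'combined.json' is a hypothetical kerchunk reference file.
ds = xr.open_dataset(
    'reference://', engine='zarr',
    backend_kwargs={'consolidated': False,
                    'storage_options': {'fo': 'combined.json',
                                        'remote_protocol': 'file'}})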
Name of the control file used for computation/plots/saving. There are a number of M_xxx.csv files:
Monitor.sh calls M_MLD_2D,
and AWTD.sh, Fluxnet.sh, Siconc.sh, IceClim.sh, FWC_SSH.sh, Integrals.sh and Sections.sh call:
M_AWTMD
M_Fluxnet
M_Ice_quantities
M_IceClim M_IceConce M_IceThick
M_FWC_2D M_FWC_integrals M_FWC_SSH M_SSH_anomaly
M_Mean_temp_velo M_Mooring
M_Sectionx M_Sectiony
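A minimal sketch of inspecting one of these control files with pandas; the file name is an assumption (taken from this run's M_Mooring control), and the columns match the dataframe shown further down:

import pandas as pd
# Read one control file and show the computations it defines.
df = pd.read_csv('M_Mooring.csv')
print(df[['Value', 'Zone', 'Plot']])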
%%time
# 'savefig': Do we save output in html? or not. keep it true.
savefig=True
client,cluster,control,catalog_url,month,year,daskreport,outputpath = load.set_control(host)
!mkdir -p $outputpath
!mkdir -p $daskreport
client
local True
using host= irene5422.c-irene.mg1.tgcc.ccc.cea.fr
starting dask cluster on local= True workers 16
10000000000 rome
local cluster starting
This code is running on irene5422.c-irene.mg1.tgcc.ccc.cea.fr
using SEDNA_DELTA_MONITOR file experiment, read from ../lib/SEDNA_DELTA_MONITOR.yaml
on year= 2012
on month= 06
outputpath= ../results/SEDNA_DELTA_MONITOR/
daskreport= ../results/dask/6476197irene5422.c-irene.mg1.tgcc.ccc.cea.fr_SEDNA_DELTA_MONITOR_06M_Mooring/
CPU times: user 533 ms, sys: 119 ms, total: 652 ms
Wall time: 19.5 s
Client-3c21bebf-1970-11ed-ae83-080038b945b1
Connection method: Cluster object | Cluster type: distributed.LocalCluster
Dashboard: http://127.0.0.1:8787/status
LocalCluster b1ab871b: Status: running | Using processes: True | Workers: 16 | Total threads: 128 | Total memory: 251.06 GiB
Scheduler-05e5b1f4-80f3-480a-8465-c425ab749cea: Comm: tcp://127.0.0.1:38037 | Started: Just now
Each of the 16 workers: 8 threads | 15.69 GiB memory | local directory under /tmp/dask-worker-space/
df=load.controlfile(control)
#Take out 'later' tagged computations
#df=df[~df['Value'].str.contains('later')]
df
| Value | Inputs | Equation | Zone | Plot | Colourmap | MinMax | Unit | Oldname |
|---|---|---|---|---|---|---|---|---|
| Mooring_Arc_B | gridS.vosaline,gridT.votemper,param.depth,para... | data | Arc_B | Mooring | rainbow | {'vosaline': (28.0, 34.4), 'votemper': (-2.0, 2.0)} | None | x |
| Mooring_Eur_B | gridS.vosaline,gridT.votemper,param.depth,para... | data | Eur_B | Mooring | rainbow | {'vosaline': (32.0, 35.0), 'votemper': (-2.0, 3.0)} | None | x |
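The MinMax column stores the colour limits as a Python-literal string; a minimal sketch of turning it into a dict, assuming it is read back from the CSV as text:

import ast
# Parse the first row's MinMax string into a dict of (min, max) tuples.
clim = ast.literal_eval("{'vosaline': (28.0, 34.4), 'votemper': (-2.0, 2.0)}")
print(clim['vosaline'])  # (28.0, 34.4)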
Each computation consists of the calc, save and plot steps controlled by the switches above.
%%time
import os
calcswitch = os.environ.get('calc', 'True')
lazy = os.environ.get('lazy', 'False')
loaddata = (df.Inputs != '').any()
print('calcswitch=', calcswitch, 'df.Inputs != nothing', loaddata, 'lazy=', lazy)
data = load.datas(catalog_url, df.Inputs, month, year, daskreport, lazy=lazy) if (calcswitch == 'True' and loaddata) else 0
data
calcswitch= False df.Inputs != nothing True lazy= False
CPU times: user 323 µs, sys: 52 µs, total: 375 µs
Wall time: 375 µs
0
%%time
monitor.auto(df, data, savefig, daskreport, outputpath, file_exp='SEDNA')
#calc= False #save= False #plot= True
Value='Mooring_Arc_B' Zone='Arc_B' Plot='Mooring' cmap='rainbow' clabel='None'
clim= {'vosaline': (28.0, 34.4), 'votemper': (-2.0, 2.0)}
outputpath='../results/SEDNA_DELTA_MONITOR/' nc_outputpath='../nc_results/SEDNA_DELTA_MONITOR/'
filename='SEDNA_Mooring_Arc_B_Mooring_Arc_B'
#3 no computing, loading starts
data=save.load_data(plot=Plot,path=nc_outputpath,filename=filename)
start loading data
load 1Dnc file from ../nc_results/SEDNA_DELTA_MONITOR/../*/SEDNA_Mooring_Arc_B_Mooring_Arc_B*.nc
---------------------------------------------------------------------------
KeyError                                  Traceback (most recent call last)
File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/core/dataset.py:1317, in Dataset._construct_dataarray(self, name)
    variable = self._variables[name]
KeyError: 'time_centered'

During handling of the above exception, another exception occurred:

ValueError                                Traceback (most recent call last)
File <timed eval>:1, in <module>
File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py:79, in auto(df, val, savefig, daskreport, outputpath, file_exp)
    data=save.load_data(plot=Plot,path=nc_outputpath,filename=filename)
File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/save.py:34, in load_data(plot, path, filename)
    data=load_integral(path,filename)
File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/save.py:74, in load_integral(path, filename)
    return xr.open_mfdataset(filesave, compat='override', data_vars='minimal', concat_dim=('t'), combine='nested', coords='minimal')
File .../site-packages/xarray/backends/api.py:987, in open_mfdataset(...)
File .../site-packages/xarray/core/combine.py, via _nested_combine -> _combine_nd -> _combine_all_along_first_dim -> _combine_1d -> concat
File .../site-packages/xarray/core/concat.py:558, in _dataset_concat(...)
    raise ValueError(f"{name!r} is not present in all datasets.")
ValueError: 'time_centered' is not present in all datasets.
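The failure means that at least one of the NetCDF files matched by the glob lacks the 'time_centered' coordinate, so the nested concatenation cannot line the datasets up. One possible workaround, as a minimal sketch (not this repository's actual fix), is to drop the inconsistent coordinate in a preprocess step before concatenation:

import xarray as xr

def drop_time_centered(ds):
    # Drop the coordinate that is missing from some files; errors='ignore'
    # leaves files that never had it untouched.
    return ds.drop_vars('time_centered', errors='ignore')

data = xr.open_mfdataset(
    '../nc_results/SEDNA_DELTA_MONITOR/../*/SEDNA_Mooring_Arc_B_Mooring_Arc_B*.nc',
    preprocess=drop_time_centered,
    compat='override', data_vars='minimal',
    concat_dim='t', combine='nested', coords='minimal')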