%matplotlib inline
import pandas as pd
import socket
host = socket.getfqdn()
from core import load, zoom, calc, save, plots, monitor
#reload funcs after updating ./core/*.py
import importlib
importlib.reload(load)
importlib.reload(zoom)
importlib.reload(calc)
importlib.reload(save)
importlib.reload(plots)
importlib.reload(monitor)
<module 'core.monitor' from '/ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py'>
If you submit the job with a job scheduler, below is the list of environment variables you can pass.
local: if True, run a Dask local cluster; if not True, it is taken as the number of workers to use. If 'local' is not given, it is automatically set to 'True'.
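A minimal sketch (not the actual core/load.py) of how the 'local' setting could be interpreted; the variable name and defaults here are assumptions:
import os
from dask.distributed import Client, LocalCluster

local = os.environ.get('local', 'True')
if local == 'True':
    cluster = LocalCluster()                       # let dask choose workers/threads
else:
    cluster = LocalCluster(n_workers=int(local))   # 'local' holds a worker count
client = Client(cluster)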
%env ychunk='2'  # %env tchunk='2'
These control the chunking. 'False' keeps the chunking of the original netCDF files unchanged.
ychunk=10 groups the original netCDF files 10 by 10 along y.
tchunk=1 chunks the time coordinate one step at a time.
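A rough illustration of the kind of chunking ychunk/tchunk control when the files are opened with xarray/dask; the dimension names ('y', 'time_counter') and the exact mapping to ychunk/tchunk are assumptions:
import xarray as xr

ds = xr.open_mfdataset(
    'path/to/files_*.nc',                    # illustrative pattern only
    chunks={'y': 10, 'time_counter': 1},     # ychunk=10, tchunk=1
    parallel=True,
)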
%env file_exp=
'file_exp': which 'experiment' is it? This corresponds to the intake catalog name, without the path and the .yaml extension.
#%env year=
For Validation, this corresponds to the year in path/year/month. For Monitoring, it corresponds to a 'date' pattern: setting it to 0[0-9], 1[0-9] or [2-3][0-9] means "do all matching files in the monitoring directory", so the job can be split into three lots. For a DELTA experiment, year really is the 'year'.
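For illustration only, the three 'year' values that split one monitored month into three lots would be (patterns taken from the description above, everything else hypothetical):
lots = ['0[0-9]', '1[0-9]', '[2-3][0-9]']  # days 00-09, 10-19, 20-39: one scheduler job per lot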
%env month=
For Monitoring, this corresponds to the {month} in the file path path-XIOS.{month}/.
For a DELTA experiment, month really is the 'month'.
Proceed with saving? True or False. Default is True.
Proceed with plotting? True or False. Default is True.
Proceed with the computation, or just load the computed result? True or False. Default is True.
Save the output file used for plotting.
Using a kerchunked file -> False; not using kerchunk -> True.
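For reference, a hedged example of what reading a kerchunked dataset typically looks like (standard kerchunk/fsspec usage, not necessarily what core/load.py does); 'combined.json' is a placeholder reference file:
import xarray as xr

ds = xr.open_dataset(
    'reference://',
    engine='zarr',
    backend_kwargs={
        'consolidated': False,
        'storage_options': {'fo': 'combined.json', 'remote_protocol': 'file'},
    },
)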
Name of the control file to be used for computation/plots/saving. There are a number of M_xxx.csv control files (see the example configuration cell after the list below).
Monitor.sh calls M_MLD_2D,
and AWTD.sh, Fluxnet.sh, Siconc.sh, IceClim.sh, FWC_SSH.sh, Integrals.sh, Sections.sh call:
M_AWTMD
M_Fluxnet
M_Ice_quantities
M_IceClim M_IceConce M_IceThick
M_FWC_2D M_FWC_integrals M_FWC_SSH M_SSH_anomaly
M_Mean_temp_velo M_Mooring
M_Sectionx M_Sectiony
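For example, a job script or a first notebook cell could set the variables along these lines; the 'control' name is an assumption, the values are examples, and the other names appear above or in the cells below:
# 'control' variable name is an assumption; values are examples only
%env local=True
%env ychunk=10
%env tchunk=1
%env file_exp=SEDNA_DELTA_MONITOR
%env year=2012
%env month=04
%env calc=True
%env lazy=False
%env control=M_FWC_SSH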
%%time
# 'savefig': do we save the output as HTML? Keep it True.
savefig=True
client, cluster, control, catalog_url, month, year, daskreport, outputpath = load.set_control(host)
!mkdir -p $outputpath
!mkdir -p $daskreport
client
local True using host= irene4492.c-irene.mg1.tgcc.ccc.cea.fr
starting dask cluster on local= True workers 16 10000000000 rome
local cluster starting
This code is running on irene4492.c-irene.mg1.tgcc.ccc.cea.fr using SEDNA_DELTA_MONITOR file experiment, read from ../lib/SEDNA_DELTA_MONITOR.yaml on year= 2012 on month= 04
outputpath= ../results/SEDNA_DELTA_MONITOR/ daskreport= ../results/dask/6470427irene4492.c-irene.mg1.tgcc.ccc.cea.fr_SEDNA_DELTA_MONITOR_04M_FWC_SSH/
CPU times: user 581 ms, sys: 136 ms, total: 718 ms
Wall time: 19.7 s
Client-ccb09602-18f7-11ed-9c73-080038b93df7
Connection method: Cluster object | Cluster type: distributed.LocalCluster
Dashboard: http://127.0.0.1:8787/status
LocalCluster 785a969b | Workers: 16 | Total threads: 128 | Total memory: 251.06 GiB | Status: running | Using processes: True
Scheduler-61cf8588-e205-4b70-abd1-3a7b618688b4 | Comm: tcp://127.0.0.1:35702
(16 workers, each with 8 threads and 15.69 GiB memory; per-worker comm, dashboard and nanny addresses and local dask-worker-space directories omitted)
df=load.controlfile(control)
#Take out 'later' tagged computations
#df=df[~df['Value'].str.contains('later')]
df
| Value | Inputs | Equation | Zone | Plot | Colourmap | MinMax | Unit | Oldname | Unnamed: 10 |
|---|---|---|---|---|---|---|---|---|---|
| FWC_SSH | | calc.FWC_SSH_load(data,nc_outputpath) | BBFG | FWC_SSH | None | None | m | S-1 | |
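A minimal sketch, assuming the control file is a plain CSV with the columns shown above; load.controlfile presumably wraps something along these lines, and the path is purely illustrative:
import pandas as pd
df = pd.read_csv('../control/M_FWC_SSH.csv')              # hypothetical path to a control file
df = df[~df['Value'].astype(str).str.contains('later')]   # optionally drop 'later'-tagged rows (cf. the commented line above)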
Each computation consists of loading the input data, evaluating the Equation, saving the result, and plotting it.
%%time
import os
calcswitch = os.environ.get('calc', 'True')
lazy = os.environ.get('lazy', 'False')
loaddata = (df.Inputs != '').any()
print('calcswitch=', calcswitch, 'df.Inputs != nothing', loaddata, 'lazy=', lazy)
data = load.datas(catalog_url, df.Inputs, month, year, daskreport, lazy=lazy) if (calcswitch == 'True' and loaddata) else 0
data
calcswitch= True df.Inputs != nothing False lazy= False
CPU times: user 373 µs, sys: 56 µs, total: 429 µs
Wall time: 428 µs
0
%%time
monitor.auto(df, data, savefig, daskreport, outputpath, file_exp='SEDNA')
#calc= True #save= False #plot= True
Value='FWC_SSH' Zone='BBFG' Plot='FWC_SSH' cmap='None' clabel='m' clim= None
outputpath='../results/SEDNA_DELTA_MONITOR/' nc_outputpath='../nc_results/SEDNA_DELTA_MONITOR/'
filename='SEDNA_FWC_SSH_BBFG_FWC_SSH'
#3 Start computing
data= calc.FWC_SSH_load(data,nc_outputpath)
monitor.optimize_dataset(data)
start loading data
filename= ../nc_results/SEDNA_DELTA_MONITOR/SEDNA_maps_ALL_SSH_anomaly/t_*/y_*/x_*.nc dim ('x', 'y', 't')
load computed data completed
start loading data
filename= ../nc_results/SEDNA_DELTA_MONITOR/SEDNA_maps_BBFG_FWC_2D/t_*/y_*/x_*.nc dim ('x', 'y', 't')
---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
File <timed eval>:1, in <module>
File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py:67, in auto(df, val, savefig, daskreport, outputpath, file_exp)
     66 with performance_report(filename=daskreport+"_calc_"+step.Value+".html"):
---> 67     data=eval(command)
File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/calc.py:225, in FWC_SSH_load(data, nc_outputpath)
    224 filename='SEDNA_maps_BBFG_FWC_2D'
--> 225 ds['FWC2D']=save.load_data(plot='map',path=nc_outputpath,filename=filename).FWC2D
File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/save.py:48, in load_twoD(path, filename, nested)
---> 48 return xr.open_mfdataset(filename, parallel=True, compat='override', data_vars='minimal', coords='minimal')
File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/api.py:1000, in open_mfdataset(...)
(intermediate xarray combine_by_coords / concat frames omitted)
ValueError: Variables {'time_centered'} are coordinates in some datasets but not others.
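The failure comes from xr.open_mfdataset combining files in which 'time_centered' is a coordinate in only some of the datasets. One common workaround, shown here as a sketch and not necessarily the fix applied in core/save.py, is to drop the inconsistent coordinate in a preprocess callback before combining:
import xarray as xr

def drop_inconsistent(ds):
    # 'time_centered' is a coordinate in some files but not others,
    # which breaks combine_by_coords; drop it if present.
    return ds.drop_vars('time_centered', errors='ignore')

ds = xr.open_mfdataset(
    '../nc_results/SEDNA_DELTA_MONITOR/SEDNA_maps_BBFG_FWC_2D/t_*/y_*/x_*.nc',
    preprocess=drop_inconsistent,
    parallel=True,
    compat='override',
    data_vars='minimal',
    coords='minimal',
)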