%matplotlib inline
import pandas as pd
import socket
host = socket.getfqdn()
from core import load, zoom, calc, save,plots,monitor
#reload funcs after updating ./core/*.py
import importlib
importlib.reload(load)
importlib.reload(zoom)
importlib.reload(calc)
importlib.reload(save)
importlib.reload(plots)
importlib.reload(monitor)
<module 'core.monitor' from '/ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py'>
# 'month': = 'JOBID' almost month but not really,
# If you submit the job with job scheduler, above
#below is a list of environment variables one can pass
#%env local='2'
# local : if True run dask local cluster, if not true, put number of workers
# set in the 'local'
# if no 'local' given, local will be set automatically to 'True'
#%env ychunk='2'
#%env tchunk='2'
# controls chunk. 'False' sets no modification from original netcdf file's chunk.
# ychunk=10 will group the original netcdf file to 10 by 10
# tchunk=1 will chunk the time coordinate one by one
#%env control=FWC_SSH
# name of control file to be used for computation/plots/save/
#%env file_exp=
# 'file_exp': Which 'experiment' name is it?
#. this corresponds to intake catalog name without path and .yaml
#%env year=
# for Validation, this corresponds to path/year/month 's year
# for monitoring, this corresponds to 'date'; having * means do all files in the monitoring directory
# setting it as *0[0-9] & *1[0-9] & *[2-3][0-9], the job can be separated in three lots.
#%env month=
# for monitoring this corresponds to file path path-XIOS.{month}/
#
#%env save= proceed saving? True or False , Default is set as True
#%env plot= proceed plotting? True or False , Default is set as True
#%env calc= proceed computation? or just load computed result? True or False , Default is set as True
#%env save=False
%%time
# 'savefig': Do we save output in html? or not. keep it true.
savefig=True
# Derive the run configuration (dask client/cluster, control-file name, intake
# catalog URL, month/year selection, dask report dir, results dir) from the
# host name and the environment variables documented above.
client,cluster,control,catalog_url,month,year,daskreport,outputpath = load.set_control(host)
# Make sure the results and dask-report directories exist before anything writes there.
!mkdir -p $outputpath
!mkdir -p $daskreport
# Display the dask client widget (last expression of the cell).
client
local True using host= irene4035.c-irene.mg1.tgcc.ccc.cea.fr starting dask cluster on local= True workers 16 10000000000 False not local in tgcc rome local cluster starting This code is running on irene4035.c-irene.mg1.tgcc.ccc.cea.fr using SEDNA_ALPHA_MONITOR file experiment, read from ../lib/SEDNA_ALPHA_MONITOR.yaml on year= * on month= 23 outputpath= ../results/SEDNA_ALPHA_MONITOR/23/ daskreport= ../results/dask/2532068irene4035.c-irene.mg1.tgcc.ccc.cea.fr_SEDNA_ALPHA_MONITOR_23IceClim/ CPU times: user 373 ms, sys: 238 ms, total: 612 ms Wall time: 11.2 s
Client
|
Cluster
|
# Read the control file into a DataFrame: one row per computation, with the
# columns (Value, Inputs, Equation, Zone, Plot, Colourmap, MinMax, Unit, ...)
# shown in the output below.
df=load.controlfile(control)
#Take out 'later' tagged computations
#df=df[~df['Value'].str.contains('later')]
# Display the control table (last expression of the cell).
df
Value | Inputs | Equation | Zone | Plot | Colourmap | MinMax | Unit | Oldname | Unnamed: 10 | |
---|---|---|---|---|---|---|---|---|---|---|
IceClim | calc.IceClim_load(data,nc_outputpath) | ALL | IceClim | Spectral | (0,5) | m | M-4 |
Each computation consists of
%%time
import os
calcswitch=os.environ.get('calc', 'True')
loaddata=((df.Inputs != '').any())
print('calcswitch=',calcswitch,'df.Inputs != nothing',loaddata)
data = load.datas(catalog_url,df.Inputs,month,year,daskreport) if ((calcswitch=='True' )*loaddata) else 0
data
calcswitch= True df.Inputs != nothing False CPU times: user 364 µs, sys: 92 µs, total: 456 µs Wall time: 420 µs
0
%%time
# Run the monitoring pipeline over every row of the control table: for each
# computation it evaluates the Equation (here calc.IceClim_load), then saves
# and/or plots depending on the save/plot environment switches.  file_exp is
# the prefix used in output filenames.
monitor.auto(df,data,savefig,daskreport,outputpath,file_exp='SEDNA'
)
#calc= True #save= False #plot= True Zone='ALL' Value='IceClim' cmap='Spectral' clabel='m' clim= (0, 5) outputpath='../results/SEDNA_ALPHA_MONITOR/23/' nc_outputpath='../nc_results/SEDNA_ALPHA_MONITOR/23/' filename='SEDNA_IceClim_ALL_IceClim' #3 Start computing dtaa= calc.IceClim_load(data,nc_outputpath) start saving data filename= ../nc_results/SEDNA_ALPHA_MONITOR/23/SEDNA_maps_ALL_IceConce/t_*/x_*/y_*.nc
--------------------------------------------------------------------------- ValueError Traceback (most recent call last) <timed eval> in <module> /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py in auto(df, val, savefig, daskreport, outputpath, file_exp) 58 #print('count:',data.count()) 59 with performance_report(filename=daskreport+"_calc_"+step.Value+".html"): ---> 60 data=eval(command) 61 #print('persist ') 62 #data=data.persist() /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py in <module> /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/calc.py in IceClim_load(data, nc_outputpath) 229 import xarray as xr 230 filename='SEDNA_maps_ALL_IceConce' --> 231 ds=save.load_data(plot='map',path=nc_outputpath,filename=filename) 232 filename='SEDNA_maps_ALL_IceThickness' 233 ds['sivolu']=save.load_data(plot='map',path=nc_outputpath,filename=filename).sivolu /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/save.py in load_data(plot, path, filename) 36 data=load_twoD(path,filename,nested=False) 37 else: ---> 38 data=load_twoD(path,filename) 39 print('load computed data completed') 40 return data /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/save.py in load_twoD(path, filename, nested) 50 ,concat_dim=('x','y','t') 51 ,combine='by_coords' #param_xios ---> 52 ,coords='minimal') 53 54 def twoD(data,path='../result',filename='toto',nested=True): ~/monitor/lib/python3.7/site-packages/xarray/backends/api.py in open_mfdataset(paths, chunks, concat_dim, compat, preprocess, engine, lock, data_vars, coords, combine, autoclose, parallel, join, attrs_file, **kwargs) 980 coords=coords, 981 join=join, --> 982 combine_attrs="drop", 983 ) 984 else: ~/monitor/lib/python3.7/site-packages/xarray/core/combine.py in combine_by_coords(datasets, compat, data_vars, coords, fill_value, join, combine_attrs) 798 fill_value=fill_value, 799 join=join, --> 800 combine_attrs=combine_attrs, 801 ) 
~/monitor/lib/python3.7/site-packages/xarray/core/merge.py in merge(objects, compat, join, fill_value, combine_attrs) 860 join, 861 combine_attrs=combine_attrs, --> 862 fill_value=fill_value, 863 ) 864 merged = Dataset._construct_direct(**merge_result._asdict()) ~/monitor/lib/python3.7/site-packages/xarray/core/merge.py in merge_core(objects, compat, join, combine_attrs, priority_arg, explicit_coords, indexes, fill_value) 590 coerced = coerce_pandas_values(objects) 591 aligned = deep_align( --> 592 coerced, join=join, copy=False, indexes=indexes, fill_value=fill_value 593 ) 594 collected = collect_variables_and_indexes(aligned) ~/monitor/lib/python3.7/site-packages/xarray/core/alignment.py in deep_align(objects, join, copy, indexes, exclude, raise_on_invalid, fill_value) 425 indexes=indexes, 426 exclude=exclude, --> 427 fill_value=fill_value, 428 ) 429 ~/monitor/lib/python3.7/site-packages/xarray/core/alignment.py in align(join, copy, indexes, exclude, fill_value, *objects) 331 "aligned because they have different dimension size(s) %r " 332 "than the size of the aligned dimension labels: %r" --> 333 % (dim, unlabeled_sizes, labeled_size) 334 ) 335 ValueError: arguments without labels along dimension 'y' cannot be aligned because they have different dimension size(s) {120} than the size of the aligned dimension labels: 4572