# Notebook setup: render matplotlib figures inline and record the
# fully-qualified name of the machine we run on (handed to load.set_control
# further down).
%matplotlib inline
import pandas as pd
import socket
host = socket.getfqdn()
# Project helper modules (all under ./core/): data loading, regional zoom,
# computations, saving, plotting, and the monitoring driver.
from core import load, zoom, calc, save,plots,monitor
# Reload the helpers after editing ./core/*.py so changes take effect
# without restarting the kernel.
import importlib
importlib.reload(load)
importlib.reload(zoom)
importlib.reload(calc)
importlib.reload(save)
importlib.reload(plots)
importlib.reload(monitor)
<module 'core.monitor' from '/ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py'>
# Environment-variable configuration.
# When this notebook is submitted through a job scheduler, the variables
# below can be passed in from the job script; the %env lines further down
# are the interactive defaults.
#
# local   : if True, run a local dask cluster; if a number, use that many
#           workers.  When 'local' is not given it defaults to True.
#           example: %env local=2
#%env ychunk='2'
#%env tchunk='2'
# ychunk / tchunk control chunking.  'False' keeps the chunking of the
# original netcdf file unchanged;
# ychunk=10 groups the original netcdf files 10 by 10;
# tchunk=1 chunks the time coordinate one by one.
#%env control=FWC_SSH
# control : name of the control file used for computation/plots/saving.
#%env file_exp=
# file_exp: which 'experiment' to use; this corresponds to the intake
#           catalog name without the path and the .yaml suffix.
#%env year=
# year    : for validation, the year in path/year/month;
#           for monitoring, the 'date' — a * means all files in the
#           monitoring directory.  Setting it to *0[0-9], *1[0-9] and
#           *[2-3][0-9] splits the job into three lots.
#%env month=
# month   : for monitoring this selects the file path path-XIOS.{month}/
#           (it tracks the JOBID — almost a calendar month, but not exactly).
#
# save    : proceed with saving?   True or False, default True.
# plot    : proceed with plotting? True or False, default True.
# calc    : proceed with computation, or just load the computed result?
#           True or False, default True.
#%env save=False
#%env lazy=False
%env file_exp=SEDNA_DELTA_MONITOR
%env year=2012
%env month=02
%env ychunk=10
%env save=False
%env plot=True
%env calc=True
%env lazy=
# (left empty here; use %env lazy=False to set it to False explicitly)
%env control=M_IceClim
# M_Sectiony is known to work with ychunk=False local=True lazy=False
env: file_exp=SEDNA_DELTA_MONITOR env: year=2012 env: month=02 env: ychunk=10 env: save=False env: plot=True env: calc=True env: lazy= env: control=M_IceClim
%%time
# 'savefig': save plot output as html?  Keep it True.
savefig=True
# Read the environment variables set above, start the dask cluster, and
# return the run configuration: dask client/cluster, control-file name,
# intake catalog url, month/year selection, and the dask-report and
# result output directories.
client,cluster,control,catalog_url,month,year,daskreport,outputpath = load.set_control(host)
# Make sure both output directories exist before anything writes to them.
!mkdir -p $outputpath
!mkdir -p $daskreport
client  # display the cluster summary / dashboard link
local True using host= irene4708.c-irene.mg1.tgcc.ccc.cea.fr starting dask cluster on local= True workers 16 10000000000 False rome local cluster starting This code is running on irene4708.c-irene.mg1.tgcc.ccc.cea.fr using SEDNA_DELTA_MONITOR file experiment, read from ../lib/SEDNA_DELTA_MONITOR.yaml on year= 2012 on month= 02 outputpath= ../results/SEDNA_DELTA_MONITOR/ daskreport= ../results/dask/6414218irene4708.c-irene.mg1.tgcc.ccc.cea.fr_SEDNA_DELTA_MONITOR_02M_IceClim/_lazy CPU times: user 547 ms, sys: 146 ms, total: 693 ms Wall time: 21.1 s
Client-290030f6-134b-11ed-b2b3-080038b93ac7
Connection method: Cluster object | Cluster type: distributed.LocalCluster |
Dashboard: http://127.0.0.1:8787/status |
56ea27d5
Dashboard: http://127.0.0.1:8787/status | Workers: 16 |
Total threads: 128 | Total memory: 251.06 GiB |
Status: running | Using processes: True |
Scheduler-467beff6-0e53-4116-83e0-c74e36b7a8cc
Comm: tcp://127.0.0.1:41446 | Workers: 16 |
Dashboard: http://127.0.0.1:8787/status | Total threads: 128 |
Started: Just now | Total memory: 251.06 GiB |
Comm: tcp://127.0.0.1:46349 | Total threads: 8 |
Dashboard: http://127.0.0.1:42903/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:36720 | |
Local directory: /tmp/dask-worker-space/worker-d6kfymj1 |
Comm: tcp://127.0.0.1:43734 | Total threads: 8 |
Dashboard: http://127.0.0.1:36908/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:33187 | |
Local directory: /tmp/dask-worker-space/worker-bkfyn9wn |
Comm: tcp://127.0.0.1:33012 | Total threads: 8 |
Dashboard: http://127.0.0.1:37750/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:40516 | |
Local directory: /tmp/dask-worker-space/worker-tbbje_c1 |
Comm: tcp://127.0.0.1:38632 | Total threads: 8 |
Dashboard: http://127.0.0.1:33514/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:32789 | |
Local directory: /tmp/dask-worker-space/worker-7fc1y2ms |
Comm: tcp://127.0.0.1:36043 | Total threads: 8 |
Dashboard: http://127.0.0.1:35458/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:34539 | |
Local directory: /tmp/dask-worker-space/worker-u4sx71jx |
Comm: tcp://127.0.0.1:46816 | Total threads: 8 |
Dashboard: http://127.0.0.1:37712/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:40954 | |
Local directory: /tmp/dask-worker-space/worker-wkq5g4d0 |
Comm: tcp://127.0.0.1:40950 | Total threads: 8 |
Dashboard: http://127.0.0.1:46441/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:45381 | |
Local directory: /tmp/dask-worker-space/worker-8njxazok |
Comm: tcp://127.0.0.1:36231 | Total threads: 8 |
Dashboard: http://127.0.0.1:37619/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:34439 | |
Local directory: /tmp/dask-worker-space/worker-vhx7xsmf |
Comm: tcp://127.0.0.1:39367 | Total threads: 8 |
Dashboard: http://127.0.0.1:44076/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:43521 | |
Local directory: /tmp/dask-worker-space/worker-st3ml870 |
Comm: tcp://127.0.0.1:38062 | Total threads: 8 |
Dashboard: http://127.0.0.1:41307/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:37566 | |
Local directory: /tmp/dask-worker-space/worker-avotknuj |
Comm: tcp://127.0.0.1:40482 | Total threads: 8 |
Dashboard: http://127.0.0.1:37145/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:39282 | |
Local directory: /tmp/dask-worker-space/worker-r4yifjzg |
Comm: tcp://127.0.0.1:42935 | Total threads: 8 |
Dashboard: http://127.0.0.1:44469/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:38481 | |
Local directory: /tmp/dask-worker-space/worker-u021cnqd |
Comm: tcp://127.0.0.1:44199 | Total threads: 8 |
Dashboard: http://127.0.0.1:40891/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:39487 | |
Local directory: /tmp/dask-worker-space/worker-fy4_g7gl |
Comm: tcp://127.0.0.1:34448 | Total threads: 8 |
Dashboard: http://127.0.0.1:44661/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:36315 | |
Local directory: /tmp/dask-worker-space/worker-rk5ufvxz |
Comm: tcp://127.0.0.1:40430 | Total threads: 8 |
Dashboard: http://127.0.0.1:46836/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:46411 | |
Local directory: /tmp/dask-worker-space/worker-dfy_9w38 |
Comm: tcp://127.0.0.1:43582 | Total threads: 8 |
Dashboard: http://127.0.0.1:37723/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:36215 | |
Local directory: /tmp/dask-worker-space/worker-gqo3d3bm |
# Load the control table for the chosen control file: one row per
# computation/plot step (Value, Inputs, Equation, Zone, Plot, ... columns).
df=load.controlfile(control)
# Optionally drop computations tagged 'later' (disabled):
#df=df[~df['Value'].str.contains('later')]
df  # display the table
Value | Inputs | Equation | Zone | Plot | Colourmap | MinMax | Unit | Oldname | Unnamed: 10 | |
---|---|---|---|---|---|---|---|---|---|---|
IceClim | calc.IceClim_load(data,nc_outputpath) | ALL | IceClim | Spectral | (0,5) | m | M-4 |
Each computation step is driven by one row of the control table above.
%%time
import os
calcswitch=os.environ.get('calc', 'True')
lazy=os.environ.get('lazy','True' )
loaddata=((df.Inputs != '').any())
print('calcswitch=',calcswitch,'df.Inputs != nothing',loaddata, 'lazy=',lazy)
data = load.datas(catalog_url,df.Inputs,month,year,daskreport,lazy=lazy) if ((calcswitch=='True' )*loaddata) else 0
data
calcswitch= True df.Inputs != nothing False lazy= CPU times: user 334 µs, sys: 54 µs, total: 388 µs Wall time: 377 µs
0
%%time
# Drive the monitoring: for each row of the control table df, evaluate its
# Equation (see core/monitor.py and core/calc.py), and — according to the
# save/plot switches — save and plot the result.  'SEDNA' is the prefix
# used for the output file names.
monitor.auto(df,data,savefig,daskreport,outputpath,file_exp='SEDNA'
)
#calc= True #save= False #plot= True Value='IceClim' Zone='ALL' Plot='IceClim' cmap='Spectral' clabel='m' clim= (0, 5) outputpath='../results/SEDNA_DELTA_MONITOR/' nc_outputpath='../nc_results/SEDNA_DELTA_MONITOR/' filename='SEDNA_IceClim_ALL_IceClim' #3 Start computing dtaa= calc.IceClim_load(data,nc_outputpath) start saving data filename= ../nc_results/SEDNA_DELTA_MONITOR/SEDNA_maps_ALL_IceConce/t_*/x_*/y_*.nc
--------------------------------------------------------------------------- ValueError Traceback (most recent call last) File <timed eval>:1, in <module> File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py:64, in auto(df, val, savefig, daskreport, outputpath, file_exp) 62 #print('count:',data.count()) 63 with performance_report(filename=daskreport+"_calc_"+step.Value+".html"): ---> 64 data=eval(command) 65 #print('persist ') 66 #data=data.persist() 67 #print(data.count()) 68 #print('nbytes:',data.nbytes) 69 #print('count:',data.count()) 70 optimize_dataset(data) File <string>:1, in <module> File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/calc.py:231, in IceClim_load(data, nc_outputpath) 229 import xarray as xr 230 filename='SEDNA_maps_ALL_IceConce' --> 231 ds=save.load_data(plot='map',path=nc_outputpath,filename=filename) 232 filename='SEDNA_maps_ALL_IceThickness' 233 ds['sivolu']=save.load_data(plot='map',path=nc_outputpath,filename=filename).sivolu File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/save.py:38, in load_data(plot, path, filename) 36 data=load_twoD(path,filename,nested=False) 37 else: ---> 38 data=load_twoD(path,filename) 39 print('load computed data completed') 40 return data File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/save.py:47, in load_twoD(path, filename, nested) 45 filename=filesave+'/t_*/x_*/y_*.nc' if nested else filesave+'/t_*.nc' 46 print ('filename=',filename) ---> 47 return xr.open_mfdataset(filename,parallel=True 48 ,compat='override' 49 ,data_vars='minimal' 50 ,concat_dim=('x','y','t') 51 ,combine='by_coords' #param_xios 52 ,coords='minimal') File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/api.py:952, in open_mfdataset(paths, chunks, concat_dim, compat, preprocess, engine, data_vars, coords, combine, parallel, join, attrs_file, combine_attrs, **kwargs) 947 ids, paths = ( 948 list(combined_ids_paths.keys()), 
949 list(combined_ids_paths.values()), 950 ) 951 elif combine == "by_coords" and concat_dim is not None: --> 952 raise ValueError( 953 "When combine='by_coords', passing a value for `concat_dim` has no " 954 "effect. To manually combine along a specific dimension you should " 955 "instead specify combine='nested' along with a value for `concat_dim`.", 956 ) 958 open_kwargs = dict(engine=engine, chunks=chunks or {}, **kwargs) 960 if parallel: ValueError: When combine='by_coords', passing a value for `concat_dim` has no effect. To manually combine along a specific dimension you should instead specify combine='nested' along with a value for `concat_dim`.