In [1]:
%matplotlib inline
import pandas as pd
import socket
host = socket.getfqdn()

from core import load, zoom, calc, save, plots, monitor
In [2]:
#reload funcs after updating ./core/*.py
import importlib
importlib.reload(load)
importlib.reload(zoom)
importlib.reload(calc)
importlib.reload(save)
importlib.reload(plots)
importlib.reload(monitor)
Out[2]:
<module 'core.monitor' from '/ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py'>
In [3]:
# 'month' is set from 'JOBID' when the job is submitted through the job scheduler;
# it is close to a calendar month, but not exactly one.
#
# Below is the list of environment variables one can pass.
#%env local='2'
# local : if True, run a local dask cluster; otherwise give the number of workers
#         to start in 'local'.
#         If 'local' is not given, it defaults to 'True'.
#%env ychunk='2'
#%env tchunk='2'
# Control chunking. 'False' keeps the chunking of the original netCDF file.
# ychunk=10 groups the original netCDF files 10 by 10.
# tchunk=1 chunks the time coordinate one step at a time.
#%env control=FWC_SSH
# Name of the control file used for computation/plots/save.
#%env file_exp=
# 'file_exp': which 'experiment' is it?
#             This corresponds to the intake catalog name, without the path and the .yaml extension.
#%env year=
# For validation, this corresponds to the year in path/year/month.
# For monitoring, this corresponds to 'date'; a value containing * processes all files
# in the monitoring directory.
# Setting it to *0[0-9], *1[0-9] and *[2-3][0-9] splits the job into three lots.
#%env month=
# For monitoring, this corresponds to the file path path-XIOS.{month}/
#
#%env save=   proceed with saving?      True or False, default is True
#%env plot=   proceed with plotting?    True or False, default is True
#%env calc=   proceed with computation, or just load a computed result?  True or False, default is True
#%env save=False
#%env lazy=False
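For example, a run of the SEDNA_DELTA_MONITOR experiment restricted to January 2012 could be configured like this before executing the next cell (example values only, taken from the comments above and the log below; any variable left unset falls back to its default):

%env local=True
%env control=FWC_SSH
%env file_exp=SEDNA_DELTA_MONITOR
%env year=2012
%env month=01
%env calc=True
%env save=True
%env plot=True
%env lazy=False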
In [4]:
%%time
# 'savefig': do we save the output as html or not? Keep it True.
savefig=True
client,cluster,control,catalog_url,month,year,daskreport,outputpath = load.set_control(host)
!mkdir -p $outputpath
!mkdir -p $daskreport
client
local True
using host= irene4670.c-irene.mg1.tgcc.ccc.cea.fr starting dask cluster on local= True workers 16
10000000000
False
rome local cluster starting
This code is running on  irene4670.c-irene.mg1.tgcc.ccc.cea.fr using  SEDNA_DELTA_MONITOR file experiment, read from  ../lib/SEDNA_DELTA_MONITOR.yaml  on year= 2012  on month= 01  outputpath= ../results/SEDNA_DELTA_MONITOR/ daskreport= ../results/dask/6413732irene4670.c-irene.mg1.tgcc.ccc.cea.fr_SEDNA_DELTA_MONITOR_01M_Fluxnet/
CPU times: user 493 ms, sys: 160 ms, total: 653 ms
Wall time: 18.8 s
Out[4]:

Client
    Client-6593a3bf-1343-11ed-8f29-080038b93af9
    Connection method: Cluster object    Cluster type: distributed.LocalCluster
    Dashboard: http://127.0.0.1:8787/status

LocalCluster (8cc78e16)
    Status: running    Using processes: True
    Workers: 16    Total threads: 128    Total memory: 251.06 GiB

Scheduler
    Scheduler-625264ea-3ef3-42f9-8640-64fd0b8caff0
    Comm: tcp://127.0.0.1:36381    Dashboard: http://127.0.0.1:8787/status
    Started: Just now    Workers: 16    Total threads: 128    Total memory: 251.06 GiB

Workers 0-15: 8 threads and 15.69 GiB memory each, nannies on localhost,
    local directories under /tmp/dask-worker-space/
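For reference, the cluster that load.set_control starts here is equivalent to a plain dask LocalCluster of 16 processes with 8 threads each. A minimal stand-alone sketch (the per-worker memory limit is read off the summary above and is approximate):

from dask.distributed import Client, LocalCluster

# Sketch only: a local cluster matching the configuration reported above
# (16 workers x 8 threads, ~15.69 GiB per worker).
cluster = LocalCluster(n_workers=16, threads_per_worker=8,
                       memory_limit='15.69GiB', processes=True)
client = Client(cluster)
print(client.dashboard_link)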

Read plotting information from a CSV file

In [5]:
df=load.controlfile(control)
# Take out computations tagged 'later'
#df=df[~df['Value'].str.contains('later')]
df
Out[5]:
Value Inputs Equation Zone Plot Colourmap MinMax Unit Oldname Unnamed: 10
Fluxnet gridV.vomecrty,param.e3v_0,param.e1v,param.mas... calc.Fluxnet(data) FramS_All Fluxnet_integrals None ((-10,10),(-10,50) ,(-150,50),(-25,5) ) (Sv,TW, mSv,10^-2 Sv) I-6
Fluxnet gridV.vomecrty,param.e3v_0,param.e1v,param.mas... calc.Fluxnet(data) Davis Fluxnet_integrals None ((-5.0,5.0),(-25,27) ,(-200,50),(-9,5) ) (Sv,TW, mSv,10^-2 Sv) I-6
Fluxnet gridV.vomecrty,param.e3v_0,param.e1v,param.mas... calc.Fluxnet(data) Bering Fluxnet_integrals None ((-2,2),(-10,50) ,(-150,50),(-2,4) ) (Sv,TW, mSv,10^-2 Sv) I-6
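load.controlfile resolves the name chosen via the control environment variable into the table above. As a rough sketch, assuming the control table is a plain CSV with the columns shown (the path and filename pattern below are assumptions, not the project's actual layout):

import pandas as pd

# Sketch only: read a control CSV with the columns shown above.
def read_control_sketch(control, libdir='../lib'):
    df = pd.read_csv(f'{libdir}/{control}.csv')
    # Optional: drop rows tagged 'later', as in the commented-out line in cell [5].
    # df = df[~df['Value'].str.contains('later', na=False)]
    return df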

Computation starts here

Each computation consists of the following steps (a rough sketch of this flow is given after the list):

  1. Load NEMO data set
  2. Zoom data set
  3. Compute (or load computed data set)
  4. Save
  5. Plot
  6. Close
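A minimal sketch of that flow, under the assumption that monitor.auto loops over the rows of the control table (the zoom/save/plots call names and signatures below are hypothetical; only load.datas and calc.Fluxnet appear in this notebook):

# Sketch only: how one row of the control table drives steps 1-6.
ds_all = load.datas(catalog_url, df.Inputs, month, year, daskreport)   # 1. load the NEMO data set (as in cell [6])
for step in df.itertuples():
    ds  = zoom.cutdomain(ds_all, step.Zone)                            # 2. zoom to the row's zone (hypothetical name)
    out = eval(step.Equation, {'calc': calc, 'data': ds})              # 3. compute, e.g. calc.Fluxnet(data)
    save.datas(out, outputpath, step.Value)                            # 4. save (hypothetical signature)
    plots.plot(out, step.Plot, savefig)                                # 5. plot (hypothetical signature)
    del ds, out                                                        # 6. close / release intermediate results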
In [6]:
%%time
import os
calcswitch = os.environ.get('calc', 'True')
lazy = os.environ.get('lazy', 'False')
loaddata = (df.Inputs != '').any()
print('calcswitch=', calcswitch, 'df.Inputs != nothing', loaddata, 'lazy=', lazy)
data = load.datas(catalog_url, df.Inputs, month, year, daskreport, lazy=lazy) if (calcswitch == 'True' and loaddata) else 0
data
calcswitch= False df.Inputs != nothing True lazy= False
CPU times: user 330 µs, sys: 56 µs, total: 386 µs
Wall time: 371 µs
Out[6]:
0
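Because the job was launched with calc=False, the conditional above takes the else branch and data is the integer 0 rather than an xarray Dataset; the next cell then fails as soon as monitor.auto touches data.data_vars. A minimal guard, assuming the intent is simply to skip the monitoring loop when nothing was loaded, could be:

import xarray as xr

# Sketch only: avoid handing a non-Dataset to monitor.auto when calc=False.
if isinstance(data, xr.Dataset):
    monitor.auto(df, data, savefig, daskreport, outputpath, file_exp='SEDNA')
else:
    print('calc is False and no dataset was loaded; skipping monitor.auto')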
In [7]:
%%time
monitor.auto(df, data, savefig, daskreport, outputpath, file_exp='SEDNA')
#calc= False
#save= False
#plot= True
---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
File <timed eval>:1, in <module>

File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py:26, in auto(df, val, savefig, daskreport, outputpath, file_exp)
     23 print('#plot=',plotswitch )  
     24 for step in df.itertuples():
     25 # 1. Create data set
---> 26     optimize_dataset(val)
     27     data=val
     28     Value=step.Value

File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py:100, in optimize_dataset(ds)
     98 def optimize_dataset(ds):
     99     import dask
--> 100     for varname, da in ds.data_vars.items():
    101         #print(varname)
    102         da=da.data
    103         (da,)=dask.optimize(da)

AttributeError: 'int' object has no attribute 'data_vars'
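As anticipated above, the traceback comes from passing the integer 0 into monitor.auto: optimize_dataset has no data_vars to iterate over. One way to make it tolerant of this case, sketched on the assumption that skipping the graph optimisation is acceptable when no dataset is passed (this is not the project's actual code):

import dask
import xarray as xr

def optimize_dataset(ds):
    """Optimise each variable's dask graph; silently skip non-Dataset inputs."""
    if not isinstance(ds, xr.Dataset):      # e.g. the integer 0 passed when calc=False
        return
    for varname, da in ds.data_vars.items():
        (optimized,) = dask.optimize(da.data)   # optimise the variable's task graph
        da.data = optimized                     # write the optimised graph back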