In [1]:
%matplotlib inline
import pandas as pd
import socket
host = socket.getfqdn()

from core import load, zoom, calc, save, plots, monitor
In [2]:
#reload funcs after updating ./core/*.py
import importlib
importlib.reload(load)
importlib.reload(zoom)
importlib.reload(calc)
importlib.reload(save)
importlib.reload(plots)
importlib.reload(monitor)
Out[2]:
<module 'core.monitor' from '/ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py'>

If you submit the job with a job scheduler, see above

Below is the list of environment variables one can pass:

%env local='2'

local: if 'True', run a local dask cluster; otherwise the value gives the number of workers to start. If 'local' is not given, it defaults to 'True'.
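As a rough sketch of how this switch could drive cluster creation (the actual logic lives in load.set_control in core/load.py; the snippet below is a hypothetical stand-in):

import os
from dask.distributed import Client, LocalCluster

# Hypothetical reading of the 'local' switch described above:
# absent or 'True' -> default LocalCluster; a number like '2' -> that many workers.
local = os.environ.get('local', 'True').strip("'\"")
if local == 'True':
    cluster = LocalCluster()                      # dask picks the worker count
else:
    cluster = LocalCluster(n_workers=int(local))  # e.g. local='2' -> 2 workers
client = Client(cluster)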

%env ychunk='2'

%env tchunk='2'

These control chunking. 'False' keeps the chunking of the original netCDF file unmodified.

ychunk=10 groups the original netCDF files ten by ten along y.

tchunk=1 chunks the time coordinate one step at a time.
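A minimal sketch of how these switches could map onto xarray chunking (hypothetical: 'time_counter' is assumed to be the NEMO time dimension, and sample.nc is a placeholder file name):

import os
import xarray as xr

# 'False' -> keep the original netCDF chunking; otherwise chunk y / time_counter.
ychunk = os.environ.get('ychunk', 'False').strip("'\"")
tchunk = os.environ.get('tchunk', 'False').strip("'\"")
chunks = {}
if ychunk != 'False':
    chunks['y'] = int(ychunk)             # e.g. ychunk=10 groups y ten by ten
if tchunk != 'False':
    chunks['time_counter'] = int(tchunk)  # e.g. tchunk=1 chunks time one by one
ds = xr.open_dataset('sample.nc', chunks=chunks if chunks else None)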

%env file_exp=

'file_exp': the 'experiment' name. It corresponds to the intake catalog name without the path and the .yaml extension.
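For example, the cell-3 output below reads ../lib/SEDNA_DELTA_MONITOR.yaml, consistent with this sketch (the path-building snippet itself is an assumption, not the notebook's code):

import os

# file_exp is the catalog name without path and extension:
# SEDNA_DELTA_MONITOR -> ../lib/SEDNA_DELTA_MONITOR.yaml
file_exp = os.environ.get('file_exp', 'SEDNA_DELTA_MONITOR')
catalog_url = f"../lib/{file_exp}.yaml"
print(catalog_url)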

%env year=

For validation, this corresponds to the year in path/year/month.

For monitoring, this corresponds to 'date'; a value containing * means all files in the monitoring directory are processed.

Setting it to 0[0-9], 1[0-9], and *[2-3][0-9] splits the job into three lots.

%env month=

For monitoring, this corresponds to the file path path-XIOS.{month}/.
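To see how the bracket patterns partition a month into lots, here is a small illustration with fnmatch (the date list is made up for the example):

import fnmatch

dates = [f"{d:02}" for d in range(1, 32)]     # hypothetical '01'..'31' file dates
for pattern in ("0[0-9]", "1[0-9]", "*[2-3][0-9]"):
    lot = [d for d in dates if fnmatch.fnmatch(d, pattern)]
    print(pattern, "->", lot[0], "...", lot[-1], f"({len(lot)} files)")

month = "01"
print(f"path-XIOS.{month}/")                  # monitoring file path for this month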


%env control=FWC_SSH

Name of the control file used for computation/plots/save, and how it is called from Monitor.sh.

Monitor.sh calls M_MLD_2D and the scripts AWTD.sh, Fluxnet.sh, Siconc.sh, IceClim.sh, FWC_SSH.sh; each script runs its own controls (summarized in the sketch below this list):

  • AWTD.sh: M_AWTMD
  • Fluxnet.sh: M_Fluxnet
  • Siconc.sh: M_Ice_quantities
  • IceClim.sh: M_IceClim M_IceConce M_IceThick
  • FWC_SSH.sh: M_FWC_2D M_FWC_integrals M_FWC_SSH M_SSH_anomaly
  • Integrals.sh: M_Mean_temp_velo M_Mooring M_Sectionx M_Sectiony
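The pairs above can be summarized as a mapping (illustrative only; the notebook itself does not define this dict):

# Which controls each shell script runs, per the list above.
CONTROLS = {
    "Monitor.sh":   ["M_MLD_2D"],
    "AWTD.sh":      ["M_AWTMD"],
    "Fluxnet.sh":   ["M_Fluxnet"],
    "Siconc.sh":    ["M_Ice_quantities"],
    "IceClim.sh":   ["M_IceClim", "M_IceConce", "M_IceThick"],
    "FWC_SSH.sh":   ["M_FWC_2D", "M_FWC_integrals", "M_FWC_SSH", "M_SSH_anomaly"],
    "Integrals.sh": ["M_Mean_temp_velo", "M_Mooring", "M_Sectionx", "M_Sectiony"],
}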

%env save= : proceed with saving? True or False. The default is True.

%env plot= : proceed with plotting? True or False. The default is True.

%env calc= : proceed with the computation, or just load a previously computed result? True or False. The default is True.

%env save=False

%env lazy=False
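Note that %env passes strings, not booleans; cell 5 below compares against the literal string 'True'. A hypothetical helper doing the same check:

import os

def env_flag(name, default='True'):
    """Return True when the env var is the string 'True' (quotes tolerated)."""
    return os.environ.get(name, default).strip("'\"") == 'True'

save, plot, calc = env_flag('save'), env_flag('plot'), env_flag('calc')
print(save, plot, calc)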

For debugging, this cell can help:

%env file_exp=SEDNA_DELTA_MONITOR
%env year=2012
%env month=01    # or 0[1-2]
%env ychunk=10   # or ychunk=False
%env save=False
%env plot=True
%env calc=True
# %env lazy=False (or False)

%env control=M_Fluxnet

M_Sectiony is OK with ychunk=False local=True lazy=False.

In [3]:
%%time
# 'savefig': save the output as HTML or not; keep it True.
savefig=True
client,cluster,control,catalog_url,month,year,daskreport,outputpath = load.set_control(host)
!mkdir -p $outputpath
!mkdir -p $daskreport
client
local True
using host= irene5098.c-irene.mg1.tgcc.ccc.cea.fr starting dask cluster on local= True workers 16
10000000000
False
tgcc local cluster starting
This code is running on  irene5098.c-irene.mg1.tgcc.ccc.cea.fr using  SEDNA_DELTA_MONITOR file experiment, read from  ../lib/SEDNA_DELTA_MONITOR.yaml  on year= 2012  on month= 01  outputpath= ../results/SEDNA_DELTA_MONITOR/ daskreport= ../results/dask/6419172irene5098.c-irene.mg1.tgcc.ccc.cea.fr_SEDNA_DELTA_MONITOR_01M_SSH_anomaly/
CPU times: user 3.95 s, sys: 828 ms, total: 4.78 s
Wall time: 1min 41s
Out[3]:

Client: Client-9cda9596-13d9-11ed-b840-080038b93c71
Connection method: Cluster object
Cluster type: distributed.LocalCluster
Dashboard: http://127.0.0.1:8787/status

LocalCluster: 6207cad7
Dashboard: http://127.0.0.1:8787/status
Workers: 64, Total threads: 256, Total memory: 251.06 GiB
Status: running, Using processes: True

Scheduler: Scheduler-62825d5a-b045-49ed-90fb-a252c8919f77
Comm: tcp://127.0.0.1:39659
Workers: 64, Total threads: 256, Total memory: 251.06 GiB
Started: 1 minute ago

Workers 0-63: 4 threads and 3.92 GiB memory each; comm, dashboard, and nanny endpoints on tcp://127.0.0.1:*; local directories under /tmp/dask-worker-space/.

Read plotting information from a CSV file

In [4]:
df=load.controlfile(control)
#Take out 'later' tagged computations
#df=df[~df['Value'].str.contains('later')]
df
Out[4]:
Value        Inputs        Equation                Zone  Plot  Colourmap   MinMax        Unit  Oldname  Unnamed: 10
SSH_anomaly  gridT-2D.ssh  calc.SSH_anomaly(data)  ALL   maps  Spectral_r  (-0.35,0.35)  m     M-2

Computation starts here

Each computation consists of the following steps (a driver-loop sketch follows the list):

  1. Load NEMO data set
  2. Zoom data set
  3. Compute (or load a computed data set)
  4. Save
  5. Plot
  6. Close
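A hedged sketch of these six steps as a driver loop; the real implementation is monitor.auto() in core/monitor.py, and the zoom/save/plot signatures below are assumptions:

def run_row(row, catalog_url, month, year, daskreport, outputpath, savefig):
    # 1. Load NEMO data set
    data = load.datas(catalog_url, row.Inputs, month, year, daskreport)
    # 2. Zoom data set (hypothetical signature)
    data = zoom.zoom(data, row.Zone)
    # 3. Compute: the control file's Equation column is presumably evaluated,
    #    e.g. 'calc.SSH_anomaly(data)'
    result = eval(row.Equation)
    # 4. Save and 5. Plot (hypothetical signatures)
    save.save(result, outputpath)
    plots.plot(result, row, savefig)
    # 6. Close: drop references so dask can release memory
    del data, result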
In [5]:
%%time
import os
# environment switches arrive as strings; compare against 'True'/'False' literally
calcswitch = os.environ.get('calc', 'True')
lazy = os.environ.get('lazy', 'False')
loaddata = (df.Inputs != '').any()
print('calcswitch=', calcswitch, 'df.Inputs != nothing', loaddata, 'lazy=', lazy)
data = load.datas(catalog_url, df.Inputs, month, year, daskreport, lazy=lazy) if (calcswitch == 'True') * loaddata else 0
data
calcswitch= True df.Inputs != nothing True lazy= False
../lib/SEDNA_DELTA_MONITOR.yaml
using param_xios reading  ../lib/SEDNA_DELTA_MONITOR.yaml
using param_xios reading  <bound method DataSourceBase.describe of sources:
  param_xios:
    args:
      combine: nested
      concat_dim: y
      urlpath: /ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/SEDNA-I/SEDNA_Domain_cfg_Tgt_20210423_tsh10m_L1/param_f32/x_*.nc
      xarray_kwargs:
        compat: override
        coords: minimal
        data_vars: minimal
        parallel: true
    description: SEDNA NEMO parameters from MPI output  nav_lon lat fails
    driver: intake_xarray.netcdf.NetCDFSource
    metadata:
      catalog_dir: /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/../lib/
>
{'name': 'param_xios', 'container': 'xarray', 'plugin': ['netcdf'], 'driver': ['netcdf'], 'description': 'SEDNA NEMO parameters from MPI output  nav_lon lat fails', 'direct_access': 'forbid', 'user_parameters': [{'name': 'path', 'description': 'file coordinate', 'type': 'str', 'default': '/ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/MESH/SEDNA_mesh_mask_Tgt_20210423_tsh10m_L1/param'}], 'metadata': {}, 'args': {'urlpath': '/ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/SEDNA-I/SEDNA_Domain_cfg_Tgt_20210423_tsh10m_L1/param_f32/x_*.nc', 'combine': 'nested', 'concat_dim': 'y'}}
0 read gridT-2D ['ssh']
lazy= False
using load_data_xios_kerchunk reading  gridT-2D
using load_data_xios_kerchunk reading  <bound method DataSourceBase.describe of sources:
  data_xios_kerchunk:
    args:
      consolidated: false
      storage_options:
        fo: file:////ccc/cont003/home/ra5563/ra5563/catalogue/DELTA/201201/gridT-2D_0[0-5][0-9][0-9].json
        target_protocol: file
      urlpath: reference://
    description: CREG025 NEMO outputs from different xios server in kerchunk format
    driver: intake_xarray.xzarr.ZarrSource
    metadata:
      catalog_dir: /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/../lib/
>
---------------------------------------------------------------------------
FileNotFoundError                         Traceback (most recent call last)
File <timed exec>:6, in <module>

File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/load.py:677, in datas(catalog_url, dfi, month, year, daskreport, lazy)
    672 datadict, paramdict = getdict(dfi)
    673 #print('datadict:',datadict)
    674 #if datadict == {}:
    675 #    data=0
    676 #else:
--> 677 data=outputs(catalog_url,datadict,month,year,daskreport,lazy) 
    678 for s in paramdict:
    679     print('param',s,'will be included in data')

File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/load.py:496, in outputs(catalog_url, datadict, month, year, daskreport, lazy)
    493 with performance_report(filename=daskreport+"_load_output_"+filename+"_"+month+year+".html"):
    494     #ds=load_data_xios_patch(cat,filename,month,catalog_url) 
    495     print("lazy=",lazy)
--> 496     ds = load_data_xios(cat,filename,items,month,year) if ('True' in lazy)  else load_data_xios_kerchunk(cat,filename,items,month,year,rome=True)
    497 extime=time.time() - start
    498 print('      took', extime, 'seconds')

File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/load.py:467, in load_data_xios_kerchunk(cat, filename, items, month, year, rome)
    465 desc=cat.data_xios_kerchunk(file=filename,month=month,year=year).describe         
    466 print('using load_data_xios_kerchunk reading ',desc)
--> 467 ds_x= [ prep(
    468     cat.data_xios_kerchunk(
    469         file=filename,month=month,year=year,eio=f'{xios:04}' 
    470     ).to_dask().drop_vars(dro,errors='ignore')[items])
    471        for xios in xioss]
    473 return xr.concat(ds_x,dim='y',compat="override",coords="minimal")

File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/load.py:470, in <listcomp>(.0)
    465 desc=cat.data_xios_kerchunk(file=filename,month=month,year=year).describe         
    466 print('using load_data_xios_kerchunk reading ',desc)
    467 ds_x= [ prep(
    468     cat.data_xios_kerchunk(
    469         file=filename,month=month,year=year,eio=f'{xios:04}' 
--> 470     ).to_dask().drop_vars(dro,errors='ignore')[items])
    471        for xios in xioss]
    473 return xr.concat(ds_x,dim='y',compat="override",coords="minimal")

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake_xarray/base.py:69, in DataSourceMixin.to_dask(self)
     67 def to_dask(self):
     68     """Return xarray object where variables are dask arrays"""
---> 69     return self.read_chunked()

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake_xarray/base.py:44, in DataSourceMixin.read_chunked(self)
     42 def read_chunked(self):
     43     """Return xarray object (which will have chunks)"""
---> 44     self._load_metadata()
     45     return self._ds

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake/source/base.py:236, in DataSourceBase._load_metadata(self)
    234 """load metadata only if needed"""
    235 if self._schema is None:
--> 236     self._schema = self._get_schema()
    237     self.dtype = self._schema.dtype
    238     self.shape = self._schema.shape

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake_xarray/base.py:18, in DataSourceMixin._get_schema(self)
     15 self.urlpath = self._get_cache(self.urlpath)[0]
     17 if self._ds is None:
---> 18     self._open_dataset()
     20     metadata = {
     21         'dims': dict(self._ds.dims),
     22         'data_vars': {k: list(self._ds[k].coords)
     23                       for k in self._ds.data_vars.keys()},
     24         'coords': tuple(self._ds.coords.keys()),
     25     }
     26     if getattr(self, 'on_server', False):

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake_xarray/xzarr.py:46, in ZarrSource._open_dataset(self)
     44     self._ds = xr.open_mfdataset(self.urlpath, **kw)
     45 else:
---> 46     self._ds = xr.open_dataset(self.urlpath, **kw)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/api.py:531, in open_dataset(filename_or_obj, engine, chunks, cache, decode_cf, mask_and_scale, decode_times, decode_timedelta, use_cftime, concat_characters, decode_coords, drop_variables, inline_array, backend_kwargs, **kwargs)
    519 decoders = _resolve_decoders_kwargs(
    520     decode_cf,
    521     open_backend_dataset_parameters=backend.open_dataset_parameters,
   (...)
    527     decode_coords=decode_coords,
    528 )
    530 overwrite_encoded_chunks = kwargs.pop("overwrite_encoded_chunks", None)
--> 531 backend_ds = backend.open_dataset(
    532     filename_or_obj,
    533     drop_variables=drop_variables,
    534     **decoders,
    535     **kwargs,
    536 )
    537 ds = _dataset_from_backend_dataset(
    538     backend_ds,
    539     filename_or_obj,
   (...)
    547     **kwargs,
    548 )
    549 return ds

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/zarr.py:837, in ZarrBackendEntrypoint.open_dataset(self, filename_or_obj, mask_and_scale, decode_times, concat_characters, decode_coords, drop_variables, use_cftime, decode_timedelta, group, mode, synchronizer, consolidated, chunk_store, storage_options, stacklevel)
    817 def open_dataset(
    818     self,
    819     filename_or_obj,
   (...)
    833     stacklevel=3,
    834 ):
    836     filename_or_obj = _normalize_path(filename_or_obj)
--> 837     store = ZarrStore.open_group(
    838         filename_or_obj,
    839         group=group,
    840         mode=mode,
    841         synchronizer=synchronizer,
    842         consolidated=consolidated,
    843         consolidate_on_close=False,
    844         chunk_store=chunk_store,
    845         storage_options=storage_options,
    846         stacklevel=stacklevel + 1,
    847     )
    849     store_entrypoint = StoreBackendEntrypoint()
    850     with close_on_error(store):

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/zarr.py:406, in ZarrStore.open_group(cls, store, mode, synchronizer, group, consolidated, consolidate_on_close, chunk_store, storage_options, append_dim, write_region, safe_chunks, stacklevel)
    404     zarr_group = zarr.open_consolidated(store, **open_kwargs)
    405 else:
--> 406     zarr_group = zarr.open_group(store, **open_kwargs)
    407 return cls(
    408     zarr_group,
    409     mode,
   (...)
    413     safe_chunks,
    414 )

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/zarr/hierarchy.py:1316, in open_group(store, mode, cache_attrs, synchronizer, path, chunk_store, storage_options, zarr_version)
   1270 """Open a group using file-mode-like semantics.
   1271 
   1272 Parameters
   (...)
   1312 
   1313 """
   1315 # handle polymorphic store arg
-> 1316 store = _normalize_store_arg(
   1317     store, storage_options=storage_options, mode=mode,
   1318     zarr_version=zarr_version)
   1319 if zarr_version is None:
   1320     zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/zarr/hierarchy.py:1192, in _normalize_store_arg(store, storage_options, mode, zarr_version)
   1190 if store is None:
   1191     return MemoryStore() if zarr_version == 2 else MemoryStoreV3()
-> 1192 return normalize_store_arg(store,
   1193                            storage_options=storage_options, mode=mode,
   1194                            zarr_version=zarr_version)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/zarr/storage.py:170, in normalize_store_arg(store, storage_options, mode, zarr_version)
    168     from zarr._storage.v3 import _normalize_store_arg_v3
    169     normalize_store = _normalize_store_arg_v3
--> 170 return normalize_store(store, storage_options, mode)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/zarr/storage.py:143, in _normalize_store_arg_v2(store, storage_options, mode)
    141 if isinstance(store, str):
    142     if "://" in store or "::" in store:
--> 143         return FSStore(store, mode=mode, **(storage_options or {}))
    144     elif storage_options:
    145         raise ValueError("storage_options passed with non-fsspec path")

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/zarr/storage.py:1321, in FSStore.__init__(self, url, normalize_keys, key_separator, mode, exceptions, dimension_separator, fs, check, create, missing_exceptions, **storage_options)
   1319 if protocol in (None, "file") and not storage_options.get("auto_mkdir"):
   1320     storage_options["auto_mkdir"] = True
-> 1321 self.map = fsspec.get_mapper(url, **{**mapper_options, **storage_options})
   1322 self.fs = self.map.fs  # for direct operations
   1323 self.path = self.fs._strip_protocol(url)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/mapping.py:230, in get_mapper(url, check, create, missing_exceptions, alternate_root, **kwargs)
    199 """Create key-value interface for given URL and options
    200 
    201 The URL will be of the form "protocol://location" and point to the root
   (...)
    227 ``FSMap`` instance, the dict-like key-value store.
    228 """
    229 # Removing protocol here - could defer to each open() on the backend
--> 230 fs, urlpath = url_to_fs(url, **kwargs)
    231 root = alternate_root if alternate_root is not None else urlpath
    232 return FSMap(root, fs, check, create, missing_exceptions=missing_exceptions)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/core.py:412, in url_to_fs(url, **kwargs)
    410     options = cls._get_kwargs_from_urls(url)
    411     update_storage_options(options, kwargs)
--> 412     fs = cls(**options)
    413     urlpath = fs._strip_protocol(url)
    414 return fs, urlpath

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/spec.py:76, in _Cached.__call__(cls, *args, **kwargs)
     74     return cls._cache[token]
     75 else:
---> 76     obj = super().__call__(*args, **kwargs)
     77     # Setting _fs_token here causes some static linters to complain.
     78     obj._fs_token_ = token

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/implementations/reference.py:140, in ReferenceFileSystem.__init__(self, fo, target, ref_storage_args, target_protocol, target_options, remote_protocol, remote_options, fs, template_overrides, simple_templates, loop, **kwargs)
    138 dic = dict(**(ref_storage_args or target_options or {}), **extra)
    139 # text JSON
--> 140 with open(fo, "rb", **dic) as f:
    141     logger.info("Read reference from URL %s", fo)
    142     text = f.read()

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/core.py:104, in OpenFile.__enter__(self)
    101 def __enter__(self):
    102     mode = self.mode.replace("t", "").replace("b", "") + "b"
--> 104     f = self.fs.open(self.path, mode=mode)
    106     self.fobjects = [f]
    108     if self.compression is not None:

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/spec.py:1037, in AbstractFileSystem.open(self, path, mode, block_size, cache_options, compression, **kwargs)
   1035 else:
   1036     ac = kwargs.pop("autocommit", not self._intrans)
-> 1037     f = self._open(
   1038         path,
   1039         mode=mode,
   1040         block_size=block_size,
   1041         autocommit=ac,
   1042         cache_options=cache_options,
   1043         **kwargs,
   1044     )
   1045     if compression is not None:
   1046         from fsspec.compression import compr

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/implementations/local.py:159, in LocalFileSystem._open(self, path, mode, block_size, **kwargs)
    157 if self.auto_mkdir and "w" in mode:
    158     self.makedirs(self._parent(path), exist_ok=True)
--> 159 return LocalFileOpener(path, mode, fs=self, **kwargs)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/implementations/local.py:254, in LocalFileOpener.__init__(self, path, mode, autocommit, fs, compression, **kwargs)
    252 self.compression = get_compression(path, compression)
    253 self.blocksize = io.DEFAULT_BUFFER_SIZE
--> 254 self._open()

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/implementations/local.py:259, in LocalFileOpener._open(self)
    257 if self.f is None or self.f.closed:
    258     if self.autocommit or "w" not in self.mode:
--> 259         self.f = open(self.path, mode=self.mode)
    260         if self.compression:
    261             compress = compr[self.compression]

FileNotFoundError: [Errno 2] No such file or directory: '//ccc/cont003/home/ra5563/ra5563/catalogue/DELTA/201201/gridT-2D_0000.json'
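The failure above is a missing kerchunk reference JSON (gridT-2D_0000.json). A quick pre-flight check along these lines can confirm whether the catalogue files exist before calling load.datas (the glob copies the 'fo' pattern from the catalog output above):

import glob

pattern = "/ccc/cont003/home/ra5563/ra5563/catalogue/DELTA/201201/gridT-2D_0[0-5][0-9][0-9].json"
found = sorted(glob.glob(pattern))
print(len(found), "kerchunk reference files match", pattern)
if not found:
    # Per core/load.py line 496, lazy='True' takes the load_data_xios (netCDF)
    # path instead of the kerchunk reader, so one workaround is %env lazy=True.
    print("no reference JSONs found: regenerate the kerchunk catalogue or set %env lazy=True")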
In [6]:
%%time
monitor.auto(df,data,savefig,daskreport,outputpath,file_exp='SEDNA'
            )
---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
File <timed eval>:1, in <module>

NameError: name 'data' is not defined
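The NameError is a knock-on effect: cell 5 raised before assigning data. A minimal guard, assuming we only want monitor.auto to run after a successful load:

try:
    data
except NameError:
    print("data is undefined: fix the missing kerchunk catalogue above and rerun cell 5")
else:
    monitor.auto(df, data, savefig, daskreport, outputpath, file_exp='SEDNA')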