In [1]:
%matplotlib inline
import pandas as pd
import socket
host = socket.getfqdn()

from core import load, zoom, calc, save, plots, monitor
In [2]:
# reload funcs after updating ./core/*.py
import importlib
importlib.reload(load)
importlib.reload(zoom)
importlib.reload(calc)
importlib.reload(save)
importlib.reload(plots)
importlib.reload(monitor)
Out[2]:
<module 'core.monitor' from '/ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py'>

If you submit the job with a job scheduler, read the following

Below is the list of environment variables one can pass.

%env local='2'

local : if 'True', run a local dask cluster; otherwise the value sets the number of dask workers. If no 'local' is given, it defaults to 'True'.
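
A minimal sketch of how this switch could translate into a cluster, assuming set_control in core/load.py does something along these lines (the names below are illustrative, not the project's actual code):

import os
from dask.distributed import Client, LocalCluster

local = os.environ.get('local', 'True')
if local == 'True':
    cluster = LocalCluster()                      # let dask pick the worker count
else:
    cluster = LocalCluster(n_workers=int(local))  # e.g. local='16' starts 16 workers
client = Client(cluster)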

%env ychunk='2'

%env tchunk='2'

These control chunking. 'False' keeps the chunks of the original netCDF file unmodified.

ychunk=10 groups the original netCDF files ten by ten along y.

tchunk=1 chunks the time coordinate one step at a time.
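
As a simplified illustration of the two knobs only (the file and dimension names are placeholders; the actual grouping of files along y happens inside core/load.py):

import xarray as xr

ds = xr.open_dataset('gridT-2D.nc')          # placeholder file name
ds = ds.chunk({'y': 10, 'time_counter': 1})  # ychunk=10, tchunk=1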

%env file_exp=

'file_exp': the name of the 'experiment'.

This corresponds to the intake catalog name, without the path and the .yaml extension.

%env year=

For validation, this corresponds to the year in path/year/month.

For monitoring, this corresponds to 'date'; a value containing * means all files in the monitoring directory are processed.

Setting it to 0[0-9], 1[0-9] and *[2-3][0-9], the job can be separated into three lots.

%env month=

For monitoring, this corresponds to the file path path-XIOS.{month}/.


%env control=FWC_SSH

Name of the control file used for computation/plots/saving, and how it is called from Monitor.sh.

Monitor.sh calls M_MLD_2D, and the scripts AWTD.sh, Fluxnet.sh, Siconc.sh, IceClim.sh and FWC_SSH.sh. Each script maps to control files as follows:

  • AWTD.sh: M_AWTMD

  • Fluxnet.sh: M_Fluxnet

  • Siconc.sh: M_Ice_quantities

  • IceClim.sh: M_IceClim, M_IceConce, M_IceThick

  • FWC_SSH.sh: M_FWC_2D, M_FWC_integrals, M_FWC_SSH, M_SSH_anomaly

  • Integrals.sh: M_Mean_temp_velo, M_Mooring, M_Sectionx, M_Sectiony

%env save= : proceed with saving? True or False; default is True.

%env plot= : proceed with plotting? True or False; default is True.

%env calc= : proceed with the computation, or just load a previously computed result? True or False; default is True.

%env save=False

%env lazy=False

For debugging, this cell can help:

%env file_exp=SEDNA_DELTA_MONITOR %env year=2012 %env month=01

0[1-2]

%env ychunk=10 %env ychunk=False %env save=False %env plot=True %env calc=True # %env lazy=False

False

%env control=M_Fluxnet

M_Sectiony works with ychunk=False local=True lazy=False
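
These switches are read back inside the notebook with os.environ.get. A minimal sketch of the consuming side, with the defaults described above (illustrative; the canonical parsing lives in core/load.py):

import os

local = os.environ.get('local', 'True')
ychunk = os.environ.get('ychunk', 'False')
tchunk = os.environ.get('tchunk', 'False')
lazy = os.environ.get('lazy', 'False')
calc = os.environ.get('calc', 'True') == 'True'
save = os.environ.get('save', 'True') == 'True'
plot = os.environ.get('plot', 'True') == 'True'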

In [3]:
%%time
# 'savefig': whether to save the output as html; keep it True.
savefig=True
client,cluster,control,catalog_url,month,year,daskreport,outputpath = load.set_control(host)
!mkdir -p $outputpath
!mkdir -p $daskreport
client
local True
using host= irene4746.c-irene.mg1.tgcc.ccc.cea.fr starting dask cluster on local= True workers 16
10000000000
False
tgcc local cluster starting
This code is running on  irene4746.c-irene.mg1.tgcc.ccc.cea.fr using  SEDNA_DELTA_MONITOR file experiment, read from  ../lib/SEDNA_DELTA_MONITOR.yaml  on year= 2012  on month= 02  outputpath= ../results/SEDNA_DELTA_MONITOR/ daskreport= ../results/dask/6419178irene4746.c-irene.mg1.tgcc.ccc.cea.fr_SEDNA_DELTA_MONITOR_02M_SSH_anomaly/
CPU times: user 3.88 s, sys: 784 ms, total: 4.66 s
Wall time: 1min 42s
Out[3]:

Client

Client-9d6e418a-13d9-11ed-b0fe-080038b93643

Connection method: Cluster object   Cluster type: distributed.LocalCluster
Dashboard: http://127.0.0.1:8787/status

LocalCluster

6628fc6c

Dashboard: http://127.0.0.1:8787/status   Workers: 64
Total threads: 256   Total memory: 251.06 GiB
Status: running   Using processes: True

Scheduler

Scheduler-13e0cdf0-94e4-41cc-b767-81b403aa976f

Comm: tcp://127.0.0.1:33896   Workers: 64
Dashboard: http://127.0.0.1:8787/status   Total threads: 256
Started: 1 minute ago   Total memory: 251.06 GiB

Workers

(The 64 per-worker entries of the cluster repr are elided: each worker runs 4 threads with 3.92 GiB of memory, a comm and nanny address on 127.0.0.1, and a local directory under /tmp/dask-worker-space/.)

Read plotting information from a csv file

In [4]:
df=load.controlfile(control)
#Take out 'later' tagged computations
#df=df[~df['Value'].str.contains('later')]
df
Out[4]:
Value        Inputs        Equation                Zone  Plot  Colourmap   MinMax        Unit  Oldname  Unnamed: 10
SSH_anomaly  gridT-2D.ssh  calc.SSH_anomaly(data)  ALL   maps  Spectral_r  (-0.35,0.35)  m     M-2
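
load.controlfile resolves the control name to a csv with the columns shown above. A rough equivalent, assuming a plain csv file (the path and na handling here are guesses, not the project's actual layout):

import pandas as pd

df = pd.read_csv(f'../control/{control}.csv').fillna('')  # hypothetical path
df = df[~df['Value'].str.contains('later')]               # drop 'later'-tagged rows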

Computation starts here

Each computation consists of

  1. Load NEMO data set
  2. Zoom data set
  3. Compute (or load computed data set)
  4. Save
  5. Plot
  6. Close
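
The cells below carry out these steps; a condensed sketch, using the objects created above (the exact split of the six steps between load.datas and monitor.auto is an assumption):

# 1-2: load the NEMO data set and zoom it (assumed to happen in load.datas)
data = load.datas(catalog_url, df.Inputs, month, year, daskreport)
# 3-5: compute, save and plot, dispatched row by row from the control file
monitor.auto(df, data, savefig, daskreport, outputpath, file_exp='SEDNA')
# 6: close the dask client
client.close()
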
In [5]:
%%time
import os
calcswitch = os.environ.get('calc', 'True')
lazy = os.environ.get('lazy', 'False')
loaddata = (df.Inputs != '').any()
print('calcswitch=', calcswitch, 'df.Inputs != nothing', loaddata, 'lazy=', lazy)
data = load.datas(catalog_url, df.Inputs, month, year, daskreport, lazy=lazy) if (calcswitch == 'True' and loaddata) else 0
data
calcswitch= True df.Inputs != nothing True lazy= False
../lib/SEDNA_DELTA_MONITOR.yaml
using param_xios reading  ../lib/SEDNA_DELTA_MONITOR.yaml
using param_xios reading  <bound method DataSourceBase.describe of sources:
  param_xios:
    args:
      combine: nested
      concat_dim: y
      urlpath: /ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/SEDNA-I/SEDNA_Domain_cfg_Tgt_20210423_tsh10m_L1/param_f32/x_*.nc
      xarray_kwargs:
        compat: override
        coords: minimal
        data_vars: minimal
        parallel: true
    description: SEDNA NEMO parameters from MPI output  nav_lon lat fails
    driver: intake_xarray.netcdf.NetCDFSource
    metadata:
      catalog_dir: /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/../lib/
>
{'name': 'param_xios', 'container': 'xarray', 'plugin': ['netcdf'], 'driver': ['netcdf'], 'description': 'SEDNA NEMO parameters from MPI output  nav_lon lat fails', 'direct_access': 'forbid', 'user_parameters': [{'name': 'path', 'description': 'file coordinate', 'type': 'str', 'default': '/ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/MESH/SEDNA_mesh_mask_Tgt_20210423_tsh10m_L1/param'}], 'metadata': {}, 'args': {'urlpath': '/ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/SEDNA-I/SEDNA_Domain_cfg_Tgt_20210423_tsh10m_L1/param_f32/x_*.nc', 'combine': 'nested', 'concat_dim': 'y'}}
0 read gridT-2D ['ssh']
lazy= False
using load_data_xios_kerchunk reading  gridT-2D
using load_data_xios_kerchunk reading  <bound method DataSourceBase.describe of sources:
  data_xios_kerchunk:
    args:
      consolidated: false
      storage_options:
        fo: file:////ccc/cont003/home/ra5563/ra5563/catalogue/DELTA/201202/gridT-2D_0[0-5][0-9][0-9].json
        target_protocol: file
      urlpath: reference://
    description: CREG025 NEMO outputs from different xios server in kerchunk format
    driver: intake_xarray.xzarr.ZarrSource
    metadata:
      catalog_dir: /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/../lib/
>
---------------------------------------------------------------------------
FileNotFoundError                         Traceback (most recent call last)
File <timed exec>:6, in <module>

File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/load.py:677, in datas(catalog_url, dfi, month, year, daskreport, lazy)
    672 datadict, paramdict = getdict(dfi)
    673 #print('datadict:',datadict)
    674 #if datadict == {}:
    675 #    data=0
    676 #else:
--> 677 data=outputs(catalog_url,datadict,month,year,daskreport,lazy) 
    678 for s in paramdict:
    679     print('param',s,'will be included in data')

File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/load.py:496, in outputs(catalog_url, datadict, month, year, daskreport, lazy)
    493 with performance_report(filename=daskreport+"_load_output_"+filename+"_"+month+year+".html"):
    494     #ds=load_data_xios_patch(cat,filename,month,catalog_url) 
    495     print("lazy=",lazy)
--> 496     ds = load_data_xios(cat,filename,items,month,year) if ('True' in lazy)  else load_data_xios_kerchunk(cat,filename,items,month,year,rome=True)
    497 extime=time.time() - start
    498 print('      took', extime, 'seconds')

File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/load.py:467, in load_data_xios_kerchunk(cat, filename, items, month, year, rome)
    465 desc=cat.data_xios_kerchunk(file=filename,month=month,year=year).describe         
    466 print('using load_data_xios_kerchunk reading ',desc)
--> 467 ds_x= [ prep(
    468     cat.data_xios_kerchunk(
    469         file=filename,month=month,year=year,eio=f'{xios:04}' 
    470     ).to_dask().drop_vars(dro,errors='ignore')[items])
    471        for xios in xioss]
    473 return xr.concat(ds_x,dim='y',compat="override",coords="minimal")

File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/load.py:470, in <listcomp>(.0)
    465 desc=cat.data_xios_kerchunk(file=filename,month=month,year=year).describe         
    466 print('using load_data_xios_kerchunk reading ',desc)
    467 ds_x= [ prep(
    468     cat.data_xios_kerchunk(
    469         file=filename,month=month,year=year,eio=f'{xios:04}' 
--> 470     ).to_dask().drop_vars(dro,errors='ignore')[items])
    471        for xios in xioss]
    473 return xr.concat(ds_x,dim='y',compat="override",coords="minimal")

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake_xarray/base.py:69, in DataSourceMixin.to_dask(self)
     67 def to_dask(self):
     68     """Return xarray object where variables are dask arrays"""
---> 69     return self.read_chunked()

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake_xarray/base.py:44, in DataSourceMixin.read_chunked(self)
     42 def read_chunked(self):
     43     """Return xarray object (which will have chunks)"""
---> 44     self._load_metadata()
     45     return self._ds

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake/source/base.py:236, in DataSourceBase._load_metadata(self)
    234 """load metadata only if needed"""
    235 if self._schema is None:
--> 236     self._schema = self._get_schema()
    237     self.dtype = self._schema.dtype
    238     self.shape = self._schema.shape

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake_xarray/base.py:18, in DataSourceMixin._get_schema(self)
     15 self.urlpath = self._get_cache(self.urlpath)[0]
     17 if self._ds is None:
---> 18     self._open_dataset()
     20     metadata = {
     21         'dims': dict(self._ds.dims),
     22         'data_vars': {k: list(self._ds[k].coords)
     23                       for k in self._ds.data_vars.keys()},
     24         'coords': tuple(self._ds.coords.keys()),
     25     }
     26     if getattr(self, 'on_server', False):

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake_xarray/xzarr.py:46, in ZarrSource._open_dataset(self)
     44     self._ds = xr.open_mfdataset(self.urlpath, **kw)
     45 else:
---> 46     self._ds = xr.open_dataset(self.urlpath, **kw)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/api.py:531, in open_dataset(filename_or_obj, engine, chunks, cache, decode_cf, mask_and_scale, decode_times, decode_timedelta, use_cftime, concat_characters, decode_coords, drop_variables, inline_array, backend_kwargs, **kwargs)
    519 decoders = _resolve_decoders_kwargs(
    520     decode_cf,
    521     open_backend_dataset_parameters=backend.open_dataset_parameters,
   (...)
    527     decode_coords=decode_coords,
    528 )
    530 overwrite_encoded_chunks = kwargs.pop("overwrite_encoded_chunks", None)
--> 531 backend_ds = backend.open_dataset(
    532     filename_or_obj,
    533     drop_variables=drop_variables,
    534     **decoders,
    535     **kwargs,
    536 )
    537 ds = _dataset_from_backend_dataset(
    538     backend_ds,
    539     filename_or_obj,
   (...)
    547     **kwargs,
    548 )
    549 return ds

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/zarr.py:837, in ZarrBackendEntrypoint.open_dataset(self, filename_or_obj, mask_and_scale, decode_times, concat_characters, decode_coords, drop_variables, use_cftime, decode_timedelta, group, mode, synchronizer, consolidated, chunk_store, storage_options, stacklevel)
    817 def open_dataset(
    818     self,
    819     filename_or_obj,
   (...)
    833     stacklevel=3,
    834 ):
    836     filename_or_obj = _normalize_path(filename_or_obj)
--> 837     store = ZarrStore.open_group(
    838         filename_or_obj,
    839         group=group,
    840         mode=mode,
    841         synchronizer=synchronizer,
    842         consolidated=consolidated,
    843         consolidate_on_close=False,
    844         chunk_store=chunk_store,
    845         storage_options=storage_options,
    846         stacklevel=stacklevel + 1,
    847     )
    849     store_entrypoint = StoreBackendEntrypoint()
    850     with close_on_error(store):

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/zarr.py:406, in ZarrStore.open_group(cls, store, mode, synchronizer, group, consolidated, consolidate_on_close, chunk_store, storage_options, append_dim, write_region, safe_chunks, stacklevel)
    404     zarr_group = zarr.open_consolidated(store, **open_kwargs)
    405 else:
--> 406     zarr_group = zarr.open_group(store, **open_kwargs)
    407 return cls(
    408     zarr_group,
    409     mode,
   (...)
    413     safe_chunks,
    414 )

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/zarr/hierarchy.py:1316, in open_group(store, mode, cache_attrs, synchronizer, path, chunk_store, storage_options, zarr_version)
   1270 """Open a group using file-mode-like semantics.
   1271 
   1272 Parameters
   (...)
   1312 
   1313 """
   1315 # handle polymorphic store arg
-> 1316 store = _normalize_store_arg(
   1317     store, storage_options=storage_options, mode=mode,
   1318     zarr_version=zarr_version)
   1319 if zarr_version is None:
   1320     zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/zarr/hierarchy.py:1192, in _normalize_store_arg(store, storage_options, mode, zarr_version)
   1190 if store is None:
   1191     return MemoryStore() if zarr_version == 2 else MemoryStoreV3()
-> 1192 return normalize_store_arg(store,
   1193                            storage_options=storage_options, mode=mode,
   1194                            zarr_version=zarr_version)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/zarr/storage.py:170, in normalize_store_arg(store, storage_options, mode, zarr_version)
    168     from zarr._storage.v3 import _normalize_store_arg_v3
    169     normalize_store = _normalize_store_arg_v3
--> 170 return normalize_store(store, storage_options, mode)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/zarr/storage.py:143, in _normalize_store_arg_v2(store, storage_options, mode)
    141 if isinstance(store, str):
    142     if "://" in store or "::" in store:
--> 143         return FSStore(store, mode=mode, **(storage_options or {}))
    144     elif storage_options:
    145         raise ValueError("storage_options passed with non-fsspec path")

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/zarr/storage.py:1321, in FSStore.__init__(self, url, normalize_keys, key_separator, mode, exceptions, dimension_separator, fs, check, create, missing_exceptions, **storage_options)
   1319 if protocol in (None, "file") and not storage_options.get("auto_mkdir"):
   1320     storage_options["auto_mkdir"] = True
-> 1321 self.map = fsspec.get_mapper(url, **{**mapper_options, **storage_options})
   1322 self.fs = self.map.fs  # for direct operations
   1323 self.path = self.fs._strip_protocol(url)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/mapping.py:230, in get_mapper(url, check, create, missing_exceptions, alternate_root, **kwargs)
    199 """Create key-value interface for given URL and options
    200 
    201 The URL will be of the form "protocol://location" and point to the root
   (...)
    227 ``FSMap`` instance, the dict-like key-value store.
    228 """
    229 # Removing protocol here - could defer to each open() on the backend
--> 230 fs, urlpath = url_to_fs(url, **kwargs)
    231 root = alternate_root if alternate_root is not None else urlpath
    232 return FSMap(root, fs, check, create, missing_exceptions=missing_exceptions)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/core.py:412, in url_to_fs(url, **kwargs)
    410     options = cls._get_kwargs_from_urls(url)
    411     update_storage_options(options, kwargs)
--> 412     fs = cls(**options)
    413     urlpath = fs._strip_protocol(url)
    414 return fs, urlpath

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/spec.py:76, in _Cached.__call__(cls, *args, **kwargs)
     74     return cls._cache[token]
     75 else:
---> 76     obj = super().__call__(*args, **kwargs)
     77     # Setting _fs_token here causes some static linters to complain.
     78     obj._fs_token_ = token

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/implementations/reference.py:140, in ReferenceFileSystem.__init__(self, fo, target, ref_storage_args, target_protocol, target_options, remote_protocol, remote_options, fs, template_overrides, simple_templates, loop, **kwargs)
    138 dic = dict(**(ref_storage_args or target_options or {}), **extra)
    139 # text JSON
--> 140 with open(fo, "rb", **dic) as f:
    141     logger.info("Read reference from URL %s", fo)
    142     text = f.read()

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/core.py:104, in OpenFile.__enter__(self)
    101 def __enter__(self):
    102     mode = self.mode.replace("t", "").replace("b", "") + "b"
--> 104     f = self.fs.open(self.path, mode=mode)
    106     self.fobjects = [f]
    108     if self.compression is not None:

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/spec.py:1037, in AbstractFileSystem.open(self, path, mode, block_size, cache_options, compression, **kwargs)
   1035 else:
   1036     ac = kwargs.pop("autocommit", not self._intrans)
-> 1037     f = self._open(
   1038         path,
   1039         mode=mode,
   1040         block_size=block_size,
   1041         autocommit=ac,
   1042         cache_options=cache_options,
   1043         **kwargs,
   1044     )
   1045     if compression is not None:
   1046         from fsspec.compression import compr

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/implementations/local.py:159, in LocalFileSystem._open(self, path, mode, block_size, **kwargs)
    157 if self.auto_mkdir and "w" in mode:
    158     self.makedirs(self._parent(path), exist_ok=True)
--> 159 return LocalFileOpener(path, mode, fs=self, **kwargs)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/implementations/local.py:254, in LocalFileOpener.__init__(self, path, mode, autocommit, fs, compression, **kwargs)
    252 self.compression = get_compression(path, compression)
    253 self.blocksize = io.DEFAULT_BUFFER_SIZE
--> 254 self._open()

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/implementations/local.py:259, in LocalFileOpener._open(self)
    257 if self.f is None or self.f.closed:
    258     if self.autocommit or "w" not in self.mode:
--> 259         self.f = open(self.path, mode=self.mode)
    260         if self.compression:
    261             compress = compr[self.compression]

FileNotFoundError: [Errno 2] No such file or directory: '//ccc/cont003/home/ra5563/ra5563/catalogue/DELTA/201202/gridT-2D_0000.json'
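
The load fails because the kerchunk reference JSON for this month is missing. A quick diagnostic one might run before resubmitting, with the pattern taken from the traceback above:

import glob

pattern = '/ccc/cont003/home/ra5563/ra5563/catalogue/DELTA/201202/gridT-2D_*.json'
print(sorted(glob.glob(pattern)) or 'no kerchunk reference files found')
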
In [6]:
%%time
monitor.auto(df,data,savefig,daskreport,outputpath,file_exp='SEDNA'
            )
---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
File <timed eval>:1, in <module>

NameError: name 'data' is not defined