In [1]:
%matplotlib inline
import pandas as pd
import socket
host = socket.getfqdn()

from core import load, zoom, calc, save, plots, monitor
In [2]:
#reload funcs after updating ./core/*.py
import importlib
importlib.reload(load)
importlib.reload(zoom)
importlib.reload(calc)
importlib.reload(save)
importlib.reload(plots)
importlib.reload(monitor)
Out[2]:
<module 'core.monitor' from '/ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py'>

If you submit the job with a job scheduler, see above

Below is the list of environment variables one can pass:

%env local='2'

local: if 'True', run a local dask cluster; otherwise use the number of workers set in 'local'. If no 'local' is given, local defaults to 'True'.
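
A minimal sketch of how such a flag could be parsed (illustrative only; the real logic lives in core/load.py and may differ):

    import os

    # Hypothetical parser: 'local' may be 'True', 'False', or a worker count.
    raw = os.environ.get('local', 'True').strip("'\"")  # tolerate quoted values like '2'
    if raw == 'True':
        local, nworkers = True, None        # let dask pick the number of workers
    elif raw.isdigit():
        local, nworkers = True, int(raw)    # e.g. local='2' -> 2 workers
    else:
        local, nworkers = False, None       # anything else: not a local cluster
    print(local, nworkers)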

%env ychunk='2'

%env tchunk='2'

These control chunking. 'False' keeps the chunking of the original netcdf files unchanged.

ychunk=10 groups the original netcdf files ten by ten along y.

tchunk=1 chunks the time coordinate one by one.
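
As an illustration, applying these chunk switches to an xarray dataset could look like this (the dimension names 'y' and 'time_counter' are assumptions based on the NEMO/XIOS output handled later in this notebook):

    import xarray as xr

    def apply_chunks(ds: xr.Dataset, ychunk='False', tchunk='False') -> xr.Dataset:
        """Hypothetical helper: rechunk unless a switch is 'False'."""
        chunks = {}
        if ychunk != 'False':
            chunks['y'] = int(ychunk)             # e.g. ychunk=10
        if tchunk != 'False':
            chunks['time_counter'] = int(tchunk)  # e.g. tchunk=1
        return ds.chunk(chunks) if chunks else ds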

%env file_exp=

'file_exp': which 'experiment' name is it?

This corresponds to the intake catalog name, without the path and the .yaml extension.

%env year=

For Validation, this corresponds to the year in path/year/month.

For monitoring, this corresponds to 'date'; a value containing * means do all files in the monitoring directory.

Setting it as 0[0-9], 1[0-9] and *[2-3][0-9], the job can be separated into three lots.
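
For example, those three lot patterns can be checked against day strings with fnmatch (the two-digit day format is an assumption for illustration):

    import fnmatch

    days = [f'{d:02}' for d in range(1, 32)]        # hypothetical day-of-month strings
    lots = ['0[0-9]', '1[0-9]', '*[2-3][0-9]']      # the three lots described above
    for pattern in lots:
        print(pattern, [d for d in days if fnmatch.fnmatch(d, pattern)])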

%env month=

For monitoring, this corresponds to the file path path-XIOS.{month}/.


%env control=FWC_SSH

Name of the control file to be used for computation/plots/save, and how it is called from Monitor.sh.

Monitor.sh calls M_MLD_2D,

and AWTD.sh, Fluxnet.sh, Siconc.sh, IceClim.sh, FWC_SSH.sh call:

  • AWTD.sh: M_AWTMD

  • Fluxnet.sh: M_Fluxnet

  • Siconc.sh: M_Ice_quantities

  • IceClim.sh: M_IceClim M_IceConce M_IceThick

  • FWC_SSH.sh: M_FWC_2D M_FWC_integrals M_FWC_SSH M_SSH_anomaly

  • Integrals.sh: M_Mean_temp_velo M_Mooring M_Sectionx M_Sectiony

%env save= : proceed with saving? True or False. Default is True.

%env plot= : proceed with plotting? True or False. Default is True.

%env calc= : proceed with the computation, or just load the computed result? True or False. Default is True.

%env save=False

%env lazy=False
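
A minimal sketch of how these True/False switches could be read (illustrative only; the actual parsing happens in core/load.py):

    import os

    def env_flag(name: str, default: str = 'True') -> bool:
        """Hypothetical helper: read an environment switch as a boolean."""
        return os.environ.get(name, default) == 'True'

    save, plot, calc = env_flag('save'), env_flag('plot'), env_flag('calc')
    lazy = env_flag('lazy', default='False')
    print(save, plot, calc, lazy)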

For debugging, this cell can help:

%env file_exp=SEDNA_DELTA_MONITOR
%env year=2012
%env month=01        # or 0[1-2]

%env ychunk=10       # or %env ychunk=False
%env save=False
%env plot=True
%env calc=True       # or False
# %env lazy=False

%env control=M_Fluxnet

M_Sectiony is ok with ychunk=False, local=True, lazy=False.

In [3]:
%%time
# 'savefig': whether to save the output as html; keep it True.
savefig=True
client,cluster,control,catalog_url,month,year,daskreport,outputpath = load.set_control(host)
!mkdir -p $outputpath
!mkdir -p $daskreport
client
local True
using host= irene4722.c-irene.mg1.tgcc.ccc.cea.fr starting dask cluster on local= True workers 16
10000000000
False
tgcc local cluster starting
This code is running on  irene4722.c-irene.mg1.tgcc.ccc.cea.fr using  SEDNA_DELTA_MONITOR file experiment, read from  ../lib/SEDNA_DELTA_MONITOR.yaml  on year= 2012  on month= 01  outputpath= ../results/SEDNA_DELTA_MONITOR/ daskreport= ../results/dask/6419176irene4722.c-irene.mg1.tgcc.ccc.cea.fr_SEDNA_DELTA_MONITOR_01M_SSH_anomaly/
CPU times: user 3.91 s, sys: 751 ms, total: 4.66 s
Wall time: 1min 40s
Out[3]:

Client: Client-9cb7c78d-13d9-11ed-accf-080038b94001
Connection method: Cluster object; Cluster type: distributed.LocalCluster
Dashboard: http://127.0.0.1:8787/status

LocalCluster 3a41abcc: 64 workers, 256 total threads, 251.06 GiB total memory; status: running; using processes: True

Scheduler Scheduler-f2d351af-fbe8-446a-85a6-f3586b7338e8: comm tcp://127.0.0.1:33069, started 1 minute ago

Workers 0-63: 4 threads and 3.92 GiB memory each, local directories under /tmp/dask-worker-space/

read plotting information from a csv file

In [4]:
df=load.controlfile(control)
#Take out 'later' tagged computations
#df=df[~df['Value'].str.contains('later')]
df
Out[4]:
Value        Inputs        Equation                 Zone  Plot  Colourmap   MinMax        Unit  Oldname  Unnamed: 10
SSH_anomaly  gridT-2D.ssh  calc.SSH_anomaly(data)   ALL   maps  Spectral_r  (-0.35,0.35)  m     M-2
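
Each row of this control table drives one computation. A hedged sketch of how a row might be dispatched (the getattr dispatch is an illustrative assumption about how the Value/Equation columns map onto core.calc):

    from core import calc

    row = df.iloc[0]                  # the SSH_anomaly row above
    func = getattr(calc, row.Value)   # calc.SSH_anomaly, as the Equation column suggests
    # result = func(data)             # 'data' comes from load.datas(...) below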

Computation starts here

Each computation consists of the following steps (a sketch follows the list):

  1. Load NEMO data set
  2. Zoom data set
  3. Compute (or load computed data set)
  4. Save
  5. Plot
  6. Close
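
A minimal sketch of those six steps, using the core modules imported at the top (load.datas and calc.SSH_anomaly appear in this notebook; the other function names are hypothetical placeholders, not the actual core API):

    from core import load, zoom, calc, save, plots

    def run_one(row, catalog_url, month, year, daskreport, outputpath):
        data = load.datas(catalog_url, row.Inputs, month, year, daskreport)  # 1. load
        data = zoom.zoom(data, row.Zone)          # 2. zoom    (name assumed)
        result = calc.SSH_anomaly(data)           # 3. compute (from the Equation column)
        save.save(result, outputpath)             # 4. save    (name assumed)
        plots.plot(result, row, outputpath)       # 5. plot    (name assumed)
        data.close()                              # 6. close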
In [5]:
%%time
import os
calcswitch=os.environ.get('calc','True')
lazy=os.environ.get('lazy','False')
loaddata=(df.Inputs != '').any()
print('calcswitch=',calcswitch,'df.Inputs != nothing',loaddata,'lazy=',lazy)
data = load.datas(catalog_url,df.Inputs,month,year,daskreport,lazy=lazy) if (calcswitch=='True' and loaddata) else 0
data
calcswitch= True df.Inputs != nothing True lazy= False
../lib/SEDNA_DELTA_MONITOR.yaml
using param_xios reading  ../lib/SEDNA_DELTA_MONITOR.yaml
using param_xios reading  <bound method DataSourceBase.describe of sources:
  param_xios:
    args:
      combine: nested
      concat_dim: y
      urlpath: /ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/SEDNA-I/SEDNA_Domain_cfg_Tgt_20210423_tsh10m_L1/param_f32/x_*.nc
      xarray_kwargs:
        compat: override
        coords: minimal
        data_vars: minimal
        parallel: true
    description: SEDNA NEMO parameters from MPI output  nav_lon lat fails
    driver: intake_xarray.netcdf.NetCDFSource
    metadata:
      catalog_dir: /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/../lib/
>
{'name': 'param_xios', 'container': 'xarray', 'plugin': ['netcdf'], 'driver': ['netcdf'], 'description': 'SEDNA NEMO parameters from MPI output  nav_lon lat fails', 'direct_access': 'forbid', 'user_parameters': [{'name': 'path', 'description': 'file coordinate', 'type': 'str', 'default': '/ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/MESH/SEDNA_mesh_mask_Tgt_20210423_tsh10m_L1/param'}], 'metadata': {}, 'args': {'urlpath': '/ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/SEDNA-I/SEDNA_Domain_cfg_Tgt_20210423_tsh10m_L1/param_f32/x_*.nc', 'combine': 'nested', 'concat_dim': 'y'}}
0 read gridT-2D ['ssh']
lazy= False
using load_data_xios_kerchunk reading  gridT-2D
using load_data_xios_kerchunk reading  <bound method DataSourceBase.describe of sources:
  data_xios_kerchunk:
    args:
      consolidated: false
      storage_options:
        fo: file:////ccc/cont003/home/ra5563/ra5563/catalogue/DELTA/201201/gridT-2D_0[0-5][0-9][0-9].json
        target_protocol: file
      urlpath: reference://
    description: CREG025 NEMO outputs from different xios server in kerchunk format
    driver: intake_xarray.xzarr.ZarrSource
    metadata:
      catalog_dir: /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/../lib/
>
---------------------------------------------------------------------------
FileNotFoundError                         Traceback (most recent call last)
File <timed exec>:6, in <module>

File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/load.py:677, in datas(catalog_url, dfi, month, year, daskreport, lazy)
    672 datadict, paramdict = getdict(dfi)
    673 #print('datadict:',datadict)
    674 #if datadict == {}:
    675 #    data=0
    676 #else:
--> 677 data=outputs(catalog_url,datadict,month,year,daskreport,lazy) 
    678 for s in paramdict:
    679     print('param',s,'will be included in data')

File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/load.py:496, in outputs(catalog_url, datadict, month, year, daskreport, lazy)
    493 with performance_report(filename=daskreport+"_load_output_"+filename+"_"+month+year+".html"):
    494     #ds=load_data_xios_patch(cat,filename,month,catalog_url) 
    495     print("lazy=",lazy)
--> 496     ds = load_data_xios(cat,filename,items,month,year) if ('True' in lazy)  else load_data_xios_kerchunk(cat,filename,items,month,year,rome=True)
    497 extime=time.time() - start
    498 print('      took', extime, 'seconds')

File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/load.py:467, in load_data_xios_kerchunk(cat, filename, items, month, year, rome)
    465 desc=cat.data_xios_kerchunk(file=filename,month=month,year=year).describe         
    466 print('using load_data_xios_kerchunk reading ',desc)
--> 467 ds_x= [ prep(
    468     cat.data_xios_kerchunk(
    469         file=filename,month=month,year=year,eio=f'{xios:04}' 
    470     ).to_dask().drop_vars(dro,errors='ignore')[items])
    471        for xios in xioss]
    473 return xr.concat(ds_x,dim='y',compat="override",coords="minimal")

File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/load.py:470, in <listcomp>(.0)
    465 desc=cat.data_xios_kerchunk(file=filename,month=month,year=year).describe         
    466 print('using load_data_xios_kerchunk reading ',desc)
    467 ds_x= [ prep(
    468     cat.data_xios_kerchunk(
    469         file=filename,month=month,year=year,eio=f'{xios:04}' 
--> 470     ).to_dask().drop_vars(dro,errors='ignore')[items])
    471        for xios in xioss]
    473 return xr.concat(ds_x,dim='y',compat="override",coords="minimal")

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake_xarray/base.py:69, in DataSourceMixin.to_dask(self)
     67 def to_dask(self):
     68     """Return xarray object where variables are dask arrays"""
---> 69     return self.read_chunked()

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake_xarray/base.py:44, in DataSourceMixin.read_chunked(self)
     42 def read_chunked(self):
     43     """Return xarray object (which will have chunks)"""
---> 44     self._load_metadata()
     45     return self._ds

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake/source/base.py:236, in DataSourceBase._load_metadata(self)
    234 """load metadata only if needed"""
    235 if self._schema is None:
--> 236     self._schema = self._get_schema()
    237     self.dtype = self._schema.dtype
    238     self.shape = self._schema.shape

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake_xarray/base.py:18, in DataSourceMixin._get_schema(self)
     15 self.urlpath = self._get_cache(self.urlpath)[0]
     17 if self._ds is None:
---> 18     self._open_dataset()
     20     metadata = {
     21         'dims': dict(self._ds.dims),
     22         'data_vars': {k: list(self._ds[k].coords)
     23                       for k in self._ds.data_vars.keys()},
     24         'coords': tuple(self._ds.coords.keys()),
     25     }
     26     if getattr(self, 'on_server', False):

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake_xarray/xzarr.py:46, in ZarrSource._open_dataset(self)
     44     self._ds = xr.open_mfdataset(self.urlpath, **kw)
     45 else:
---> 46     self._ds = xr.open_dataset(self.urlpath, **kw)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/api.py:531, in open_dataset(filename_or_obj, engine, chunks, cache, decode_cf, mask_and_scale, decode_times, decode_timedelta, use_cftime, concat_characters, decode_coords, drop_variables, inline_array, backend_kwargs, **kwargs)
    519 decoders = _resolve_decoders_kwargs(
    520     decode_cf,
    521     open_backend_dataset_parameters=backend.open_dataset_parameters,
   (...)
    527     decode_coords=decode_coords,
    528 )
    530 overwrite_encoded_chunks = kwargs.pop("overwrite_encoded_chunks", None)
--> 531 backend_ds = backend.open_dataset(
    532     filename_or_obj,
    533     drop_variables=drop_variables,
    534     **decoders,
    535     **kwargs,
    536 )
    537 ds = _dataset_from_backend_dataset(
    538     backend_ds,
    539     filename_or_obj,
   (...)
    547     **kwargs,
    548 )
    549 return ds

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/zarr.py:837, in ZarrBackendEntrypoint.open_dataset(self, filename_or_obj, mask_and_scale, decode_times, concat_characters, decode_coords, drop_variables, use_cftime, decode_timedelta, group, mode, synchronizer, consolidated, chunk_store, storage_options, stacklevel)
    817 def open_dataset(
    818     self,
    819     filename_or_obj,
   (...)
    833     stacklevel=3,
    834 ):
    836     filename_or_obj = _normalize_path(filename_or_obj)
--> 837     store = ZarrStore.open_group(
    838         filename_or_obj,
    839         group=group,
    840         mode=mode,
    841         synchronizer=synchronizer,
    842         consolidated=consolidated,
    843         consolidate_on_close=False,
    844         chunk_store=chunk_store,
    845         storage_options=storage_options,
    846         stacklevel=stacklevel + 1,
    847     )
    849     store_entrypoint = StoreBackendEntrypoint()
    850     with close_on_error(store):

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/zarr.py:406, in ZarrStore.open_group(cls, store, mode, synchronizer, group, consolidated, consolidate_on_close, chunk_store, storage_options, append_dim, write_region, safe_chunks, stacklevel)
    404     zarr_group = zarr.open_consolidated(store, **open_kwargs)
    405 else:
--> 406     zarr_group = zarr.open_group(store, **open_kwargs)
    407 return cls(
    408     zarr_group,
    409     mode,
   (...)
    413     safe_chunks,
    414 )

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/zarr/hierarchy.py:1316, in open_group(store, mode, cache_attrs, synchronizer, path, chunk_store, storage_options, zarr_version)
   1270 """Open a group using file-mode-like semantics.
   1271 
   1272 Parameters
   (...)
   1312 
   1313 """
   1315 # handle polymorphic store arg
-> 1316 store = _normalize_store_arg(
   1317     store, storage_options=storage_options, mode=mode,
   1318     zarr_version=zarr_version)
   1319 if zarr_version is None:
   1320     zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/zarr/hierarchy.py:1192, in _normalize_store_arg(store, storage_options, mode, zarr_version)
   1190 if store is None:
   1191     return MemoryStore() if zarr_version == 2 else MemoryStoreV3()
-> 1192 return normalize_store_arg(store,
   1193                            storage_options=storage_options, mode=mode,
   1194                            zarr_version=zarr_version)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/zarr/storage.py:170, in normalize_store_arg(store, storage_options, mode, zarr_version)
    168     from zarr._storage.v3 import _normalize_store_arg_v3
    169     normalize_store = _normalize_store_arg_v3
--> 170 return normalize_store(store, storage_options, mode)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/zarr/storage.py:143, in _normalize_store_arg_v2(store, storage_options, mode)
    141 if isinstance(store, str):
    142     if "://" in store or "::" in store:
--> 143         return FSStore(store, mode=mode, **(storage_options or {}))
    144     elif storage_options:
    145         raise ValueError("storage_options passed with non-fsspec path")

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/zarr/storage.py:1321, in FSStore.__init__(self, url, normalize_keys, key_separator, mode, exceptions, dimension_separator, fs, check, create, missing_exceptions, **storage_options)
   1319 if protocol in (None, "file") and not storage_options.get("auto_mkdir"):
   1320     storage_options["auto_mkdir"] = True
-> 1321 self.map = fsspec.get_mapper(url, **{**mapper_options, **storage_options})
   1322 self.fs = self.map.fs  # for direct operations
   1323 self.path = self.fs._strip_protocol(url)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/mapping.py:230, in get_mapper(url, check, create, missing_exceptions, alternate_root, **kwargs)
    199 """Create key-value interface for given URL and options
    200 
    201 The URL will be of the form "protocol://location" and point to the root
   (...)
    227 ``FSMap`` instance, the dict-like key-value store.
    228 """
    229 # Removing protocol here - could defer to each open() on the backend
--> 230 fs, urlpath = url_to_fs(url, **kwargs)
    231 root = alternate_root if alternate_root is not None else urlpath
    232 return FSMap(root, fs, check, create, missing_exceptions=missing_exceptions)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/core.py:412, in url_to_fs(url, **kwargs)
    410     options = cls._get_kwargs_from_urls(url)
    411     update_storage_options(options, kwargs)
--> 412     fs = cls(**options)
    413     urlpath = fs._strip_protocol(url)
    414 return fs, urlpath

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/spec.py:76, in _Cached.__call__(cls, *args, **kwargs)
     74     return cls._cache[token]
     75 else:
---> 76     obj = super().__call__(*args, **kwargs)
     77     # Setting _fs_token here causes some static linters to complain.
     78     obj._fs_token_ = token

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/implementations/reference.py:140, in ReferenceFileSystem.__init__(self, fo, target, ref_storage_args, target_protocol, target_options, remote_protocol, remote_options, fs, template_overrides, simple_templates, loop, **kwargs)
    138 dic = dict(**(ref_storage_args or target_options or {}), **extra)
    139 # text JSON
--> 140 with open(fo, "rb", **dic) as f:
    141     logger.info("Read reference from URL %s", fo)
    142     text = f.read()

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/core.py:104, in OpenFile.__enter__(self)
    101 def __enter__(self):
    102     mode = self.mode.replace("t", "").replace("b", "") + "b"
--> 104     f = self.fs.open(self.path, mode=mode)
    106     self.fobjects = [f]
    108     if self.compression is not None:

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/spec.py:1037, in AbstractFileSystem.open(self, path, mode, block_size, cache_options, compression, **kwargs)
   1035 else:
   1036     ac = kwargs.pop("autocommit", not self._intrans)
-> 1037     f = self._open(
   1038         path,
   1039         mode=mode,
   1040         block_size=block_size,
   1041         autocommit=ac,
   1042         cache_options=cache_options,
   1043         **kwargs,
   1044     )
   1045     if compression is not None:
   1046         from fsspec.compression import compr

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/implementations/local.py:159, in LocalFileSystem._open(self, path, mode, block_size, **kwargs)
    157 if self.auto_mkdir and "w" in mode:
    158     self.makedirs(self._parent(path), exist_ok=True)
--> 159 return LocalFileOpener(path, mode, fs=self, **kwargs)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/implementations/local.py:254, in LocalFileOpener.__init__(self, path, mode, autocommit, fs, compression, **kwargs)
    252 self.compression = get_compression(path, compression)
    253 self.blocksize = io.DEFAULT_BUFFER_SIZE
--> 254 self._open()

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/implementations/local.py:259, in LocalFileOpener._open(self)
    257 if self.f is None or self.f.closed:
    258     if self.autocommit or "w" not in self.mode:
--> 259         self.f = open(self.path, mode=self.mode)
    260         if self.compression:
    261             compress = compr[self.compression]

FileNotFoundError: [Errno 2] No such file or directory: '//ccc/cont003/home/ra5563/ra5563/catalogue/DELTA/201201/gridT-2D_0000.json'
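
The traceback above shows the kerchunk reference JSON for xios server 0000 is missing. A quick hedged check of which reference files actually exist, using the glob taken from the catalog args above:

    import glob

    pattern = '/ccc/cont003/home/ra5563/ra5563/catalogue/DELTA/201201/gridT-2D_0[0-5][0-9][0-9].json'
    found = sorted(glob.glob(pattern))
    print(len(found), 'reference files found')   # 0 here would explain the FileNotFoundError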
In [6]:
%%time
monitor.auto(df,data,savefig,daskreport,outputpath,file_exp='SEDNA'
            )
---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
File <timed eval>:1, in <module>

NameError: name 'data' is not defined