In [1]:
%matplotlib inline
import pandas as pd
import socket
host = socket.getfqdn()

from core import  load, zoom, calc, save,plots,monitor
In [2]:
# Reload helper modules after editing ./core/*.py so code changes take
# effect without restarting the kernel.
import importlib
for _mod in (load, zoom, calc, save, plots):
    importlib.reload(_mod)
importlib.reload(monitor)  # bare last expression echoes the reloaded module
Out[2]:
<module 'core.monitor' from '/ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py'>
In [3]:
# 'month':  = 'JOBID' almost month but not really, 

# If you submit the job with job scheduler, above

#below is a list of environment variables one can pass
#%env local='2'
# local : if True, run a dask local cluster; if not true, the value
# set in 'local' gives the number of workers
# if no 'local' is given, local will default to 'True'
#%env ychunk='2'
#%env tchunk='2'
# controls chunk. 'False' sets no modification from original netcdf file's chunk.  
# ychunk=10 will group the original netcdf file to 10 by 10 
# tchunk=1 will chunk the time coordinate one by one
#%env control=FWC_SSH 
# name of control file to be used for computation/plots/save/ 
#%env file_exp= 
# 'file_exp': Which 'experiment' name is it? 
#.    this corresponds to the intake catalog name without path and .yaml
#%env year=
# for Validation, this corresponds to path/year/month 's year
# for monitoring, this corresponds to 'date'; having * means do all files in the monitoring directory
# setting it as *0[0-9] & *1[0-9] & *[2-3][0-9], the job can be separated into three lots.
#%env month=
# for monitoring  this corresponds to file path path-XIOS.{month}/
#
#%env save=   proceed saving?   True or False  , Default is set to True 
#%env plot=   proceed plotting?  True or False , Default is set to True 
#%env calc=   proceed computation? or just load computed result? True or False , Default is set to True 
#%env save=False
#%env lazy=False
In [4]:
%%time
# 'savefig': Do we save output in html? or not. keep it true. 
savefig=True
client,cluster,control,catalog_url,month,year,daskreport,outputpath = load.set_control(host)
!mkdir -p $outputpath
!mkdir -p $daskreport
client
local 20
local false ,workers 20
using host= irene6079.c-irene.mg1.tgcc.ccc.cea.fr starting dask cluster on local= False workers 20
10000000000
False
mg1: rome dask jobqueue
rome, slurmcluster starting
This code is running on  irene6079.c-irene.mg1.tgcc.ccc.cea.fr using  SEDNA_DELTA_MONITOR file experiment, read from  ../lib/SEDNA_DELTA_MONITOR.yaml  on year= 2012  on month= 0[1-2]  outputpath= ../results/SEDNA_DELTA_MONITOR/ daskreport= ../results/dask/6413750irene6079.c-irene.mg1.tgcc.ccc.cea.fr_SEDNA_DELTA_MONITOR_0[1-2]M_MLD_2D/
CPU times: user 2.56 s, sys: 645 ms, total: 3.21 s
Wall time: 2min 31s
Out[4]:

Client

Client-8c6cc4be-1344-11ed-81d0-080038b93b5d

Connection method: Cluster object Cluster type: dask_jobqueue.SLURMCluster
Dashboard: http://10.131.14.138:8787/status

Cluster Info

SLURMCluster

1fb46258

Dashboard: http://10.131.14.138:8787/status Workers: 28
Total threads: 252 Total memory: 428.40 GiB

Scheduler Info

Scheduler

Scheduler-2ab2b204-5d8a-42e2-a5e0-a8ae543ef2fa

Comm: tcp://10.131.14.138:44214 Workers: 28
Dashboard: http://10.131.14.138:8787/status Total threads: 252
Started: 2 minutes ago Total memory: 428.40 GiB

Workers

Worker: SLURMCluster-0-0

Comm: tcp://10.131.1.191:43369 Total threads: 9
Dashboard: http://10.131.1.191:39102/status Memory: 15.30 GiB
Nanny: tcp://10.131.1.191:35613
Local directory: /tmp/dask-worker-space/worker-kd4dpg6t

Worker: SLURMCluster-0-1

Comm: tcp://10.131.1.191:43159 Total threads: 9
Dashboard: http://10.131.1.191:40242/status Memory: 15.30 GiB
Nanny: tcp://10.131.1.191:46334
Local directory: /tmp/dask-worker-space/worker-gvhqoi3w

Worker: SLURMCluster-0-10

Comm: tcp://10.131.1.191:40089 Total threads: 9
Dashboard: http://10.131.1.191:41719/status Memory: 15.30 GiB
Nanny: tcp://10.131.1.191:38506
Local directory: /tmp/dask-worker-space/worker-mwo1awa9

Worker: SLURMCluster-0-11

Comm: tcp://10.131.1.191:33063 Total threads: 9
Dashboard: http://10.131.1.191:46104/status Memory: 15.30 GiB
Nanny: tcp://10.131.1.191:39436
Local directory: /tmp/dask-worker-space/worker-9sipdzd6

Worker: SLURMCluster-0-12

Comm: tcp://10.131.1.191:39018 Total threads: 9
Dashboard: http://10.131.1.191:36789/status Memory: 15.30 GiB
Nanny: tcp://10.131.1.191:33043
Local directory: /tmp/dask-worker-space/worker-arruvvsh

Worker: SLURMCluster-0-13

Comm: tcp://10.131.1.191:36919 Total threads: 9
Dashboard: http://10.131.1.191:39249/status Memory: 15.30 GiB
Nanny: tcp://10.131.1.191:45801
Local directory: /tmp/dask-worker-space/worker-8d17ejzv

Worker: SLURMCluster-0-2

Comm: tcp://10.131.1.191:32981 Total threads: 9
Dashboard: http://10.131.1.191:46703/status Memory: 15.30 GiB
Nanny: tcp://10.131.1.191:46810
Local directory: /tmp/dask-worker-space/worker-hs9gnxxm

Worker: SLURMCluster-0-3

Comm: tcp://10.131.1.191:44803 Total threads: 9
Dashboard: http://10.131.1.191:41083/status Memory: 15.30 GiB
Nanny: tcp://10.131.1.191:37009
Local directory: /tmp/dask-worker-space/worker-nxogzt38

Worker: SLURMCluster-0-4

Comm: tcp://10.131.1.191:41332 Total threads: 9
Dashboard: http://10.131.1.191:41135/status Memory: 15.30 GiB
Nanny: tcp://10.131.1.191:41924
Local directory: /tmp/dask-worker-space/worker-3ctpycwk

Worker: SLURMCluster-0-5

Comm: tcp://10.131.1.191:32790 Total threads: 9
Dashboard: http://10.131.1.191:37332/status Memory: 15.30 GiB
Nanny: tcp://10.131.1.191:39030
Local directory: /tmp/dask-worker-space/worker-zvva6fz_

Worker: SLURMCluster-0-6

Comm: tcp://10.131.1.191:42082 Total threads: 9
Dashboard: http://10.131.1.191:42559/status Memory: 15.30 GiB
Nanny: tcp://10.131.1.191:37396
Local directory: /tmp/dask-worker-space/worker-oe8nzn0t

Worker: SLURMCluster-0-7

Comm: tcp://10.131.1.191:46102 Total threads: 9
Dashboard: http://10.131.1.191:35849/status Memory: 15.30 GiB
Nanny: tcp://10.131.1.191:45655
Local directory: /tmp/dask-worker-space/worker-xnq8izbc

Worker: SLURMCluster-0-8

Comm: tcp://10.131.1.191:41069 Total threads: 9
Dashboard: http://10.131.1.191:33280/status Memory: 15.30 GiB
Nanny: tcp://10.131.1.191:34918
Local directory: /tmp/dask-worker-space/worker-k91dwxxz

Worker: SLURMCluster-0-9

Comm: tcp://10.131.1.191:42150 Total threads: 9
Dashboard: http://10.131.1.191:42648/status Memory: 15.30 GiB
Nanny: tcp://10.131.1.191:41929
Local directory: /tmp/dask-worker-space/worker-e1itahhu

Worker: SLURMCluster-1-0

Comm: tcp://10.131.14.149:40332 Total threads: 9
Dashboard: http://10.131.14.149:40723/status Memory: 15.30 GiB
Nanny: tcp://10.131.14.149:41188
Local directory: /tmp/dask-worker-space/worker-d_mob4q6

Worker: SLURMCluster-1-1

Comm: tcp://10.131.14.149:40086 Total threads: 9
Dashboard: http://10.131.14.149:37202/status Memory: 15.30 GiB
Nanny: tcp://10.131.14.149:42868
Local directory: /tmp/dask-worker-space/worker-vpwutbqq

Worker: SLURMCluster-1-10

Comm: tcp://10.131.14.149:34037 Total threads: 9
Dashboard: http://10.131.14.149:38006/status Memory: 15.30 GiB
Nanny: tcp://10.131.14.149:33651
Local directory: /tmp/dask-worker-space/worker-ravpr7sq

Worker: SLURMCluster-1-11

Comm: tcp://10.131.14.149:40669 Total threads: 9
Dashboard: http://10.131.14.149:39166/status Memory: 15.30 GiB
Nanny: tcp://10.131.14.149:41743
Local directory: /tmp/dask-worker-space/worker-3m_wa3_y

Worker: SLURMCluster-1-12

Comm: tcp://10.131.14.149:34146 Total threads: 9
Dashboard: http://10.131.14.149:35446/status Memory: 15.30 GiB
Nanny: tcp://10.131.14.149:39610
Local directory: /tmp/dask-worker-space/worker-nzro1s12

Worker: SLURMCluster-1-13

Comm: tcp://10.131.14.149:35102 Total threads: 9
Dashboard: http://10.131.14.149:42323/status Memory: 15.30 GiB
Nanny: tcp://10.131.14.149:41014
Local directory: /tmp/dask-worker-space/worker-4rl51n73

Worker: SLURMCluster-1-2

Comm: tcp://10.131.14.149:42703 Total threads: 9
Dashboard: http://10.131.14.149:39612/status Memory: 15.30 GiB
Nanny: tcp://10.131.14.149:41991
Local directory: /tmp/dask-worker-space/worker-eis1x1kd

Worker: SLURMCluster-1-3

Comm: tcp://10.131.14.149:46830 Total threads: 9
Dashboard: http://10.131.14.149:38370/status Memory: 15.30 GiB
Nanny: tcp://10.131.14.149:38191
Local directory: /tmp/dask-worker-space/worker-yd06uw8a

Worker: SLURMCluster-1-4

Comm: tcp://10.131.14.149:33606 Total threads: 9
Dashboard: http://10.131.14.149:43003/status Memory: 15.30 GiB
Nanny: tcp://10.131.14.149:32777
Local directory: /tmp/dask-worker-space/worker-raza6md2

Worker: SLURMCluster-1-5

Comm: tcp://10.131.14.149:35625 Total threads: 9
Dashboard: http://10.131.14.149:43172/status Memory: 15.30 GiB
Nanny: tcp://10.131.14.149:35812
Local directory: /tmp/dask-worker-space/worker-u3ee54i3

Worker: SLURMCluster-1-6

Comm: tcp://10.131.14.149:41757 Total threads: 9
Dashboard: http://10.131.14.149:43019/status Memory: 15.30 GiB
Nanny: tcp://10.131.14.149:38670
Local directory: /tmp/dask-worker-space/worker-dg0ybfb4

Worker: SLURMCluster-1-7

Comm: tcp://10.131.14.149:43816 Total threads: 9
Dashboard: http://10.131.14.149:44919/status Memory: 15.30 GiB
Nanny: tcp://10.131.14.149:45447
Local directory: /tmp/dask-worker-space/worker-kowiutiq

Worker: SLURMCluster-1-8

Comm: tcp://10.131.14.149:39404 Total threads: 9
Dashboard: http://10.131.14.149:42488/status Memory: 15.30 GiB
Nanny: tcp://10.131.14.149:33553
Local directory: /tmp/dask-worker-space/worker-q9msa13i

Worker: SLURMCluster-1-9

Comm: tcp://10.131.14.149:33208 Total threads: 9
Dashboard: http://10.131.14.149:38048/status Memory: 15.30 GiB
Nanny: tcp://10.131.14.149:46800
Local directory: /tmp/dask-worker-space/worker-thrjtjqp

read plotting information from a csv file¶

In [5]:
# Read the control table (one row per diagnostic to compute/plot)
# from the csv file named by `control`; `df` drives all later cells.
df=load.controlfile(control)
#Take out 'later' tagged computations
#df=df[~df['Value'].str.contains('later')]
df
Out[5]:
Value Inputs Equation Zone Plot Colourmap MinMax Unit Oldname Unnamed: 10
MLD_2D gridT-2D.samldr1_1 data.samldr1_1 ALL maps Blues (0,80) m M-5

Computation starts here¶

Each computation consists of

  1. Load NEMO data set
  2. Zoom data set
  3. Compute (or load computed data set)
  4. Save
  5. Plot
  6. Close
In [6]:
%%time
import os
calcswitch=os.environ.get('calc', 'True') 
lazy=os.environ.get('lazy','False' )
loaddata=((df.Inputs != '').any()) 
print('calcswitch=',calcswitch,'df.Inputs != nothing',loaddata, 'lazy=',lazy)
data = load.datas(catalog_url,df.Inputs,month,year,daskreport,lazy=lazy) if ((calcswitch=='True' )*loaddata) else 0 
data
calcswitch= True df.Inputs != nothing True lazy= False
../lib/SEDNA_DELTA_MONITOR.yaml
using param_xios reading  ../lib/SEDNA_DELTA_MONITOR.yaml
using param_xios reading  <bound method DataSourceBase.describe of sources:
  param_xios:
    args:
      combine: nested
      concat_dim: y
      urlpath: /ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/SEDNA-I/SEDNA_Domain_cfg_Tgt_20210423_tsh10m_L1/param_f32/x_*.nc
      xarray_kwargs:
        compat: override
        coords: minimal
        data_vars: minimal
        parallel: true
    description: SEDNA NEMO parameters from MPI output  nav_lon lat fails
    driver: intake_xarray.netcdf.NetCDFSource
    metadata:
      catalog_dir: /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/../lib/
>
{'name': 'param_xios', 'container': 'xarray', 'plugin': ['netcdf'], 'driver': ['netcdf'], 'description': 'SEDNA NEMO parameters from MPI output  nav_lon lat fails', 'direct_access': 'forbid', 'user_parameters': [{'name': 'path', 'description': 'file coordinate', 'type': 'str', 'default': '/ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/MESH/SEDNA_mesh_mask_Tgt_20210423_tsh10m_L1/param'}], 'metadata': {}, 'args': {'urlpath': '/ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/SEDNA-I/SEDNA_Domain_cfg_Tgt_20210423_tsh10m_L1/param_f32/x_*.nc', 'combine': 'nested', 'concat_dim': 'y'}}
0 read gridT-2D ['samldr1_1']
using load_data_xios_kerchunk reading  gridT-2D
using load_data_xios_kerchunk reading  <bound method DataSourceBase.describe of sources:
  data_xios_kerchunk:
    args:
      consolidated: false
      storage_options:
        fo: file:////ccc/cont003/home/ra5563/ra5563/catalogue/DELTA/20120[1-2]/gridT-2D_0[0-5][0-9][0-9].json
        target_protocol: file
      urlpath: reference://
    description: CREG025 NEMO outputs from different xios server in kerchunk format
    driver: intake_xarray.xzarr.ZarrSource
    metadata:
      catalog_dir: /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/../lib/
>
---------------------------------------------------------------------------
IndexError                                Traceback (most recent call last)
File <timed exec>:6, in <module>

File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/load.py:629, in datas(catalog_url, dfi, month, year, daskreport, lazy)
    624 datadict, paramdict = getdict(dfi)
    625 #print('datadict:',datadict)
    626 #if datadict == {}:
    627 #    data=0
    628 #else:
--> 629 data=outputs(catalog_url,datadict,month,year,daskreport,lazy) 
    630 for s in paramdict:
    631     print('param',s,'will be included in data')

File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/load.py:452, in outputs(catalog_url, datadict, month, year, daskreport, lazy)
    448 start = time.time()
    449 with performance_report(filename=daskreport+"_load_output_"+filename+"_"+month+year+".html"):
    450     #ds=load_data_xios_patch(cat,filename,month,catalog_url) 
--> 452     ds = load_data_xios(cat,filename,items,month,year) if not ('False' in lazy)  else load_data_xios_kerchunk(cat,filename,items,month,year,rome=True)
    453 extime=time.time() - start
    454 print('      took', extime, 'seconds')

File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/load.py:423, in load_data_xios_kerchunk(cat, filename, items, month, year, rome)
    421 desc=cat.data_xios_kerchunk(file=filename,month=month,year=year).describe         
    422 print('using load_data_xios_kerchunk reading ',desc)
--> 423 ds_x= [ prep(
    424     cat.data_xios_kerchunk(
    425         file=filename,month=month,year=year,eio=f'{xios:04}' 
    426     ).to_dask().drop_vars(dro,errors='ignore'))[items]
    427        for xios in xioss]
    429 return xr.concat(ds_x,dim='y',compat="override",coords="minimal")

File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/load.py:426, in <listcomp>(.0)
    421 desc=cat.data_xios_kerchunk(file=filename,month=month,year=year).describe         
    422 print('using load_data_xios_kerchunk reading ',desc)
    423 ds_x= [ prep(
    424     cat.data_xios_kerchunk(
    425         file=filename,month=month,year=year,eio=f'{xios:04}' 
--> 426     ).to_dask().drop_vars(dro,errors='ignore'))[items]
    427        for xios in xioss]
    429 return xr.concat(ds_x,dim='y',compat="override",coords="minimal")

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake_xarray/base.py:69, in DataSourceMixin.to_dask(self)
     67 def to_dask(self):
     68     """Return xarray object where variables are dask arrays"""
---> 69     return self.read_chunked()

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake_xarray/base.py:44, in DataSourceMixin.read_chunked(self)
     42 def read_chunked(self):
     43     """Return xarray object (which will have chunks)"""
---> 44     self._load_metadata()
     45     return self._ds

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake/source/base.py:236, in DataSourceBase._load_metadata(self)
    234 """load metadata only if needed"""
    235 if self._schema is None:
--> 236     self._schema = self._get_schema()
    237     self.dtype = self._schema.dtype
    238     self.shape = self._schema.shape

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake_xarray/base.py:18, in DataSourceMixin._get_schema(self)
     15 self.urlpath = self._get_cache(self.urlpath)[0]
     17 if self._ds is None:
---> 18     self._open_dataset()
     20     metadata = {
     21         'dims': dict(self._ds.dims),
     22         'data_vars': {k: list(self._ds[k].coords)
     23                       for k in self._ds.data_vars.keys()},
     24         'coords': tuple(self._ds.coords.keys()),
     25     }
     26     if getattr(self, 'on_server', False):

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake_xarray/xzarr.py:46, in ZarrSource._open_dataset(self)
     44     self._ds = xr.open_mfdataset(self.urlpath, **kw)
     45 else:
---> 46     self._ds = xr.open_dataset(self.urlpath, **kw)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/api.py:531, in open_dataset(filename_or_obj, engine, chunks, cache, decode_cf, mask_and_scale, decode_times, decode_timedelta, use_cftime, concat_characters, decode_coords, drop_variables, inline_array, backend_kwargs, **kwargs)
    519 decoders = _resolve_decoders_kwargs(
    520     decode_cf,
    521     open_backend_dataset_parameters=backend.open_dataset_parameters,
   (...)
    527     decode_coords=decode_coords,
    528 )
    530 overwrite_encoded_chunks = kwargs.pop("overwrite_encoded_chunks", None)
--> 531 backend_ds = backend.open_dataset(
    532     filename_or_obj,
    533     drop_variables=drop_variables,
    534     **decoders,
    535     **kwargs,
    536 )
    537 ds = _dataset_from_backend_dataset(
    538     backend_ds,
    539     filename_or_obj,
   (...)
    547     **kwargs,
    548 )
    549 return ds

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/zarr.py:837, in ZarrBackendEntrypoint.open_dataset(self, filename_or_obj, mask_and_scale, decode_times, concat_characters, decode_coords, drop_variables, use_cftime, decode_timedelta, group, mode, synchronizer, consolidated, chunk_store, storage_options, stacklevel)
    817 def open_dataset(
    818     self,
    819     filename_or_obj,
   (...)
    833     stacklevel=3,
    834 ):
    836     filename_or_obj = _normalize_path(filename_or_obj)
--> 837     store = ZarrStore.open_group(
    838         filename_or_obj,
    839         group=group,
    840         mode=mode,
    841         synchronizer=synchronizer,
    842         consolidated=consolidated,
    843         consolidate_on_close=False,
    844         chunk_store=chunk_store,
    845         storage_options=storage_options,
    846         stacklevel=stacklevel + 1,
    847     )
    849     store_entrypoint = StoreBackendEntrypoint()
    850     with close_on_error(store):

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/zarr.py:406, in ZarrStore.open_group(cls, store, mode, synchronizer, group, consolidated, consolidate_on_close, chunk_store, storage_options, append_dim, write_region, safe_chunks, stacklevel)
    404     zarr_group = zarr.open_consolidated(store, **open_kwargs)
    405 else:
--> 406     zarr_group = zarr.open_group(store, **open_kwargs)
    407 return cls(
    408     zarr_group,
    409     mode,
   (...)
    413     safe_chunks,
    414 )

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/zarr/hierarchy.py:1316, in open_group(store, mode, cache_attrs, synchronizer, path, chunk_store, storage_options, zarr_version)
   1270 """Open a group using file-mode-like semantics.
   1271 
   1272 Parameters
   (...)
   1312 
   1313 """
   1315 # handle polymorphic store arg
-> 1316 store = _normalize_store_arg(
   1317     store, storage_options=storage_options, mode=mode,
   1318     zarr_version=zarr_version)
   1319 if zarr_version is None:
   1320     zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/zarr/hierarchy.py:1192, in _normalize_store_arg(store, storage_options, mode, zarr_version)
   1190 if store is None:
   1191     return MemoryStore() if zarr_version == 2 else MemoryStoreV3()
-> 1192 return normalize_store_arg(store,
   1193                            storage_options=storage_options, mode=mode,
   1194                            zarr_version=zarr_version)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/zarr/storage.py:170, in normalize_store_arg(store, storage_options, mode, zarr_version)
    168     from zarr._storage.v3 import _normalize_store_arg_v3
    169     normalize_store = _normalize_store_arg_v3
--> 170 return normalize_store(store, storage_options, mode)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/zarr/storage.py:143, in _normalize_store_arg_v2(store, storage_options, mode)
    141 if isinstance(store, str):
    142     if "://" in store or "::" in store:
--> 143         return FSStore(store, mode=mode, **(storage_options or {}))
    144     elif storage_options:
    145         raise ValueError("storage_options passed with non-fsspec path")

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/zarr/storage.py:1321, in FSStore.__init__(self, url, normalize_keys, key_separator, mode, exceptions, dimension_separator, fs, check, create, missing_exceptions, **storage_options)
   1319 if protocol in (None, "file") and not storage_options.get("auto_mkdir"):
   1320     storage_options["auto_mkdir"] = True
-> 1321 self.map = fsspec.get_mapper(url, **{**mapper_options, **storage_options})
   1322 self.fs = self.map.fs  # for direct operations
   1323 self.path = self.fs._strip_protocol(url)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/mapping.py:230, in get_mapper(url, check, create, missing_exceptions, alternate_root, **kwargs)
    199 """Create key-value interface for given URL and options
    200 
    201 The URL will be of the form "protocol://location" and point to the root
   (...)
    227 ``FSMap`` instance, the dict-like key-value store.
    228 """
    229 # Removing protocol here - could defer to each open() on the backend
--> 230 fs, urlpath = url_to_fs(url, **kwargs)
    231 root = alternate_root if alternate_root is not None else urlpath
    232 return FSMap(root, fs, check, create, missing_exceptions=missing_exceptions)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/core.py:412, in url_to_fs(url, **kwargs)
    410     options = cls._get_kwargs_from_urls(url)
    411     update_storage_options(options, kwargs)
--> 412     fs = cls(**options)
    413     urlpath = fs._strip_protocol(url)
    414 return fs, urlpath

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/spec.py:76, in _Cached.__call__(cls, *args, **kwargs)
     74     return cls._cache[token]
     75 else:
---> 76     obj = super().__call__(*args, **kwargs)
     77     # Setting _fs_token here causes some static linters to complain.
     78     obj._fs_token_ = token

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/implementations/reference.py:140, in ReferenceFileSystem.__init__(self, fo, target, ref_storage_args, target_protocol, target_options, remote_protocol, remote_options, fs, template_overrides, simple_templates, loop, **kwargs)
    138 dic = dict(**(ref_storage_args or target_options or {}), **extra)
    139 # text JSON
--> 140 with open(fo, "rb", **dic) as f:
    141     logger.info("Read reference from URL %s", fo)
    142     text = f.read()

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/core.py:467, in open(urlpath, mode, compression, encoding, errors, protocol, newline, **kwargs)
    417 def open(
    418     urlpath,
    419     mode="rb",
   (...)
    425     **kwargs,
    426 ):
    427     """Given a path or paths, return one ``OpenFile`` object.
    428 
    429     Parameters
   (...)
    465     ``OpenFile`` object.
    466     """
--> 467     return open_files(
    468         urlpath=[urlpath],
    469         mode=mode,
    470         compression=compression,
    471         encoding=encoding,
    472         errors=errors,
    473         protocol=protocol,
    474         newline=newline,
    475         expand=False,
    476         **kwargs,
    477     )[0]

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/core.py:213, in OpenFiles.__getitem__(self, item)
    212 def __getitem__(self, item):
--> 213     out = super().__getitem__(item)
    214     if isinstance(item, slice):
    215         return OpenFiles(out, mode=self.mode, fs=self.fs)

IndexError: list index out of range
In [7]:
%%time
monitor.auto(df,data,savefig,daskreport,outputpath,file_exp='SEDNA'
            )
---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
File <timed eval>:1, in <module>

NameError: name 'data' is not defined