%matplotlib inline
import pandas as pd
import socket
host = socket.getfqdn()
from core import load, zoom, calc, save, plots, monitor
#reload funcs after updating ./core/*.py
import importlib
importlib.reload(load)
importlib.reload(zoom)
importlib.reload(calc)
importlib.reload(save)
importlib.reload(plots)
importlib.reload(monitor)
<module 'core.monitor' from '/ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py'>
If you submit the job through the job scheduler, the environment variables listed below can be passed.
'local': if True, run a local dask cluster; otherwise its value gives the number of workers. If 'local' is not given, it defaults to True.
%env ychunk='2' #%env tchunk='2'
'ychunk', 'tchunk': control chunking. 'False' keeps the original netCDF file's chunking unchanged.
ychunk=10 groups the original netCDF files 10 by 10 along y.
tchunk=1 chunks the time coordinate one step at a time. A minimal sketch of this kind of rechunking is shown below.
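The sketch below is an illustration only, not the notebook's core/load.py code; the file name and the dimension names 'y' and 'time_counter' are assumptions made for the example.
import os
import xarray as xr

# Hypothetical example of ychunk/tchunk-style chunking.
ychunk = os.environ.get('ychunk', 'False').strip("'")   # %env stores the quotes too
tchunk = os.environ.get('tchunk', 'False').strip("'")

chunks = {}
if ychunk != 'False':
    chunks['y'] = int(ychunk)              # e.g. ychunk=10 -> group 10 y-rows per chunk
if tchunk != 'False':
    chunks['time_counter'] = int(tchunk)   # e.g. tchunk=1 -> one time step per chunk

ds = xr.open_dataset('x_0000.nc', chunks=chunks or None)   # placeholder file name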
%env file_exp=
'file_exp': which 'experiment' is it? This corresponds to the intake catalog name, without the path and the .yaml extension.
#%env year=
For Validation, this corresponds to the 'year' part of path/year/month. For monitoring, it corresponds to the 'date': setting it to 0[0-9], 1[0-9] or [2-3][0-9] selects all files in the monitoring directory, so the job can be split into three batches. For the DELTA experiment, 'year' really means the year.
%env month=
For monitoring, this corresponds to the file path path-XIOS.{month}/.
For the DELTA experiment, 'month' really means the month.
Proceed with saving? True or False. Default is True.
Proceed with plotting? True or False. Default is True.
Proceed with the computation, or just load a previously computed result? True or False. Default is True.
Saves the output file used for plotting.
'lazy': False means the kerchunked files are used; True means kerchunk is not used and the netCDF output is read directly (see the sketch below).
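For reference, here is a minimal sketch (independent of core/load.py) of how one kerchunk reference JSON can be opened with xarray's zarr engine; the JSON path is taken from the catalog entry shown later in this notebook and is used here purely for illustration.
import xarray as xr

# Hypothetical example: open a single kerchunk reference file through fsspec's
# "reference://" protocol, which is what intake_xarray.xzarr.ZarrSource does.
ref_json = '/ccc/cont003/home/ra5563/ra5563/catalogue/DELTA/201204/gridT-2D_0000.json'
ds = xr.open_dataset(
    'reference://',
    engine='zarr',
    backend_kwargs={
        'consolidated': False,
        'storage_options': {'fo': ref_json, 'target_protocol': 'file'},
    },
)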
Name of the control file used for computation/plots/save. There are a number of M_xxx.csv files; a minimal pandas sketch of reading one follows the list below.
Monitor.sh calls M_MLD_2D;
AWTD.sh, Fluxnet.sh, Siconc.sh, IceClim.sh, FWC_SSH.sh, Integrals.sh and Sections.sh call:
M_AWTMD
M_Fluxnet
M_Ice_quantities
M_IceClim M_IceConce M_IceThick
M_FWC_2D M_FWC_integrals M_FWC_SSH M_SSH_anomaly
M_Mean_temp_velo M_Mooring
M_Sectionx M_Sectiony
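load.controlfile(control), used further below, reads one of these control files. A minimal pandas equivalent, assuming the file is a plain CSV with the columns shown later; the file name and location here are illustrative:
import pandas as pd

# Hypothetical equivalent of load.controlfile: read a control CSV and replace
# missing cells with empty strings so that comparisons on df.Inputs work.
df = pd.read_csv('M_SSH_anomaly.csv').fillna('')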
%%time
# 'savefig': save the output as HTML? Keep it True.
savefig=True
client, cluster, control, catalog_url, month, year, daskreport, outputpath = load.set_control(host)
!mkdir -p $outputpath
!mkdir -p $daskreport
client
local True
using host= irene5371.c-irene.mg1.tgcc.ccc.cea.fr
starting dask cluster on local= True workers 16 10000000000 rome
local cluster starting
This code is running on irene5371.c-irene.mg1.tgcc.ccc.cea.fr using SEDNA_DELTA_MONITOR file experiment, read from ../lib/SEDNA_DELTA_MONITOR.yaml on year= 2012 on month= 04
outputpath= ../results/SEDNA_DELTA_MONITOR/
daskreport= ../results/dask/6462408irene5371.c-irene.mg1.tgcc.ccc.cea.fr_SEDNA_DELTA_MONITOR_04M_SSH_anomaly/
CPU times: user 546 ms, sys: 132 ms, total: 678 ms
Wall time: 20.8 s
Client-1af74aff-1807-11ed-a632-080038b93cd9
Connection method: Cluster object | Cluster type: distributed.LocalCluster
Dashboard: http://127.0.0.1:8787/status

LocalCluster 24b01924
Dashboard: http://127.0.0.1:8787/status | Workers: 16
Total threads: 128 | Total memory: 251.06 GiB
Status: running | Using processes: True

Scheduler-8288fc97-d8e9-40e1-a259-2ac871981d7a
Comm: tcp://127.0.0.1:34256 | Workers: 16
Dashboard: http://127.0.0.1:8787/status | Total threads: 128
Started: Just now | Total memory: 251.06 GiB
16 workers, each configured identically:
Comm: tcp://127.0.0.1:<port> | Total threads: 8
Dashboard: http://127.0.0.1:<port>/status | Memory: 15.69 GiB
Nanny: tcp://127.0.0.1:<port>
Local directory: /tmp/dask-worker-space/worker-<id>
(the individual workers differ only in their port numbers and worker directory suffixes)
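For reference, a comparable local Dask cluster could be started by hand as sketched below; the actual configuration is done inside core/load.py (set_control) from the environment variables, so this block is an illustration only.
from dask.distributed import Client, LocalCluster

# Illustration: 16 workers x 8 threads with ~15.69 GiB each, matching the
# cluster summary printed above.
cluster = LocalCluster(n_workers=16, threads_per_worker=8, memory_limit='15.69GiB')
client = Client(cluster)
client   # shows the dashboard link and worker summary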
df=load.controlfile(control)
#Take out 'later' tagged computations
#df=df[~df['Value'].str.contains('later')]
df
Value | Inputs | Equation | Zone | Plot | Colourmap | MinMax | Unit | Oldname | Unnamed: 10
---|---|---|---|---|---|---|---|---|---
SSH_anomaly | gridT-2D.ssh | calc.SSH_anomaly(data) | ALL | maps | Spectral_r | (-0.35,0.35) | m | M-2 |
Each computation consists of reading the Inputs, applying the Equation, plotting the result over the given Zone, and saving it.
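A purely illustrative sketch of such a loop follows; the actual logic lives in core/monitor.py, and the use of eval and the column access here are assumptions.
# Illustration only: roughly the role played by monitor.auto(df, data, ...).
# The Equation column holds a Python expression such as "calc.SSH_anomaly(data)";
# plotting and saving would then use the Zone/Plot/Colourmap/MinMax/Unit columns.
for _, row in df.iterrows():
    result = eval(row['Equation'])   # assumes 'data' and 'calc' are in scope
    # plotting and saving are handled by core/plots.py and core/save.py in the real code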
%%time
import os
calcswitch = os.environ.get('calc', 'True')
lazy = os.environ.get('lazy', 'False')
loaddata = (df.Inputs != '').any()
print('calcswitch=', calcswitch, 'df.Inputs != nothing', loaddata, 'lazy=', lazy)
data = load.datas(catalog_url, df.Inputs, month, year, daskreport, lazy=lazy) if (calcswitch == 'True' and loaddata) else 0
data
calcswitch= True df.Inputs != nothing True lazy= False
../lib/SEDNA_DELTA_MONITOR.yaml using param_xios
reading ../lib/SEDNA_DELTA_MONITOR.yaml using param_xios
reading <bound method DataSourceBase.describe of sources: param_xios: args: combine: nested concat_dim: y urlpath: /ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/SEDNA-I/SEDNA_Domain_cfg_Tgt_20210423_tsh10m_L1/param_f32/x_*.nc xarray_kwargs: compat: override coords: minimal data_vars: minimal parallel: true description: SEDNA NEMO parameters from MPI output nav_lon lat fails driver: intake_xarray.netcdf.NetCDFSource metadata: catalog_dir: /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/../lib/ >
{'name': 'param_xios', 'container': 'xarray', 'plugin': ['netcdf'], 'driver': ['netcdf'], 'description': 'SEDNA NEMO parameters from MPI output nav_lon lat fails', 'direct_access': 'forbid', 'user_parameters': [{'name': 'path', 'description': 'file coordinate', 'type': 'str', 'default': '/ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/MESH/SEDNA_mesh_mask_Tgt_20210423_tsh10m_L1/param'}], 'metadata': {}, 'args': {'urlpath': '/ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/SEDNA-I/SEDNA_Domain_cfg_Tgt_20210423_tsh10m_L1/param_f32/x_*.nc', 'combine': 'nested', 'concat_dim': 'y'}}
0 read gridT-2D ['ssh'] lazy= False
using load_data_xios_kerchunk reading gridT-2D
using load_data_xios_kerchunk reading <bound method DataSourceBase.describe of sources: data_xios_kerchunk: args: consolidated: false storage_options: fo: file:////ccc/cont003/home/ra5563/ra5563/catalogue/DELTA/201204/gridT-2D_0[0-5][0-9][0-9].json target_protocol: file urlpath: reference:// description: CREG025 NEMO outputs from different xios server in kerchunk format driver: intake_xarray.xzarr.ZarrSource metadata: catalog_dir: /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/../lib/ >
---------------------------------------------------------------------------
FileNotFoundError                         Traceback (most recent call last)
File <timed exec>:6, in <module>

File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/load.py:677, in datas(catalog_url, dfi, month, year, daskreport, lazy)
--> 677     data=outputs(catalog_url,datadict,month,year,daskreport,lazy)

File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/load.py:496, in outputs(catalog_url, datadict, month, year, daskreport, lazy)
--> 496     ds = load_data_xios(cat,filename,items,month,year) if ('True' in lazy) else load_data_xios_kerchunk(cat,filename,items,month,year,rome=True)

File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/load.py:470, in <listcomp>(.0)
--> 470     ).to_dask().drop_vars(dro,errors='ignore')[items])

(... frames through intake_xarray, xarray's zarr backend, zarr and fsspec's ReferenceFileSystem ...)

File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/fsspec/implementations/local.py:259, in LocalFileOpener._open(self)
--> 259     self.f = open(self.path, mode=self.mode)

FileNotFoundError: [Errno 2] No such file or directory: '//ccc/cont003/home/ra5563/ra5563/catalogue/DELTA/201204/gridT-2D_0000.json'
%%time
monitor.auto(df, data, savefig, daskreport, outputpath, file_exp='SEDNA')
---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
File <timed eval>:1, in <module>
NameError: name 'data' is not defined