%matplotlib inline
# Notebook setup: render matplotlib figures inline, import the project's
# monitoring helpers from ./core/, and record which host we run on.
import pandas as pd
import socket
# Fully-qualified host name — used later by load.set_control to pick the
# cluster configuration for this machine.
host = socket.getfqdn()
from core import load, zoom, calc, save,plots,monitor
#reload funcs after updating ./core/*.py
# importlib.reload re-executes each module so edits to ./core/*.py take
# effect without restarting the notebook kernel.
import importlib
importlib.reload(load)
importlib.reload(zoom)
importlib.reload(calc)
importlib.reload(save)
importlib.reload(plots)
importlib.reload(monitor)
<module 'core.monitor' from '/ccc/work/cont003/gen7420/talandel/TOOLS/monitor-sedna/notebook/core/monitor.py'>
If you submit the job with a job scheduler, below is a list of environment variables one can pass.
local : if True, run a local dask cluster; if not True, use the number of workers set in 'local'. If no 'local' is given, local is automatically set to 'True'.
%env ychunk='2', #%env tchunk='2'
Controls chunking. 'False' applies no modification to the original netcdf file's chunks.
ychunk=10 will group the original netcdf files 10 by 10.
tchunk=1 will chunk the time coordinate one by one.
%env file_exp=
'file_exp': Which 'experiment' name is it? This corresponds to the intake catalog name, without the path and the .yaml extension.
#%env year=
For Validation, this corresponds to the 'year' in path/year/month. For monitoring, this corresponds to 'date'; 'means' processes all files in the monitoring directory. By setting it to 0[0-9], 1[0-9] and [2-3][0-9], the job can be separated into three lots. For the DELTA experiment, 'year' really corresponds to the year.
%env month=
For monitoring, this corresponds to the file path path-XIOS.{month}/.
For the DELTA experiment, this really corresponds to the 'month'.
Proceed with saving? True or False. Default is set to True.
Proceed with plotting? True or False. Default is set to True.
Proceed with computation, or just load a previously computed result? True or False. Default is set to True.
save output file used for plotting
using kerchunked file -> False, not using kerchunk -> True
name of control file to be used for computation/plots/save/ We have number of M_xxx.csv
Monitor.sh calls M_MLD_2D
and AWTD.sh, Fluxnet.sh, Siconc.sh, IceClim.sh, FWC_SSH.sh, Integrals.sh , Sections.sh
M_AWTMD
M_Fluxnet
M_Ice_quantities
M_IceClim M_IceConce M_IceThick
M_FWC_2D M_FWC_integrals M_FWC_SSH M_SSH_anomaly
M_Mean_temp_velo M_Mooring
M_Sectionx M_Sectiony
%%time
# 'savefig': Do we save output in html? or not. keep it true.
savefig=True
# Read the job configuration from environment variables and start the dask
# cluster for this host; returns the client/cluster handles, the control-file
# name, the intake catalog URL, the target month/year, and output directories.
# (Implementation in ./core/load.py — see load.set_control.)
client,cluster,control,catalog_url,month,year,daskreport,outputpath = load.set_control(host)
# Make sure the result and dask-report directories exist (IPython shell escapes).
!mkdir -p $outputpath
!mkdir -p $daskreport
# Display the dask client summary (dashboard link, workers, memory).
client
local True using host= irene8000.c-irene.tgcc.ccc.cea.fr starting dask cluster on local= True workers 16 10000000000 False tgcc local cluster starting This code is running on irene8000.c-irene.tgcc.ccc.cea.fr using SEDNA_DELTA_MONITOR file experiment, read from ../lib/SEDNA_DELTA_MONITOR.yaml on year= 2013 on month= 06 outputpath= ../results/SEDNA_DELTA_MONITOR/ daskreport= ../results/dask/6570252irene8000.c-irene.tgcc.ccc.cea.fr_SEDNA_DELTA_MONITOR_06M_IceThick/ CPU times: user 512 ms, sys: 166 ms, total: 679 ms Wall time: 15.8 s
Client-9a1a0dea-247e-11ed-b063-080038bfd9c6
Connection method: Cluster object | Cluster type: distributed.LocalCluster |
Dashboard: http://127.0.0.1:8787/status |
ec40d978
Dashboard: http://127.0.0.1:8787/status | Workers: 12 |
Total threads: 48 | Total memory: 2.86 TiB |
Status: running | Using processes: True |
Scheduler-61bcca59-0541-4c32-ba76-7af8bd8c09f3
Comm: tcp://127.0.0.1:45888 | Workers: 12 |
Dashboard: http://127.0.0.1:8787/status | Total threads: 48 |
Started: Just now | Total memory: 2.86 TiB |
Comm: tcp://127.0.0.1:46562 | Total threads: 4 |
Dashboard: http://127.0.0.1:44237/status | Memory: 244.27 GiB |
Nanny: tcp://127.0.0.1:34821 | |
Local directory: /tmp/dask-worker-space/worker-wa1bblsq |
Comm: tcp://127.0.0.1:33977 | Total threads: 4 |
Dashboard: http://127.0.0.1:39458/status | Memory: 244.27 GiB |
Nanny: tcp://127.0.0.1:32844 | |
Local directory: /tmp/dask-worker-space/worker-mda4o9s3 |
Comm: tcp://127.0.0.1:37268 | Total threads: 4 |
Dashboard: http://127.0.0.1:42048/status | Memory: 244.27 GiB |
Nanny: tcp://127.0.0.1:44935 | |
Local directory: /tmp/dask-worker-space/worker-90itkv5s |
Comm: tcp://127.0.0.1:33107 | Total threads: 4 |
Dashboard: http://127.0.0.1:45048/status | Memory: 244.27 GiB |
Nanny: tcp://127.0.0.1:39889 | |
Local directory: /tmp/dask-worker-space/worker-8k17qucp |
Comm: tcp://127.0.0.1:43201 | Total threads: 4 |
Dashboard: http://127.0.0.1:36141/status | Memory: 244.27 GiB |
Nanny: tcp://127.0.0.1:40572 | |
Local directory: /tmp/dask-worker-space/worker-j1m7rrj_ |
Comm: tcp://127.0.0.1:43626 | Total threads: 4 |
Dashboard: http://127.0.0.1:41978/status | Memory: 244.27 GiB |
Nanny: tcp://127.0.0.1:38284 | |
Local directory: /tmp/dask-worker-space/worker-f0j143tb |
Comm: tcp://127.0.0.1:40486 | Total threads: 4 |
Dashboard: http://127.0.0.1:46430/status | Memory: 244.27 GiB |
Nanny: tcp://127.0.0.1:41287 | |
Local directory: /tmp/dask-worker-space/worker-odmn2iv7 |
Comm: tcp://127.0.0.1:39753 | Total threads: 4 |
Dashboard: http://127.0.0.1:42295/status | Memory: 244.27 GiB |
Nanny: tcp://127.0.0.1:36560 | |
Local directory: /tmp/dask-worker-space/worker-gw97w7bd |
Comm: tcp://127.0.0.1:43538 | Total threads: 4 |
Dashboard: http://127.0.0.1:40183/status | Memory: 244.27 GiB |
Nanny: tcp://127.0.0.1:36945 | |
Local directory: /tmp/dask-worker-space/worker-5ljkqm86 |
Comm: tcp://127.0.0.1:42764 | Total threads: 4 |
Dashboard: http://127.0.0.1:42892/status | Memory: 244.27 GiB |
Nanny: tcp://127.0.0.1:33823 | |
Local directory: /tmp/dask-worker-space/worker-mnhs8mmp |
Comm: tcp://127.0.0.1:40970 | Total threads: 4 |
Dashboard: http://127.0.0.1:41934/status | Memory: 244.27 GiB |
Nanny: tcp://127.0.0.1:45378 | |
Local directory: /tmp/dask-worker-space/worker-fu8yfw6t |
Comm: tcp://127.0.0.1:46313 | Total threads: 4 |
Dashboard: http://127.0.0.1:39817/status | Memory: 244.27 GiB |
Nanny: tcp://127.0.0.1:38552 | |
Local directory: /tmp/dask-worker-space/worker-slxacqem |
# Load the monitoring control table (one of the M_xxx.csv files) describing
# which quantities to compute: inputs, equation, zone, plot type, colourmap,
# value range, and unit per row.
df=load.controlfile(control)
#Take out 'later' tagged computations
#df=df[~df['Value'].str.contains('later')]
# Display the control table.
df
Value | Inputs | Equation | Zone | Plot | Colourmap | MinMax | Unit | Oldname | Unnamed: 10 | |
---|---|---|---|---|---|---|---|---|---|---|
IceThickness | icemod.sivolu | (data.sivolu.where(data.sivolu >0)).to_dataset... | ALL | maps | Spectral | (0,5) | m | M-4 |
Each computation consists of the following steps:
%%time
import os
calcswitch=os.environ.get('calc', 'True')
lazy=os.environ.get('lazy','False' )
loaddata=((df.Inputs != '').any())
print('calcswitch=',calcswitch,'df.Inputs != nothing',loaddata, 'lazy=',lazy)
data = load.datas(catalog_url,df.Inputs,month,year,daskreport,lazy=lazy) if ((calcswitch=='True' )*loaddata) else 0
data
calcswitch= True df.Inputs != nothing True lazy= True ../lib/SEDNA_DELTA_MONITOR.yaml using param_xios reading ../lib/SEDNA_DELTA_MONITOR.yaml using param_xios reading <bound method DataSourceBase.describe of sources: param_xios: args: combine: nested concat_dim: y urlpath: /ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/SEDNA-I/SEDNA_Domain_cfg_Tgt_20210423_tsh10m_L1/param_f32/x_*.nc xarray_kwargs: compat: override coords: minimal data_vars: minimal parallel: true description: SEDNA NEMO parameters from MPI output nav_lon lat fails driver: intake_xarray.netcdf.NetCDFSource metadata: catalog_dir: /ccc/work/cont003/gen7420/talandel/TOOLS/monitor-sedna/notebook/../lib/ > {'name': 'param_xios', 'container': 'xarray', 'plugin': ['netcdf'], 'driver': ['netcdf'], 'description': 'SEDNA NEMO parameters from MPI output nav_lon lat fails', 'direct_access': 'forbid', 'user_parameters': [{'name': 'path', 'description': 'file coordinate', 'type': 'str', 'default': '/ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/MESH/SEDNA_mesh_mask_Tgt_20210423_tsh10m_L1/param'}], 'metadata': {}, 'args': {'urlpath': '/ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/SEDNA-I/SEDNA_Domain_cfg_Tgt_20210423_tsh10m_L1/param_f32/x_*.nc', 'combine': 'nested', 'concat_dim': 'y'}} 0 read icemod ['sivolu'] lazy= True using load_data_xios reading icemod using load_data_xios reading {'name': 'data_xios', 'container': 'xarray', 'plugin': ['netcdf'], 'driver': ['netcdf'], 'description': 'SEDNA NEMO outputs from different xios server', 'direct_access': 'forbid', 'user_parameters': [{'name': 'path', 'description': 'name of config', 'type': 'str', 'default': '/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d'}, {'name': 'fileexp', 'description': 'name of config', 'type': 'str', 'default': 'SEDNA-DELTA'}, {'name': 'month', 'description': 'running number 2 digit', 'type': 'str', 'default': '02'}, {'name': 'freq', 'description': '1d or 1m', 'type': 'str', 'default': '1d'}, {'name': 'year', 
'description': 'last digits of yearmonthdate.', 'type': 'str', 'default': '2012'}, {'name': 'file', 'description': 'file name', 'type': 'str', 'default': 'icemod'}, {'name': 'eio', 'description': 'xios mpi number', 'type': 'str', 'default': '0[0-5][0-9][0-9]'}], 'metadata': {}, 'args': {'urlpath': '{{path}}/{{year}}/{{month}}/*{{file}}_*_{{eio}}.nc', 'combine': 'nested', 'concat_dim': 'y'}}
HDF5-DIAG: Error detected in HDF5 (1.12.1) thread 1: #000: H5F.c line 620 in H5Fopen(): unable to open file major: File accessibility minor: Unable to open file #001: H5VLcallback.c line 3501 in H5VL_file_open(): failed to iterate over available VOL connector plugins major: Virtual Object Layer minor: Iteration failed #002: H5PLpath.c line 578 in H5PL__path_table_iterate(): can't iterate over plugins in plugin path '(null)' major: Plugin for dynamically loaded library minor: Iteration failed #003: H5PLpath.c line 620 in H5PL__path_table_iterate_process_path(): can't open directory: /ccc/cont003/home/ra5563/ra5563/monitor/lib/hdf5/plugin major: Plugin for dynamically loaded library minor: Can't open directory or file #004: H5VLcallback.c line 3351 in H5VL__file_open(): open failed major: Virtual Object Layer minor: Can't open object #005: H5VLnative_file.c line 97 in H5VL__native_file_open(): unable to open file major: File accessibility minor: Unable to open file #006: H5Fint.c line 1990 in H5F_open(): unable to read superblock major: File accessibility minor: Read failed #007: H5Fsuper.c line 614 in H5F__super_read(): truncated file: eof = 4075520, sblock->base_addr = 0, stored_eof = 114007180 major: File accessibility minor: File has been truncated HDF5-DIAG: Error detected in HDF5 (1.12.1) thread 1: #000: H5F.c line 620 in H5Fopen(): unable to open file major: File accessibility minor: Unable to open file #001: H5VLcallback.c line 3501 in H5VL_file_open(): failed to iterate over available VOL connector plugins major: Virtual Object Layer minor: Iteration failed #002: H5PLpath.c line 578 in H5PL__path_table_iterate(): can't iterate over plugins in plugin path '(null)' major: Plugin for dynamically loaded library minor: Iteration failed #003: H5PLpath.c line 620 in H5PL__path_table_iterate_process_path(): can't open directory: /ccc/cont003/home/ra5563/ra5563/monitor/lib/hdf5/plugin major: Plugin for dynamically loaded library minor: Can't open directory or file 
#004: H5VLcallback.c line 3351 in H5VL__file_open(): open failed major: Virtual Object Layer minor: Can't open object #005: H5VLnative_file.c line 97 in H5VL__native_file_open(): unable to open file major: File accessibility minor: Unable to open file #006: H5Fint.c line 1990 in H5F_open(): unable to read superblock major: File accessibility minor: Read failed #007: H5Fsuper.c line 614 in H5F__super_read(): truncated file: eof = 21680128, sblock->base_addr = 0, stored_eof = 122315319 major: File accessibility minor: File has been truncated HDF5-DIAG: Error detected in HDF5 (1.12.1) thread 1: #000: H5F.c line 620 in H5Fopen(): unable to open file major: File accessibility minor: Unable to open file #001: H5VLcallback.c line 3501 in H5VL_file_open(): failed to iterate over available VOL connector plugins major: Virtual Object Layer minor: Iteration failed #002: H5PLpath.c line 578 in H5PL__path_table_iterate(): can't iterate over plugins in plugin path '(null)' major: Plugin for dynamically loaded library minor: Iteration failed #003: H5PLpath.c line 620 in H5PL__path_table_iterate_process_path(): can't open directory: /ccc/cont003/home/ra5563/ra5563/monitor/lib/hdf5/plugin major: Plugin for dynamically loaded library minor: Can't open directory or file #004: H5VLcallback.c line 3351 in H5VL__file_open(): open failed major: Virtual Object Layer minor: Can't open object #005: H5VLnative_file.c line 97 in H5VL__native_file_open(): unable to open file major: File accessibility minor: Unable to open file #006: H5Fint.c line 1990 in H5F_open(): unable to read superblock major: File accessibility minor: Read failed #007: H5Fsuper.c line 614 in H5F__super_read(): truncated file: eof = 70184960, sblock->base_addr = 0, stored_eof = 121997013 major: File accessibility minor: File has been truncated HDF5-DIAG: Error detected in HDF5 (1.12.1) thread 1: #000: H5F.c line 620 in H5Fopen(): unable to open file major: File accessibility minor: Unable to open file #001: H5VLcallback.c 
line 3501 in H5VL_file_open(): failed to iterate over available VOL connector plugins major: Virtual Object Layer minor: Iteration failed #002: H5PLpath.c line 578 in H5PL__path_table_iterate(): can't iterate over plugins in plugin path '(null)' major: Plugin for dynamically loaded library minor: Iteration failed #003: H5PLpath.c line 620 in H5PL__path_table_iterate_process_path(): can't open directory: /ccc/cont003/home/ra5563/ra5563/monitor/lib/hdf5/plugin major: Plugin for dynamically loaded library minor: Can't open directory or file #004: H5VLcallback.c line 3351 in H5VL__file_open(): open failed major: Virtual Object Layer minor: Can't open object #005: H5VLnative_file.c line 97 in H5VL__native_file_open(): unable to open file major: File accessibility minor: Unable to open file #006: H5Fint.c line 1990 in H5F_open(): unable to read superblock major: File accessibility minor: Read failed #007: H5Fsuper.c line 614 in H5F__super_read(): truncated file: eof = 1033216, sblock->base_addr = 0, stored_eof = 52611074 major: File accessibility minor: File has been truncated HDF5-DIAG: Error detected in HDF5 (1.12.1) thread 1: #000: H5F.c line 620 in H5Fopen(): unable to open file major: File accessibility minor: Unable to open file #001: H5VLcallback.c line 3501 in H5VL_file_open(): failed to iterate over available VOL connector plugins major: Virtual Object Layer minor: Iteration failed #002: H5PLpath.c line 578 in H5PL__path_table_iterate(): can't iterate over plugins in plugin path '(null)' major: Plugin for dynamically loaded library minor: Iteration failed #003: H5PLpath.c line 620 in H5PL__path_table_iterate_process_path(): can't open directory: /ccc/cont003/home/ra5563/ra5563/monitor/lib/hdf5/plugin major: Plugin for dynamically loaded library minor: Can't open directory or file #004: H5VLcallback.c line 3351 in H5VL__file_open(): open failed major: Virtual Object Layer minor: Can't open object #005: H5VLnative_file.c line 97 in H5VL__native_file_open(): unable 
to open file major: File accessibility minor: Unable to open file #006: H5Fint.c line 1990 in H5F_open(): unable to read superblock major: File accessibility minor: Read failed #007: H5Fsuper.c line 614 in H5F__super_read(): truncated file: eof = 10657792, sblock->base_addr = 0, stored_eof = 58635869 major: File accessibility minor: File has been truncated HDF5-DIAG: Error detected in HDF5 (1.12.1) thread 1: #000: H5F.c line 620 in H5Fopen(): unable to open file major: File accessibility minor: Unable to open file #001: H5VLcallback.c line 3501 in H5VL_file_open(): failed to iterate over available VOL connector plugins major: Virtual Object Layer minor: Iteration failed #002: H5PLpath.c line 578 in H5PL__path_table_iterate(): can't iterate over plugins in plugin path '(null)' major: Plugin for dynamically loaded library minor: Iteration failed #003: H5PLpath.c line 620 in H5PL__path_table_iterate_process_path(): can't open directory: /ccc/cont003/home/ra5563/ra5563/monitor/lib/hdf5/plugin major: Plugin for dynamically loaded library minor: Can't open directory or file #004: H5VLcallback.c line 3351 in H5VL__file_open(): open failed major: Virtual Object Layer minor: Can't open object #005: H5VLnative_file.c line 97 in H5VL__native_file_open(): unable to open file major: File accessibility minor: Unable to open file #006: H5Fint.c line 1990 in H5F_open(): unable to read superblock major: File accessibility minor: Read failed #007: H5Fsuper.c line 614 in H5F__super_read(): truncated file: eof = 3600384, sblock->base_addr = 0, stored_eof = 124635530 major: File accessibility minor: File has been truncated HDF5-DIAG: Error detected in HDF5 (1.12.1) thread 1: #000: H5F.c line 620 in H5Fopen(): unable to open file major: File accessibility minor: Unable to open file #001: H5VLcallback.c line 3501 in H5VL_file_open(): failed to iterate over available VOL connector plugins major: Virtual Object Layer minor: Iteration failed #002: H5PLpath.c line 578 in 
H5PL__path_table_iterate(): can't iterate over plugins in plugin path '(null)' major: Plugin for dynamically loaded library minor: Iteration failed #003: H5PLpath.c line 620 in H5PL__path_table_iterate_process_path(): can't open directory: /ccc/cont003/home/ra5563/ra5563/monitor/lib/hdf5/plugin major: Plugin for dynamically loaded library minor: Can't open directory or file #004: H5VLcallback.c line 3351 in H5VL__file_open(): open failed major: Virtual Object Layer minor: Can't open object #005: H5VLnative_file.c line 97 in H5VL__native_file_open(): unable to open file major: File accessibility minor: Unable to open file #006: H5Fint.c line 1990 in H5F_open(): unable to read superblock major: File accessibility minor: Read failed #007: H5Fsuper.c line 614 in H5F__super_read(): truncated file: eof = 3424256, sblock->base_addr = 0, stored_eof = 75704137 major: File accessibility minor: File has been truncated HDF5-DIAG: Error detected in HDF5 (1.12.1) thread 1: #000: H5F.c line 620 in H5Fopen(): unable to open file major: File accessibility minor: Unable to open file #001: H5VLcallback.c line 3501 in H5VL_file_open(): failed to iterate over available VOL connector plugins major: Virtual Object Layer minor: Iteration failed #002: H5PLpath.c line 578 in H5PL__path_table_iterate(): can't iterate over plugins in plugin path '(null)' major: Plugin for dynamically loaded library minor: Iteration failed #003: H5PLpath.c line 620 in H5PL__path_table_iterate_process_path(): can't open directory: /ccc/cont003/home/ra5563/ra5563/monitor/lib/hdf5/plugin major: Plugin for dynamically loaded library minor: Can't open directory or file #004: H5VLcallback.c line 3351 in H5VL__file_open(): open failed major: Virtual Object Layer minor: Can't open object #005: H5VLnative_file.c line 97 in H5VL__native_file_open(): unable to open file major: File accessibility minor: Unable to open file #006: H5Fint.c line 1990 in H5F_open(): unable to read superblock major: File accessibility minor: 
Read failed #007: H5Fsuper.c line 614 in H5F__super_read(): truncated file: eof = 4542464, sblock->base_addr = 0, stored_eof = 133734280 major: File accessibility minor: File has been truncated HDF5-DIAG: Error detected in HDF5 (1.12.1) thread 2: #000: H5F.c line 620 in H5Fopen(): unable to open file major: File accessibility minor: Unable to open file #001: H5VLcallback.c line 3501 in H5VL_file_open(): failed to iterate over available VOL connector plugins major: Virtual Object Layer minor: Iteration failed #002: H5PLpath.c line 578 in H5PL__path_table_iterate(): can't iterate over plugins in plugin path '(null)' major: Plugin for dynamically loaded library minor: Iteration failed #003: H5PLpath.c line 620 in H5PL__path_table_iterate_process_path(): can't open directory: /ccc/cont003/home/ra5563/ra5563/monitor/lib/hdf5/plugin major: Plugin for dynamically loaded library minor: Can't open directory or file #004: H5VLcallback.c line 3351 in H5VL__file_open(): open failed major: Virtual Object Layer minor: Can't open object #005: H5VLnative_file.c line 97 in H5VL__native_file_open(): unable to open file major: File accessibility minor: Unable to open file #006: H5Fint.c line 1990 in H5F_open(): unable to read superblock major: File accessibility minor: Read failed #007: H5Fsuper.c line 614 in H5F__super_read(): truncated file: eof = 12759040, sblock->base_addr = 0, stored_eof = 20752476 major: File accessibility minor: File has been truncated 2022-08-25 16:03:16,552 - distributed.worker - WARNING - Compute Failed Key: open_dataset-ab771ef7-3f38-4deb-b64d-1c0f428be834 Function: execute_task args: ((<function apply at 0x2aba95075000>, <function open_dataset at 0x2ababee3cca0>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0521.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 
'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: "OSError(-101, 'NetCDF: HDF error')" HDF5-DIAG: Error detected in HDF5 (1.12.1) thread 2: #000: H5F.c line 620 in H5Fopen(): unable to open file major: File accessibility minor: Unable to open file #001: H5VLcallback.c line 3501 in H5VL_file_open(): failed to iterate over available VOL connector plugins major: Virtual Object Layer minor: Iteration failed #002: H5PLpath.c line 578 in H5PL__path_table_iterate(): can't iterate over plugins in plugin path '(null)' major: Plugin for dynamically loaded library minor: Iteration failed #003: H5PLpath.c line 620 in H5PL__path_table_iterate_process_path(): can't open directory: /ccc/cont003/home/ra5563/ra5563/monitor/lib/hdf5/plugin major: Plugin for dynamically loaded library minor: Can't open directory or file #004: H5VLcallback.c line 3351 in H5VL__file_open(): open failed major: Virtual Object Layer minor: Can't open object #005: H5VLnative_file.c line 97 in H5VL__native_file_open(): unable to open file major: File accessibility minor: Unable to open file #006: H5Fint.c line 1990 in H5F_open(): unable to read superblock major: File accessibility minor: Read failed #007: H5Fsuper.c line 614 in H5F__super_read(): truncated file: eof = 3493888, sblock->base_addr = 0, stored_eof = 138106336 major: File accessibility minor: File has been truncated 2022-08-25 16:03:16,619 - distributed.worker - WARNING - Compute Failed Key: open_dataset-4416247e-6e13-4885-a3fe-10aae296c317 Function: execute_task args: ((<function apply at 0x2b711b2ad000>, <function open_dataset at 0x2b71451496c0>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0334.nc'], (<class 'dict'>, 
[['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: "OSError(-101, 'NetCDF: HDF error')" 2022-08-25 16:03:16,639 - distributed.worker - WARNING - Compute Failed Key: open_dataset-4456e62a-6449-443d-9e08-86cdd6b4cc08 Function: execute_task args: ((<function apply at 0x2b711b2ad000>, <function open_dataset at 0x2b71451496c0>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0459.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: "OSError(-101, 'NetCDF: HDF error')" 2022-08-25 16:03:16,685 - distributed.worker - WARNING - Compute Failed Key: open_dataset-449c8df6-dea0-4f4d-bc9c-70b691761a6d Function: execute_task args: ((<function apply at 0x2b711b2ad000>, <function open_dataset at 0x2b71451496c0>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0380.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 
'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: "OSError(-101, 'NetCDF: HDF error')" HDF5-DIAG: Error detected in HDF5 (1.12.1) thread 1: #000: H5F.c line 620 in H5Fopen(): unable to open file major: File accessibility minor: Unable to open file #001: H5VLcallback.c line 3501 in H5VL_file_open(): failed to iterate over available VOL connector plugins major: Virtual Object Layer minor: Iteration failed #002: H5PLpath.c line 578 in H5PL__path_table_iterate(): can't iterate over plugins in plugin path '(null)' major: Plugin for dynamically loaded library minor: Iteration failed #003: H5PLpath.c line 620 in H5PL__path_table_iterate_process_path(): can't open directory: /ccc/cont003/home/ra5563/ra5563/monitor/lib/hdf5/plugin major: Plugin for dynamically loaded library minor: Can't open directory or file #004: H5VLcallback.c line 3351 in H5VL__file_open(): open failed major: Virtual Object Layer minor: Can't open object #005: H5VLnative_file.c line 97 in H5VL__native_file_open(): unable to open file major: File accessibility minor: Unable to open file #006: H5Fint.c line 1990 in H5F_open(): unable to read superblock major: File accessibility minor: Read failed #007: H5Fsuper.c line 614 in H5F__super_read(): truncated file: eof = 4136960, sblock->base_addr = 0, stored_eof = 41817654 major: File accessibility minor: File has been truncated 2022-08-25 16:03:16,738 - distributed.worker - WARNING - Compute Failed Key: open_dataset-6fb73819-fcbc-4551-a123-f6e69c343094 Function: execute_task args: ((<function apply at 0x2baa636e5000>, <function open_dataset at 0x2baa8d483130>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0175.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, 
['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: "OSError(-101, 'NetCDF: HDF error')" 2022-08-25 16:03:16,833 - distributed.worker - WARNING - Compute Failed Key: open_dataset-0094aa03-f218-46fe-8f58-9aa1695d8f06 Function: execute_task args: ((<function apply at 0x2b771bfe5000>, <function open_dataset at 0x2b7745dd5360>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0480.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: "OSError(-101, 'NetCDF: HDF error')" 2022-08-25 16:03:16,860 - distributed.worker - WARNING - Compute Failed Key: open_dataset-c173c6b6-e40d-4721-a81b-2aa0ac66fabc Function: execute_task args: ((<function apply at 0x2ab3364f5000>, <function open_dataset at 0x2ab36030d510>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0489.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 
'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: 'ValueError("did not find a match in any of xarray\'s currently installed IO backends [\'netcdf4\', \'scipy\', \'zarr\']. Consider explicitly selecting one of the installed engines via the ``engine`` parameter, or installing additional IO dependencies, see:\\nhttps://docs.xarray.dev/en/stable/getting-started-guide/installing.html\\nhttps://docs.xarray.dev/en/stable/user-guide/io.html")' 2022-08-25 16:03:16,860 - distributed.worker - WARNING - Compute Failed Key: open_dataset-c10277b4-350e-4fde-8843-234d4393b5e0 Function: execute_task args: ((<function apply at 0x2ab3364f5000>, <function open_dataset at 0x2ab36030d510>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0282.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: 'ValueError("did not find a match in any of xarray\'s currently installed IO backends [\'netcdf4\', \'scipy\', \'zarr\']. 
Consider explicitly selecting one of the installed engines via the ``engine`` parameter, or installing additional IO dependencies, see:\\nhttps://docs.xarray.dev/en/stable/getting-started-guide/installing.html\\nhttps://docs.xarray.dev/en/stable/user-guide/io.html")' 2022-08-25 16:03:16,860 - distributed.worker - WARNING - Compute Failed Key: open_dataset-c1778f88-b0a0-49ed-9322-312770a6ac17 Function: execute_task args: ((<function apply at 0x2ab3364f5000>, <function open_dataset at 0x2ab36030d510>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0523.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: 'ValueError("did not find a match in any of xarray\'s currently installed IO backends [\'netcdf4\', \'scipy\', \'zarr\']. 
Consider explicitly selecting one of the installed engines via the ``engine`` parameter, or installing additional IO dependencies, see:\\nhttps://docs.xarray.dev/en/stable/getting-started-guide/installing.html\\nhttps://docs.xarray.dev/en/stable/user-guide/io.html")' 2022-08-25 16:03:16,862 - distributed.worker - WARNING - Compute Failed Key: open_dataset-c171ccb7-201b-4dfa-aefd-bbdbec2e3a02 Function: execute_task args: ((<function apply at 0x2ab3364f5000>, <function open_dataset at 0x2ab36030d510>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0398.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: "OSError(-101, 'NetCDF: HDF error')" 2022-08-25 16:03:16,863 - distributed.worker - WARNING - Compute Failed Key: open_dataset-6dcbdaef-219d-460c-bbaf-3bbb92f9a9c2 Function: execute_task args: ((<function apply at 0x2baa636e5000>, <function open_dataset at 0x2baa8d483130>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0490.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: 
'ValueError("did not find a match in any of xarray\'s currently installed IO backends [\'netcdf4\', \'scipy\', \'zarr\']. Consider explicitly selecting one of the installed engines via the ``engine`` parameter, or installing additional IO dependencies, see:\\nhttps://docs.xarray.dev/en/stable/getting-started-guide/installing.html\\nhttps://docs.xarray.dev/en/stable/user-guide/io.html")' 2022-08-25 16:03:16,874 - distributed.worker - WARNING - Compute Failed Key: open_dataset-2bad98fe-db10-47b7-ba8d-d9b8ef4f4ee9 Function: execute_task args: ((<function apply at 0x2ac2d27f9000>, <function open_dataset at 0x2ac2fc626200>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0304.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: "OSError(-101, 'NetCDF: HDF error')" 2022-08-25 16:03:16,897 - distributed.worker - WARNING - Compute Failed Key: open_dataset-ea8b618e-cec6-4af9-bcad-96ee63fdbc4c Function: execute_task args: ((<function apply at 0x2af6c9d4d000>, <function open_dataset at 0x2af6efb59750>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0208.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 
'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: "OSError(-101, 'NetCDF: HDF error')" 2022-08-25 16:03:16,898 - distributed.worker - WARNING - Compute Failed Key: open_dataset-e8c49a94-b716-4388-a67e-c49d9c34bf0a Function: execute_task args: ((<function apply at 0x2af6c9d4d000>, <function open_dataset at 0x2af6efb59750>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0495.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: 'ValueError("did not find a match in any of xarray\'s currently installed IO backends [\'netcdf4\', \'scipy\', \'zarr\']. 
Consider explicitly selecting one of the installed engines via the ``engine`` parameter, or installing additional IO dependencies, see:\\nhttps://docs.xarray.dev/en/stable/getting-started-guide/installing.html\\nhttps://docs.xarray.dev/en/stable/user-guide/io.html")' 2022-08-25 16:03:16,899 - distributed.worker - WARNING - Compute Failed Key: open_dataset-ea82b3aa-c4e8-4b48-aa42-5b3928c6ba5d Function: execute_task args: ((<function apply at 0x2af6c9d4d000>, <function open_dataset at 0x2af6efb59750>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0229.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: 'ValueError("did not find a match in any of xarray\'s currently installed IO backends [\'netcdf4\', \'scipy\', \'zarr\']. 
Consider explicitly selecting one of the installed engines via the ``engine`` parameter, or installing additional IO dependencies, see:\\nhttps://docs.xarray.dev/en/stable/getting-started-guide/installing.html\\nhttps://docs.xarray.dev/en/stable/user-guide/io.html")' 2022-08-25 16:03:16,918 - distributed.worker - WARNING - Compute Failed Key: open_dataset-9690fda2-477c-4415-90e1-e264adb364d1 Function: execute_task args: ((<function apply at 0x2abce60a1000>, <function open_dataset at 0x2abd0be3f760>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0338.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: "OSError(-101, 'NetCDF: HDF error')" 2022-08-25 16:03:16,918 - distributed.worker - WARNING - Compute Failed Key: open_dataset-97d0e1ee-b08b-42eb-b7a3-b9bdafe9dcfa Function: execute_task args: ((<function apply at 0x2abce60a1000>, <function open_dataset at 0x2abd0be3f760>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0407.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: 
'ValueError("did not find a match in any of xarray\'s currently installed IO backends [\'netcdf4\', \'scipy\', \'zarr\']. Consider explicitly selecting one of the installed engines via the ``engine`` parameter, or installing additional IO dependencies, see:\\nhttps://docs.xarray.dev/en/stable/getting-started-guide/installing.html\\nhttps://docs.xarray.dev/en/stable/user-guide/io.html")' 2022-08-25 16:03:16,985 - distributed.worker - WARNING - Compute Failed Key: open_dataset-439747f8-8a4a-4838-9f65-c6325c6725b3 Function: execute_task args: ((<function apply at 0x2b711b2ad000>, <function open_dataset at 0x2b71451496c0>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0260.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: "OSError(-101, 'NetCDF: HDF error')" 2022-08-25 16:03:16,986 - distributed.worker - WARNING - Compute Failed Key: open_dataset-4313dc2d-aa7f-4ae3-91b2-71be70c099b3 Function: execute_task args: ((<function apply at 0x2b711b2ad000>, <function open_dataset at 0x2b71451496c0>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0451.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 
'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: 'ValueError("did not find a match in any of xarray\'s currently installed IO backends [\'netcdf4\', \'scipy\', \'zarr\']. Consider explicitly selecting one of the installed engines via the ``engine`` parameter, or installing additional IO dependencies, see:\\nhttps://docs.xarray.dev/en/stable/getting-started-guide/installing.html\\nhttps://docs.xarray.dev/en/stable/user-guide/io.html")' 2022-08-25 16:03:17,023 - distributed.worker - WARNING - Compute Failed Key: open_dataset-569224a7-1761-4d39-b813-1aaea8311316 Function: execute_task args: ((<function apply at 0x2b27e2cc1000>, <function open_dataset at 0x2b280caa1240>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0488.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: 'ValueError("did not find a match in any of xarray\'s currently installed IO backends [\'netcdf4\', \'scipy\', \'zarr\']. 
Consider explicitly selecting one of the installed engines via the ``engine`` parameter, or installing additional IO dependencies, see:\\nhttps://docs.xarray.dev/en/stable/getting-started-guide/installing.html\\nhttps://docs.xarray.dev/en/stable/user-guide/io.html")' 2022-08-25 16:03:17,023 - distributed.worker - WARNING - Compute Failed Key: open_dataset-57de01e8-5af0-44cd-b4eb-6077183d2802 Function: execute_task args: ((<function apply at 0x2b27e2cc1000>, <function open_dataset at 0x2b280caa1240>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0487.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: "OSError(-101, 'NetCDF: HDF error')" 2022-08-25 16:03:17,024 - distributed.worker - WARNING - Compute Failed Key: open_dataset-55330991-17e4-4a82-9c92-a29f14510d16 Function: execute_task args: ((<function apply at 0x2b27e2cc1000>, <function open_dataset at 0x2b280caa1240>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0442.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: 
"OSError(-101, 'NetCDF: HDF error')" 2022-08-25 16:03:28,308 - distributed.worker - WARNING - Compute Failed Key: open_dataset-aabd1586-7154-4ea6-be9a-933d1e5721cf Function: execute_task args: ((<function apply at 0x2aba95075000>, <function open_dataset at 0x2ababee3cca0>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0306.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: "OSError(-101, 'NetCDF: HDF error')" 2022-08-25 16:03:28,308 - distributed.worker - WARNING - Compute Failed Key: open_dataset-aaa4203e-f160-449c-954c-15eaebc86aa4 Function: execute_task args: ((<function apply at 0x2aba95075000>, <function open_dataset at 0x2ababee3cca0>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0249.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: 'ValueError("did not find a match in any of xarray\'s currently installed IO backends [\'netcdf4\', \'scipy\', \'zarr\']. 
Consider explicitly selecting one of the installed engines via the ``engine`` parameter, or installing additional IO dependencies, see:\\nhttps://docs.xarray.dev/en/stable/getting-started-guide/installing.html\\nhttps://docs.xarray.dev/en/stable/user-guide/io.html")' 2022-08-25 16:03:28,308 - distributed.worker - WARNING - Compute Failed Key: open_dataset-aa2ff96d-1a12-4d85-aa06-3cb155c5be4b Function: execute_task args: ((<function apply at 0x2aba95075000>, <function open_dataset at 0x2ababee3cca0>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0501.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: 'ValueError("did not find a match in any of xarray\'s currently installed IO backends [\'netcdf4\', \'scipy\', \'zarr\']. 
Consider explicitly selecting one of the installed engines via the ``engine`` parameter, or installing additional IO dependencies, see:\\nhttps://docs.xarray.dev/en/stable/getting-started-guide/installing.html\\nhttps://docs.xarray.dev/en/stable/user-guide/io.html")' 2022-08-25 16:03:28,308 - distributed.worker - WARNING - Compute Failed Key: open_dataset-7f1f2bce-b747-4b74-aaa8-da22ad820efe Function: execute_task args: ((<function apply at 0x2ba0001a9000>, <function open_dataset at 0x2ba029fc0160>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0450.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: 'ValueError("did not find a match in any of xarray\'s currently installed IO backends [\'netcdf4\', \'scipy\', \'zarr\']. 
Consider explicitly selecting one of the installed engines via the ``engine`` parameter, or installing additional IO dependencies, see:\\nhttps://docs.xarray.dev/en/stable/getting-started-guide/installing.html\\nhttps://docs.xarray.dev/en/stable/user-guide/io.html")' 2022-08-25 16:03:28,308 - distributed.worker - WARNING - Compute Failed Key: open_dataset-7f7fdabd-0845-4661-969b-ed5d41384c67 Function: execute_task args: ((<function apply at 0x2ba0001a9000>, <function open_dataset at 0x2ba029fc0160>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0182.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: "OSError(-101, 'NetCDF: HDF error')" 2022-08-25 16:03:28,323 - distributed.worker - WARNING - Compute Failed Key: open_dataset-1a48a699-c81a-4c76-a2f7-05f80ecd7dfb Function: execute_task args: ((<function apply at 0x2aac3db49000>, <function open_dataset at 0x2aac639267a0>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0425.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: 
"OSError(-101, 'NetCDF: HDF error')" 2022-08-25 16:03:28,323 - distributed.worker - WARNING - Compute Failed Key: open_dataset-188906ab-62f6-4b0f-b4be-3327943153d1 Function: execute_task args: ((<function apply at 0x2aac3db49000>, <function open_dataset at 0x2aac639267a0>, ['/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0333.nc'], (<class 'dict'>, [['engine', None], ['chunks', (<class 'dict'>, [])], ['drop_variables', (<class 'set'>, ['intstrx', 'mldkz5', 'iicestrv', 'depthu_bounds', 'vwspd10', 'sozotaux', 'sitemp', 'rhop_sig0', 'snvolu', 'iicestru', 'sometauy', 'sistre', 'siages', 'utau_iceoce', 'sishea', 'botpres', 'sidive', 'time_counter_bounds', 'vtau_iceoce', 'time_centered_bounds', 'sisali', 'utau_atmoce', 'vtau_atmoce', 'snthic', 'intstry', 'deptht_bounds', 'uwspd10'])]]))) kwargs: {} Exception: "OSError(-101, 'NetCDF: HDF error')"
--------------------------------------------------------------------------- OSError Traceback (most recent call last) File <timed exec>:6, in <module> File /ccc/work/cont003/gen7420/talandel/TOOLS/monitor-sedna/notebook/core/load.py:681, in datas(catalog_url, dfi, month, year, daskreport, lazy) 676 datadict, paramdict = getdict(dfi) 677 #print('datadict:',datadict) 678 #if datadict == {}: 679 # data=0 680 #else: --> 681 data=outputs(catalog_url,datadict,month,year,daskreport,lazy) 682 for s in paramdict: 683 print('param',s,'will be included in data') File /ccc/work/cont003/gen7420/talandel/TOOLS/monitor-sedna/notebook/core/load.py:499, in outputs(catalog_url, datadict, month, year, daskreport, lazy) 496 with performance_report(filename=daskreport+"_load_output_"+filename+"_"+month+year+".html"): 497 #ds=load_data_xios_patch(cat,filename,month,catalog_url) 498 print("lazy=",lazy) --> 499 ds = load_data_xios(cat,filename,items,month,year) if ('True' in lazy) else load_data_xios_kerchunk(cat,filename,items,month,year,rome=True) 500 extime=time.time() - start 501 print(' took', extime, 'seconds') File /ccc/work/cont003/gen7420/talandel/TOOLS/monitor-sedna/notebook/core/load.py:374, in load_data_xios(cat, filename, items, month, year) 372 desc=cat.data_xios(file=filename,month=month,year=year ,xarray_kwargs=xarray_kwargs).describe() 373 print('using load_data_xios reading ',desc) --> 374 ds = cat.data_xios(file=filename,month=month,year=year ,xarray_kwargs=xarray_kwargs).to_dask() 375 ds = ds[items] 376 return ds File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake_xarray/base.py:69, in DataSourceMixin.to_dask(self) 67 def to_dask(self): 68 """Return xarray object where variables are dask arrays""" ---> 69 return self.read_chunked() File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake_xarray/base.py:44, in DataSourceMixin.read_chunked(self) 42 def read_chunked(self): 43 """Return xarray object (which will have 
chunks)""" ---> 44 self._load_metadata() 45 return self._ds File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake/source/base.py:236, in DataSourceBase._load_metadata(self) 234 """load metadata only if needed""" 235 if self._schema is None: --> 236 self._schema = self._get_schema() 237 self.dtype = self._schema.dtype 238 self.shape = self._schema.shape File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake_xarray/base.py:18, in DataSourceMixin._get_schema(self) 15 self.urlpath = self._get_cache(self.urlpath)[0] 17 if self._ds is None: ---> 18 self._open_dataset() 20 metadata = { 21 'dims': dict(self._ds.dims), 22 'data_vars': {k: list(self._ds[k].coords) 23 for k in self._ds.data_vars.keys()}, 24 'coords': tuple(self._ds.coords.keys()), 25 } 26 if getattr(self, 'on_server', False): File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/intake_xarray/netcdf.py:92, in NetCDFSource._open_dataset(self) 88 else: 89 # https://github.com/intake/filesystem_spec/issues/476#issuecomment-732372918 90 url = fsspec.open(self.urlpath, **self.storage_options).open() ---> 92 self._ds = _open_dataset(url, chunks=self.chunks, **kwargs) File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/api.py:980, in open_mfdataset(paths, chunks, concat_dim, compat, preprocess, engine, data_vars, coords, combine, parallel, join, attrs_file, combine_attrs, **kwargs) 975 datasets = [preprocess(ds) for ds in datasets] 977 if parallel: 978 # calling compute here will return the datasets/file_objs lists, 979 # the underlying datasets will still be stored as dask arrays --> 980 datasets, closers = dask.compute(datasets, closers) 982 # Combine all datasets, closing them in case of a ValueError 983 try: File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/dask/base.py:598, in compute(traverse, optimize_graph, scheduler, get, *args, **kwargs) 595 keys.append(x.__dask_keys__()) 
596 postcomputes.append(x.__dask_postcompute__()) --> 598 results = schedule(dsk, keys, **kwargs) 599 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)]) File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/distributed/client.py:3001, in Client.get(self, dsk, keys, workers, allow_other_workers, resources, sync, asynchronous, direct, retries, priority, fifo_timeout, actors, **kwargs) 2999 should_rejoin = False 3000 try: -> 3001 results = self.gather(packed, asynchronous=asynchronous, direct=direct) 3002 finally: 3003 for f in futures.values(): File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/distributed/client.py:2175, in Client.gather(self, futures, errors, direct, asynchronous) 2173 else: 2174 local_worker = None -> 2175 return self.sync( 2176 self._gather, 2177 futures, 2178 errors=errors, 2179 direct=direct, 2180 local_worker=local_worker, 2181 asynchronous=asynchronous, 2182 ) File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/distributed/utils.py:338, in SyncMethodMixin.sync(self, func, asynchronous, callback_timeout, *args, **kwargs) 336 return future 337 else: --> 338 return sync( 339 self.loop, func, *args, callback_timeout=callback_timeout, **kwargs 340 ) File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/distributed/utils.py:405, in sync(loop, func, callback_timeout, *args, **kwargs) 403 if error: 404 typ, exc, tb = error --> 405 raise exc.with_traceback(tb) 406 else: 407 return result File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/distributed/utils.py:378, in sync.<locals>.f() 376 future = asyncio.wait_for(future, callback_timeout) 377 future = asyncio.ensure_future(future) --> 378 result = yield future 379 except Exception: 380 error = sys.exc_info() File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/tornado/gen.py:762, in Runner.run(self) 759 exc_info = None 761 try: --> 762 value = 
future.result() 763 except Exception: 764 exc_info = sys.exc_info() File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/distributed/client.py:2038, in Client._gather(self, futures, errors, direct, local_worker) 2036 exc = CancelledError(key) 2037 else: -> 2038 raise exception.with_traceback(traceback) 2039 raise exc 2040 if errors == "skip": File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/dask/utils.py:41, in apply() 39 def apply(func, args, kwargs=None): 40 if kwargs: ---> 41 return func(*args, **kwargs) 42 else: 43 return func(*args) File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/api.py:531, in open_dataset() 519 decoders = _resolve_decoders_kwargs( 520 decode_cf, 521 open_backend_dataset_parameters=backend.open_dataset_parameters, (...) 527 decode_coords=decode_coords, 528 ) 530 overwrite_encoded_chunks = kwargs.pop("overwrite_encoded_chunks", None) --> 531 backend_ds = backend.open_dataset( 532 filename_or_obj, 533 drop_variables=drop_variables, 534 **decoders, 535 **kwargs, 536 ) 537 ds = _dataset_from_backend_dataset( 538 backend_ds, 539 filename_or_obj, (...) 547 **kwargs, 548 ) 549 return ds File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:555, in open_dataset() 534 def open_dataset( 535 self, 536 filename_or_obj, (...) 
551 autoclose=False, 552 ): 554 filename_or_obj = _normalize_path(filename_or_obj) --> 555 store = NetCDF4DataStore.open( 556 filename_or_obj, 557 mode=mode, 558 format=format, 559 group=group, 560 clobber=clobber, 561 diskless=diskless, 562 persist=persist, 563 lock=lock, 564 autoclose=autoclose, 565 ) 567 store_entrypoint = StoreBackendEntrypoint() 568 with close_on_error(store): File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:384, in open() 378 kwargs = dict( 379 clobber=clobber, diskless=diskless, persist=persist, format=format 380 ) 381 manager = CachingFileManager( 382 netCDF4.Dataset, filename, mode=mode, kwargs=kwargs 383 ) --> 384 return cls(manager, group=group, mode=mode, lock=lock, autoclose=autoclose) File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:332, in __init__() 330 self._group = group 331 self._mode = mode --> 332 self.format = self.ds.data_model 333 self._filename = self.ds.filepath() 334 self.is_remote = is_remote_uri(self._filename) File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:393, in ds() 391 @property 392 def ds(self): --> 393 return self._acquire() File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:387, in _acquire() 386 def _acquire(self, needs_lock=True): --> 387 with self._manager.acquire_context(needs_lock) as root: 388 ds = _nc4_require_group(root, self._group, self._mode) 389 return ds File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/contextlib.py:135, in __enter__() 133 del self.args, self.kwds, self.func 134 try: --> 135 return next(self.gen) 136 except StopIteration: 137 raise RuntimeError("generator didn't yield") from None File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/file_manager.py:189, in acquire_context() 186 @contextlib.contextmanager 187 def 
acquire_context(self, needs_lock=True): 188 """Context manager for acquiring a file.""" --> 189 file, cached = self._acquire_with_cache_info(needs_lock) 190 try: 191 yield file File /ccc/cont003/home/ra5563/ra5563/monitor/lib/python3.10/site-packages/xarray/backends/file_manager.py:207, in _acquire_with_cache_info() 205 kwargs = kwargs.copy() 206 kwargs["mode"] = self._mode --> 207 file = self._opener(*self._args, **kwargs) 208 if self._mode == "w": 209 # ensure file doesn't get overridden when opened again 210 self._mode = "a" File src/netCDF4/_netCDF4.pyx:2353, in netCDF4._netCDF4.Dataset.__init__() File src/netCDF4/_netCDF4.pyx:1963, in netCDF4._netCDF4._ensure_nc_success() OSError: [Errno -101] NetCDF: HDF error: b'/ccc/scratch/cont003/gen7420/talandel/SEDNA/SEDNA-DELTA-S/SPLIT/1d/2013/06/SEDNA-DELTA_1d_icemod_201306-201306_0521.nc'
%%time
# Kick off the automated monitoring pipeline (compute / plot / save as
# configured by the control CSV) for the SEDNA-DELTA experiment.
#
# NOTE(review): every positional argument here (`df`, `data`, `savefig`,
# `daskreport`, `outputpath`) must be bound by earlier cells. The preceding
# load cell aborted with "OSError: [Errno -101] NetCDF: HDF error" while
# opening the SPLIT/1d/2013/06 icemod files, so `data` was never assigned —
# which is exactly why this cell fails with NameError (traceback below).
# Re-run the load step after fixing/regenerating the corrupted .nc files
# (e.g. ..._0521.nc on scratch) before executing this cell.
monitor.auto(df,data,savefig,daskreport,outputpath,file_exp='SEDNA'
)
--------------------------------------------------------------------------- NameError Traceback (most recent call last) File <timed eval>:1, in <module> NameError: name 'data' is not defined