%matplotlib inline
import pandas as pd
import socket
host = socket.getfqdn()
from core import load, zoom, calc, save, plots, monitor
#reload funcs after updating ./core/*.py
import importlib
importlib.reload(load)
importlib.reload(zoom)
importlib.reload(calc)
importlib.reload(save)
importlib.reload(plots)
importlib.reload(monitor)
<module 'core.monitor' from '/ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py'>
# 'month' = 'JOBID': when the job is submitted through the job scheduler, 'month' is
# taken from the job ID (so it is almost, but not exactly, a month).
# Below is the list of environment variables one can pass.
#%env local='2'
# local : if 'True', run a dask local cluster; otherwise set 'local' to the number of
# workers to use.
# If 'local' is not given, it defaults to 'True'.
#%env ychunk='2'
#%env tchunk='2'
# Chunking control. 'False' keeps the chunking of the original netcdf files.
# ychunk=10 groups the original netcdf files ten by ten along y.
# tchunk=1 chunks the time coordinate one step at a time.
#%env control=FWC_SSH
# Name of the control file used for computation/plots/saving.
#%env file_exp=
# 'file_exp': which 'experiment' is it?
# This corresponds to the intake catalog name, without the path and the .yaml extension.
#%env year=
# For validation, this corresponds to the year in path/year/month.
# For monitoring, this corresponds to the 'date'; a '*' means process all files in the
# monitoring directory. Setting it to *0[0-9], *1[0-9] and *[2-3][0-9] splits the job
# into three lots.
#%env month=
# For monitoring, this corresponds to the file path path-XIOS.{month}/.
#
#%env save=  proceed with saving? True or False; default is True.
#%env plot=  proceed with plotting? True or False; default is True.
#%env calc=  proceed with the computation, or just load the computed result? True or False; default is True.
#%env save=False
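These switches are plain environment variables; below is a minimal sketch of how they can be read inside the notebook with the defaults described above. The get_switch helper and the defaults for 'local', 'year' and 'month' are assumptions for illustration; only the 'True' defaults for calc/save/plot come from the comments above.
import os
# hypothetical helper illustrating the environment-variable convention described above;
# %env VAR=value in the notebook (or exporting VAR in a batch script) ends up in os.environ
def get_switch(name, default='True'):
    return os.environ.get(name, default)
calcswitch = get_switch('calc')           # 'True' -> compute, anything else -> just load results
saveswitch = get_switch('save')           # 'True' -> save results
plotswitch = get_switch('plot')           # 'True' -> produce plots
local      = get_switch('local', 'True')  # 'True' -> dask local cluster, or a number of workers
year       = get_switch('year', '*')      # e.g. '*1[0-9]' to select one lot of files (assumed default)
month      = get_switch('month', '*')     # selects the ...-XIOS.{month}/ directory (assumed default)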
%%time
# 'savefig': save the output as html or not; keep it True.
savefig=True
client,cluster,control,catalog_url,month,year,daskreport,outputpath = load.set_control(host)
!mkdir -p $outputpath
!mkdir -p $daskreport
client
local True
using host= irene5935.c-irene.mg1.tgcc.ccc.cea.fr
starting dask cluster on local= True workers 16 10000000000 False
not local in tgcc rome local cluster starting
This code is running on irene5935.c-irene.mg1.tgcc.ccc.cea.fr
using SEDNA_ALPHA_MONITOR file experiment, read from ../lib/SEDNA_ALPHA_MONITOR.yaml
on year= *1[0-9]
on month= 26
outputpath= ../results/SEDNA_ALPHA_MONITOR/26/
daskreport= ../results/dask/2586518irene5935.c-irene.mg1.tgcc.ccc.cea.fr_SEDNA_ALPHA_MONITOR_26FWC_2D/
CPU times: user 293 ms, sys: 253 ms, total: 546 ms
Wall time: 10.7 s
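The log above reports a local dask cluster with 16 workers and a 10 GB memory limit per worker. load.set_control hides the details; the following is only a minimal sketch of such a setup, with the arguments assumed from the "workers 16 10000000000" line in the log.
from dask.distributed import Client, LocalCluster
# assumed settings matching the log above: 16 workers, ~10 GB memory limit each
cluster = LocalCluster(n_workers=16, memory_limit=10e9)
client = Client(cluster)
client   # displaying the client shows the dashboard link and a worker summary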
df=load.controlfile(control)
#Take out 'later' tagged computations
#df=df[~df['Value'].str.contains('later')]
df
| Value  | Inputs                                           | Equation               | Zone | Plot | Colourmap  | MinMax | Unit | Oldname |
|--------|--------------------------------------------------|-------------------------|------|------|------------|--------|------|---------|
| FWC_2D | gridS.vosaline,param.mask,param.e3t,param.e1te2t | calc.FWC2D_UFUNC(data)  | BBFG | maps | Spectral_r | (0,24) | m    | S-1     |
Each computation consists of loading the inputs, zooming into the requested region, computing the value, saving the result, and plotting it, as sketched below.
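monitor.auto, called further down, drives these steps for every row of the control table. The following is only a rough sketch of that flow, with names taken from the outputs and traceback in this notebook; the real core/monitor.py adds the calc/save/plot switches and dask performance reports, and may differ in its details.
# rough sketch only; see core/monitor.py for the real implementation
def auto_sketch(df, data, nc_outputpath, file_exp='SEDNA'):
    for step in df.itertuples():
        filename = f"{file_exp}_{step.Plot}_{step.Zone}_{step.Value}"    # e.g. SEDNA_maps_BBFG_FWC_2D
        dataa = getattr(zoom, step.Zone)(data)                           # 2. zoom, e.g. zoom.BBFG(data)
        dtaa = eval(step.Equation, {'calc': calc, 'data': dataa})        # 3. evaluate the Equation column
        save.datas(dtaa, plot=step.Plot, path=nc_outputpath, filename=filename)  # 4. save
        # 5. plot with core.plots and write the html figure (call not shown in this section)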
%%time
import os
calcswitch = os.environ.get('calc', 'True')
loaddata = (df.Inputs != '').any()
print('calcswitch=', calcswitch, 'df.Inputs != nothing', loaddata)
data = load.datas(catalog_url, df.Inputs, month, year, daskreport) if (calcswitch == 'True' and loaddata) else 0
data
calcswitch= True df.Inputs != nothing True ../lib/SEDNA_ALPHA_MONITOR.yaml using param_xios reading ../lib/SEDNA_ALPHA_MONITOR.yaml using param_xios reading <bound method DataSourceBase.describe of sources: param_xios: args: combine: by_coords concat_dim: y urlpath: /ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/SEDNA-I/SEDNA_Domain_cfg_Tgt_20210423_tsh10m_L1/param_f32/x_*.nc xarray_kwargs: compat: override coords: minimal data_vars: minimal parallel: true description: SEDNA NEMO parameters from MPI output nav_lon lat fails driver: intake_xarray.netcdf.NetCDFSource metadata: catalog_dir: /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/../lib/ > {'name': 'param_xios', 'container': 'xarray', 'plugin': ['netcdf'], 'driver': ['netcdf'], 'description': 'SEDNA NEMO parameters from MPI output nav_lon lat fails', 'direct_access': 'forbid', 'user_parameters': [{'name': 'path', 'description': 'file coordinate', 'type': 'str', 'default': '/ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/MESH/SEDNA_mesh_mask_Tgt_20210423_tsh10m_L1/param'}], 'metadata': {}, 'args': {'urlpath': '/ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/SEDNA-I/SEDNA_Domain_cfg_Tgt_20210423_tsh10m_L1/param_f32/x_*.nc', 'combine': 'by_coords', 'concat_dim': 'y'}} 0 read gridS ['vosaline'] using load_data_xios reading gridS using load_data_xios reading <bound method DataSourceBase.describe of sources: data_xios: args: combine: by_coords concat_dim: time_counter,x,y urlpath: /ccc/scratch/cont003/gen7420/talandel/ONGOING-RUNS/SEDNA-ALPHA-XIOS.26/SEDNA-ALPHA_1d_gridS_*1[0-9]_0[0-5][0-9][0-9].nc xarray_kwargs: compat: override coords: minimal data_vars: minimal drop_variables: !!set deptht_bounds: null depthu_bounds: null nav_lat: null nav_lon: null time_centerd: null time_centered_bounds: null time_counter_bounds: null parallel: true preprocess: !!python/name:core.load.prep '' description: SEDNA NEMO outputs from different xios server driver: intake_xarray.netcdf.NetCDFSource metadata: catalog_dir: /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/../lib/ > took 489.27909684181213 seconds 0 merging gridS ['vosaline'] param nav_lat will be included in data param mask2d will be included in data param e1te2t will be included in data param e3t will be included in data param mask will be included in data param nav_lon will be included in data sum_num (13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 
12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12) start rechunking with (130, 122, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 48) end of y_rechunk CPU times: user 40.9 s, sys: 15.1 s, total: 55.9 s Wall time: 8min 25s
<xarray.Dataset>
Dimensions:   (t: 5, x: 6560, y: 6540, z: 150)
Coordinates:
  * t         (t) object 2004-10-10 12:00:00 ... 2004-10-14 12:00:00
  * y         (y) int64 1 2 3 4 5 6 7 8 ... 6534 6535 6536 6537 6538 6539 6540
  * x         (x) int64 1 2 3 4 5 6 7 8 ... 6554 6555 6556 6557 6558 6559 6560
  * z         (z) int64 1 2 3 4 5 6 7 8 9 ... 143 144 145 146 147 148 149 150
    nav_lat   (y, x) float32 dask.array<chunksize=(130, 6560), meta=np.ndarray>
    mask2d    (y, x) bool dask.array<chunksize=(130, 6560), meta=np.ndarray>
    e1te2t    (y, x) float64 dask.array<chunksize=(130, 6560), meta=np.ndarray>
    e3t       (z, y, x) float64 dask.array<chunksize=(150, 130, 6560), meta=np.ndarray>
    mask      (z, y, x) bool dask.array<chunksize=(150, 130, 6560), meta=np.ndarray>
    nav_lon   (y, x) float32 dask.array<chunksize=(130, 6560), meta=np.ndarray>
Data variables:
    vosaline  (t, z, y, x) float32 dask.array<chunksize=(1, 150, 130, 6560), meta=np.ndarray>
Attributes:
    name:                    /ccc/scratch/cont003/gen7420/talandel/ONGOING-RU...
    description:             ocean T grid variables
    title:                   ocean T grid variables
    Conventions:             CF-1.6
    timeStamp:               2021-Aug-16 16:21:48 GMT
    uuid:                    0b0731d9-72a5-4aef-ba02-6762dacf9ad6
    ibegin:                  0
    ni:                      6560
    jbegin:                  0
    nj:                      13
    DOMAIN_number_total:     544
    DOMAIN_number:           0
    DOMAIN_dimensions_ids:   [2 3]
    DOMAIN_size_global:      [6560 6540]
    DOMAIN_size_local:       [6560 13]
    DOMAIN_position_first:   [1 1]
    DOMAIN_position_last:    [6560 13]
    DOMAIN_halo_size_start:  [0 0]
    DOMAIN_halo_size_end:    [0 0]
    DOMAIN_type:             box
    start_date:              20030101
    output_frequency:        1d
    CONFIG:                  SEDNA
    CASE:                    ALPHA
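The log above shows that load.datas resolves the Inputs column through the intake catalog ../lib/SEDNA_ALPHA_MONITOR.yaml. Below is a minimal, hedged sketch of reading such a catalog directly; the entry names param_xios and data_xios come from the log, while the real load.datas additionally applies preprocessing, rechunking and merging of the parameter fields.
import intake
# open the experiment catalog and load the sources lazily as dask-backed xarray datasets
cat = intake.open_catalog(catalog_url)
salinity = cat.data_xios.to_dask()    # SEDNA NEMO outputs (gridS files with vosaline)
params   = cat.param_xios.to_dask()   # mesh/mask parameters (e3t, e1te2t, masks, nav_lon/nav_lat)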
%%time
monitor.auto(df, data, savefig, daskreport, outputpath, file_exp='SEDNA')
#calc= True
#save= True
#plot= False
Zone='BBFG' Value='FWC_2D' cmap='Spectral_r' clabel='m' clim= (0, 24)
outputpath='../results/SEDNA_ALPHA_MONITOR/26/' nc_outputpath='../nc_results/SEDNA_ALPHA_MONITOR/26/'
filename='SEDNA_maps_BBFG_FWC_2D'
#2 Zooming Data
dataa= zoom.BBFG(data)
<xarray.Dataset>
Dimensions:   (t: 5, x: 6560, y: 5264, z: 150)
Coordinates:
  * x         (x) int64 1 2 3 4 5 6 7 8 ... 6554 6555 6556 6557 6558 6559 6560
  * y         (y) int64 1277 1278 1279 1280 1281 ... 6536 6537 6538 6539 6540
  * t         (t) object 2004-10-10 12:00:00 ... 2004-10-14 12:00:00
  * z         (z) int64 1 2 3 4 5 6 7 8 9 ... 143 144 145 146 147 148 149 150
    nav_lat   (y, x) float32 dask.array<chunksize=(56, 6560), meta=np.ndarray>
    mask2d    (y, x) bool dask.array<chunksize=(56, 6560), meta=np.ndarray>
    e1te2t    (y, x) float64 dask.array<chunksize=(56, 6560), meta=np.ndarray>
    e3t       (z, y, x) float64 dask.array<chunksize=(150, 56, 6560), meta=np.ndarray>
    mask      (z, y, x) bool dask.array<chunksize=(150, 56, 6560), meta=np.ndarray>
    nav_lon   (y, x) float32 dask.array<chunksize=(56, 6560), meta=np.ndarray>
Data variables:
    vosaline  (t, z, y, x) float32 dask.array<chunksize=(1, 150, 56, 6560), meta=np.ndarray>
Attributes:
    name:                    /ccc/scratch/cont003/gen7420/talandel/ONGOING-RU...
    description:             ocean T grid variables
    title:                   ocean T grid variables
    Conventions:             CF-1.6
    timeStamp:               2021-Aug-16 16:21:48 GMT
    uuid:                    0b0731d9-72a5-4aef-ba02-6762dacf9ad6
    ibegin:                  0
    ni:                      6560
    jbegin:                  0
    nj:                      13
    DOMAIN_number_total:     544
    DOMAIN_number:           0
    DOMAIN_dimensions_ids:   [2 3]
    DOMAIN_size_global:      [6560 6540]
    DOMAIN_size_local:       [6560 13]
    DOMAIN_position_first:   [1 1]
    DOMAIN_position_last:    [6560 13]
    DOMAIN_halo_size_start:  [0 0]
    DOMAIN_halo_size_end:    [0 0]
    DOMAIN_type:             box
    start_date:              20030101
    output_frequency:        1d
    CONFIG:                  SEDNA
    CASE:                    ALPHA
#3 Start computing
dtaa= calc.FWC2D_UFUNC(data)
<xarray.Dataset>
Dimensions:  (t: 5, x: 6560, y: 5264)
Coordinates:
  * x        (x) int64 1 2 3 4 5 6 7 8 ... 6554 6555 6556 6557 6558 6559 6560
  * y        (y) int64 1277 1278 1279 1280 1281 ... 6536 6537 6538 6539 6540
  * t        (t) object 2004-10-10 12:00:00 ... 2004-10-14 12:00:00
    nav_lat  (y, x) float32 dask.array<chunksize=(56, 6560), meta=np.ndarray>
    mask2d   (y, x) bool dask.array<chunksize=(56, 6560), meta=np.ndarray>
    e1te2t   (y, x) float64 dask.array<chunksize=(56, 6560), meta=np.ndarray>
    nav_lon  (y, x) float32 dask.array<chunksize=(56, 6560), meta=np.ndarray>
Data variables:
    FWC2D    (t, y, x) float32 dask.array<chunksize=(1, 56, 6560), meta=np.ndarray>
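FWC2D is the two-dimensional freshwater content in metres, computed here by calc.FWC2D_UFUNC. Its implementation is not shown in this notebook; a standard definition is FWC = integral over depth of (S_ref - S)/S_ref, which in xarray terms might look like the sketch below. The reference salinity of 34.80 and the masking convention are assumptions, not taken from this notebook.
def fwc2d_sketch(ds, s_ref=34.80):
    # freshwater fraction per cell, integrated vertically over wet cells -> metres
    dz = ds.e3t.where(ds.mask)                    # vertical cell thickness, masked to ocean points
    frac = (s_ref - ds.vosaline) / s_ref          # freshwater fraction relative to s_ref
    return (frac * dz).sum('z').rename('FWC2D')   # (t, y, x) field in metres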
#4 Saving SEDNA_maps_BBFG_FWC_2D
dtaa=save.datas(data,plot=Plot,path=nc_outputpath,filename=filename)
start saving data
saving data in a file
t (1, 1, 1, 1, 1) 0 1 2 3 4 slice(0, 1, None)
--------------------------------------------------------------------------- TimeoutError Traceback (most recent call last) ~/monitor/lib/python3.7/site-packages/distributed/comm/core.py in connect() 287 connector.connect(loc, deserialize=deserialize, **connection_args), --> 288 timeout=min(intermediate_cap, time_left()), 289 ) ~/monitor/lib/python3.7/asyncio/tasks.py in wait_for() 448 await _cancel_and_wait(fut, loop=loop) --> 449 raise futures.TimeoutError() 450 finally: TimeoutError: The above exception was the direct cause of the following exception: OSError Traceback (most recent call last) <timed eval> in <module> /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py in auto(df, val, savefig, daskreport, outputpath, file_exp) 77 print('dtaa=save.datas(data,plot=Plot,path=nc_outputpath,filename=filename)' ) 78 with performance_report(filename=daskreport+"_save_"+step.Value+".html"): ---> 79 save.datas(data,plot=Plot,path=nc_outputpath,filename=filename) 80 # 5. Plot 81 if plotswitch=='True': /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/save.py in datas(data, plot, path, filename) 22 twoD(data,path,filename,nested=False) 23 else : ---> 24 twoD(data,path,filename) 25 return None 26 /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/save.py in twoD(data, path, filename, nested) 55 print('saving data in a file') 56 filesave=path+filename ---> 57 return to_mfnetcdf_map(data,prefix=filesave, nested=nested) 58 59 def twoD_onefile(data /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/save.py in to_mfnetcdf_map(ds, prefix, nested) 226 ,template=template 227 ) --> 228 mapped.compute() 229 230 return mapped#.compute() ~/monitor/lib/python3.7/site-packages/xarray/core/dataset.py in compute(self, **kwargs) 904 """ 905 new = self.copy(deep=False) --> 906 return new.load(**kwargs) 907 908 def _persist_inplace(self, **kwargs) -> "Dataset": ~/monitor/lib/python3.7/site-packages/xarray/core/dataset.py in load(self, **kwargs) 739 740 # evaluate all the dask arrays simultaneously --> 741 evaluated_data = da.compute(*lazy_data.values(), **kwargs) 742 743 for k, data in zip(lazy_data, evaluated_data): ~/monitor/lib/python3.7/site-packages/dask/base.py in compute(*args, **kwargs) 561 postcomputes.append(x.__dask_postcompute__()) 562 --> 563 results = schedule(dsk, keys, **kwargs) 564 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)]) 565 ~/monitor/lib/python3.7/site-packages/distributed/client.py in get(self, dsk, keys, workers, allow_other_workers, resources, sync, asynchronous, direct, retries, priority, fifo_timeout, actors, **kwargs) 2653 should_rejoin = False 2654 try: -> 2655 results = self.gather(packed, asynchronous=asynchronous, direct=direct) 2656 finally: 2657 for f in futures.values(): ~/monitor/lib/python3.7/site-packages/distributed/client.py in gather(self, futures, errors, direct, asynchronous) 1968 direct=direct, 1969 local_worker=local_worker, -> 1970 asynchronous=asynchronous, 1971 ) 1972 ~/monitor/lib/python3.7/site-packages/distributed/client.py in sync(self, func, asynchronous, callback_timeout, *args, **kwargs) 837 else: 838 return sync( --> 839 self.loop, func, *args, callback_timeout=callback_timeout, **kwargs 840 ) 841 ~/monitor/lib/python3.7/site-packages/distributed/utils.py in sync(loop, func, callback_timeout, *args, **kwargs) 338 if error[0]: 339 typ, exc, tb = error[0] --> 340 raise exc.with_traceback(tb) 341 else: 342 return result[0] ~/monitor/lib/python3.7/site-packages/distributed/utils.py 
in f() 322 if callback_timeout is not None: 323 future = asyncio.wait_for(future, callback_timeout) --> 324 result[0] = yield future 325 except Exception as exc: 326 error[0] = sys.exc_info() ~/monitor/lib/python3.7/site-packages/tornado/gen.py in run(self) 760 761 try: --> 762 value = future.result() 763 except Exception: 764 exc_info = sys.exc_info() ~/monitor/lib/python3.7/site-packages/distributed/client.py in _gather(self, futures, errors, direct, local_worker) 1827 exc = CancelledError(key) 1828 else: -> 1829 raise exception.with_traceback(traceback) 1830 raise exc 1831 if errors == "skip": ~/monitor/lib/python3.7/site-packages/xarray/core/parallel.py in _wrapper() 284 ] 285 --> 286 result = func(*converted_args, **kwargs) 287 288 # check all dims are present /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/save.py in create_eachfile() 197 filepath=filepath[:-1]+'.nc' 198 os.makedirs(os.path.dirname(filepath), exist_ok=True) --> 199 ds.to_netcdf(filepath,mode='w') 200 # print('debug filename list',filepath) 201 dd=ds ~/monitor/lib/python3.7/site-packages/xarray/core/dataset.py in to_netcdf() 1652 unlimited_dims=unlimited_dims, 1653 compute=compute, -> 1654 invalid_netcdf=invalid_netcdf, 1655 ) 1656 ~/monitor/lib/python3.7/site-packages/xarray/backends/api.py in to_netcdf() 1072 1073 # handle scheduler specific logic -> 1074 scheduler = _get_scheduler() 1075 have_chunks = any(v.chunks for v in dataset.variables.values()) 1076 ~/monitor/lib/python3.7/site-packages/xarray/backends/locks.py in _get_scheduler() 78 from dask.base import get_scheduler # noqa: F401 79 ---> 80 actual_get = get_scheduler(get, collection) 81 except ImportError: 82 return None ~/monitor/lib/python3.7/site-packages/dask/base.py in get_scheduler() 1155 from distributed.worker import get_worker 1156 -> 1157 return get_worker().client.get 1158 1159 if cls is not None: ~/monitor/lib/python3.7/site-packages/distributed/worker.py in client() 3047 return self._client 3048 else: -> 3049 return self._get_client() 3050 3051 def _get_client(self, timeout=None): ~/monitor/lib/python3.7/site-packages/distributed/worker.py in _get_client() 3090 direct_to_workers=True, 3091 name="worker", -> 3092 timeout=timeout, 3093 ) 3094 if not asynchronous: ~/monitor/lib/python3.7/site-packages/distributed/client.py in __init__() 745 ext(self) 746 --> 747 self.start(timeout=timeout) 748 Client._instances.add(self) 749 ~/monitor/lib/python3.7/site-packages/distributed/client.py in start() 953 self._started = asyncio.ensure_future(self._start(**kwargs)) 954 else: --> 955 sync(self.loop, self._start, **kwargs) 956 957 def __await__(self): ~/monitor/lib/python3.7/site-packages/distributed/utils.py in sync() 338 if error[0]: 339 typ, exc, tb = error[0] --> 340 raise exc.with_traceback(tb) 341 else: 342 return result[0] ~/monitor/lib/python3.7/site-packages/distributed/utils.py in f() 322 if callback_timeout is not None: 323 future = asyncio.wait_for(future, callback_timeout) --> 324 result[0] = yield future 325 except Exception as exc: 326 error[0] = sys.exc_info() ~/monitor/lib/python3.7/site-packages/tornado/gen.py in run() 760 761 try: --> 762 value = future.result() 763 except Exception: 764 exc_info = sys.exc_info() ~/monitor/lib/python3.7/site-packages/distributed/client.py in _start() 1043 1044 try: -> 1045 await self._ensure_connected(timeout=timeout) 1046 except (OSError, ImportError): 1047 await self._close() ~/monitor/lib/python3.7/site-packages/distributed/client.py in _ensure_connected() 1101 try: 1102 comm = await 
connect( -> 1103 self.scheduler.address, timeout=timeout, **self.connection_args 1104 ) 1105 comm.name = "Client->Scheduler" ~/monitor/lib/python3.7/site-packages/distributed/comm/core.py in connect() 308 raise IOError( 309 f"Timed out trying to connect to {addr} after {timeout} s" --> 310 ) from active_exception 311 312 local_info = { OSError: Timed out trying to connect to tcp://127.0.0.1:38627 after 10 s