%matplotlib inline
import pandas as pd
import socket
# Fully-qualified hostname of the current node; passed to load.set_control()
# below to select site/cluster-specific settings (e.g. TGCC Irene nodes).
host = socket.getfqdn()
from core import load, zoom, calc, save,plots,monitor
# Reload the project modules so that edits to ./core/*.py take effect
# without restarting the notebook kernel.
import importlib
importlib.reload(load)
importlib.reload(zoom)
importlib.reload(calc)
importlib.reload(save)
importlib.reload(plots)
importlib.reload(monitor)
<module 'core.monitor' from '/ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py'>
# 'month' = 'JOBID': almost the month, but not exactly.
# If you submit the job with a job scheduler, the above applies.
# Below is a list of environment variables one can pass:
#%env local='2'
# local : if True, run a local dask cluster; if not True, the number of workers
# is set in 'local'.
# If no 'local' is given, local will automatically be set to 'True'.
#%env ychunk='2'
#%env tchunk='2'
# Controls chunking. 'False' keeps the original netcdf file's chunking unmodified.
# ychunk=10 will group the original netcdf files 10 by 10.
# tchunk=1 will chunk the time coordinate one by one.
#%env control=Ints_monitor
# name of control file to be used for computation/plots/save/
#%env file_exp=
# 'file_exp': Which 'experiment' name is it?
# This corresponds to the intake catalog name, without path and '.yaml' extension.
#%env year=
# for Validation, this corresponds to the year in path/year/month
# for monitoring, this corresponds to 'date'; having * means do all files in the monitoring directory
# setting it as *0[0-9] & *1[0-9] & *[2-3][0-9], the job can be separated into three lots.
#%env month=
# for monitoring this corresponds to file path path-XIOS.{month}/
#
#%env save= proceed with saving? True or False. Default is set to True
#%env plot= proceed with plotting? True or False. Default is set to True
#%env calc= proceed with computation? or just load computed results? True or False. Default is set to True
%%time
# 'savefig': Do we save output in html? or not. keep it true.
savefig=True
# set_control() reads the environment variables described above (local,
# control, file_exp, year, month, save/plot/calc switches) and returns the
# dask client/cluster plus the run metadata used throughout this notebook.
# NOTE(review): exact env-var handling lives in core/load.py — confirm there.
client,cluster,control,catalog_url,month,year,daskreport,outputpath = load.set_control(host)
# Create the output and dask-report directories before anything writes to them.
!mkdir -p $outputpath
!mkdir -p $daskreport
# Display the dask client widget (cluster dashboard link, worker count).
client
local True using host= irene5169.c-irene.mg1.tgcc.ccc.cea.fr starting dask cluster on local= True workers 16 10000000000 False not local in tgcc rome local cluster starting This code is running on irene5169.c-irene.mg1.tgcc.ccc.cea.fr using SEDNA_ALPHA_MONITOR file experiment, read from ../lib/SEDNA_ALPHA_MONITOR.yaml on year= * on month= 22 outputpath= ../results/rome_SEDNA_ALPHA_MONITOR/22/ daskreport= ../results/dask/2495890irene5169.c-irene.mg1.tgcc.ccc.cea.fr_SEDNA_ALPHA_MONITOR_22section_moni/ CPU times: user 339 ms, sys: 235 ms, total: 574 ms Wall time: 10.1 s
Client
|
Cluster
|
# Load the control table: one row per diagnostic (Value, Inputs, Equation,
# Zone, Plot, Colourmap, MinMax, Unit, ...) driving compute/plot/save below.
df=load.controlfile(control)
# Take out computations whose 'Value' is tagged 'later' (deferred diagnostics).
df=df[~df['Value'].str.contains('later')]
# Display the remaining rows for inspection.
df
Value | Inputs | Equation | Zone | Plot | Colourmap | MinMax | Unit | Oldname | Unnamed: 10 | |
---|---|---|---|---|---|---|---|---|---|---|
Section | gridS.vosaline,gridT.votemper,gridV.vomecrty,p... | data.drop_vars('vozocrtx') | FramS | section | None | {'vosaline': (33,36.2), 'votemper': (-2,6), 'v... | None | S-1 | ||
Section | gridS.vosaline,gridT.votemper,gridU.vozocrtx,p... | data.drop_vars('vomecrty') | BFGS | section | None | {'vosaline': (28,35), 'votemper': (-2,2), 'voz... | None | S-2 |
Each computation consists of
%%time
#todo add 'year' here.
# Read every input grid listed in df.Inputs (e.g. gridS.vosaline,
# gridT.votemper, ...) through the intake catalog for the given month/year,
# merge them into a single dask-backed xarray.Dataset, and rechunk.
# A dask performance report is written under daskreport.
data=load.datas(catalog_url,df.Inputs,month,year,daskreport)
#print('#1 Data: created:')
#print('# if we read too many files, we can do sel to take out some dates here')
# Display the merged dataset (dims, coords, chunking).
data
../lib/SEDNA_ALPHA_MONITOR.yaml using param_xios reading ../lib/SEDNA_ALPHA_MONITOR.yaml using param_xios reading <bound method DataSourceBase.describe of sources: param_xios: args: combine: by_coords concat_dim: y urlpath: /ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/SEDNA-I/SEDNA_Domain_cfg_Tgt_20210423_tsh10m_L1/param/x_*.nc xarray_kwargs: compat: override coords: minimal data_vars: minimal parallel: true description: SEDNA NEMO parameters from MPI output nav_lon lat fails driver: intake_xarray.netcdf.NetCDFSource metadata: catalog_dir: /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/../lib/ > {'name': 'param_xios', 'container': 'xarray', 'plugin': ['netcdf'], 'driver': ['netcdf'], 'description': 'SEDNA NEMO parameters from MPI output nav_lon lat fails', 'direct_access': 'forbid', 'user_parameters': [{'name': 'path', 'description': 'file coordinate', 'type': 'str', 'default': '/ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/MESH/SEDNA_mesh_mask_Tgt_20210423_tsh10m_L1/param'}], 'metadata': {}, 'args': {'urlpath': '/ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/SEDNA-I/SEDNA_Domain_cfg_Tgt_20210423_tsh10m_L1/param/x_*.nc', 'combine': 'by_coords', 'concat_dim': 'y'}} 0 read gridS ['vosaline'] using load_data_xios reading gridS using load_data_xios reading <bound method DataSourceBase.describe of sources: data_xios: args: combine: by_coords concat_dim: time_counter,x,y urlpath: /ccc/scratch/cont003/gen7420/talandel/ONGOING-RUNS/SEDNA-ALPHA-XIOS.22/SEDNA-ALPHA_1d_gridS_*_0[0-5][0-9][0-9].nc xarray_kwargs: compat: override coords: minimal data_vars: minimal drop_variables: !!set deptht_bounds: null depthu_bounds: null nav_lat: null nav_lon: null time_centerd: null time_centered_bounds: null time_counter_bounds: null parallel: true preprocess: !!python/name:core.load.prep '' description: SEDNA NEMO outputs from different xios server driver: intake_xarray.netcdf.NetCDFSource metadata: catalog_dir: 
/ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/../lib/ > took 177.22788500785828 seconds 0 merging gridS ['vosaline'] 1 read gridT ['votemper'] using load_data_xios reading gridT using load_data_xios reading <bound method DataSourceBase.describe of sources: data_xios: args: combine: by_coords concat_dim: time_counter,x,y urlpath: /ccc/scratch/cont003/gen7420/talandel/ONGOING-RUNS/SEDNA-ALPHA-XIOS.22/SEDNA-ALPHA_1d_gridT_*_0[0-5][0-9][0-9].nc xarray_kwargs: compat: override coords: minimal data_vars: minimal drop_variables: !!set deptht_bounds: null depthu_bounds: null nav_lat: null nav_lon: null time_centerd: null time_centered_bounds: null time_counter_bounds: null parallel: true preprocess: !!python/name:core.load.prep '' description: SEDNA NEMO outputs from different xios server driver: intake_xarray.netcdf.NetCDFSource metadata: catalog_dir: /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/../lib/ > took 84.70562720298767 seconds 1 merging gridT ['votemper'] took 0.37613654136657715 seconds 2 read gridU ['vozocrtx'] using load_data_xios reading gridU using load_data_xios reading <bound method DataSourceBase.describe of sources: data_xios: args: combine: by_coords concat_dim: time_counter,x,y urlpath: /ccc/scratch/cont003/gen7420/talandel/ONGOING-RUNS/SEDNA-ALPHA-XIOS.22/SEDNA-ALPHA_1d_gridU_*_0[0-5][0-9][0-9].nc xarray_kwargs: compat: override coords: minimal data_vars: minimal drop_variables: !!set deptht_bounds: null depthu_bounds: null nav_lat: null nav_lon: null time_centerd: null time_centered_bounds: null time_counter_bounds: null parallel: true preprocess: !!python/name:core.load.prep '' description: SEDNA NEMO outputs from different xios server driver: intake_xarray.netcdf.NetCDFSource metadata: catalog_dir: /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/../lib/ > took 77.11606049537659 seconds 2 merging gridU ['vozocrtx'] took 0.34493017196655273 seconds 3 read gridV ['vomecrty'] using load_data_xios reading gridV using 
load_data_xios reading <bound method DataSourceBase.describe of sources: data_xios: args: combine: by_coords concat_dim: time_counter,x,y urlpath: /ccc/scratch/cont003/gen7420/talandel/ONGOING-RUNS/SEDNA-ALPHA-XIOS.22/SEDNA-ALPHA_1d_gridV_*_0[0-5][0-9][0-9].nc xarray_kwargs: compat: override coords: minimal data_vars: minimal drop_variables: !!set deptht_bounds: null depthu_bounds: null nav_lat: null nav_lon: null time_centerd: null time_centered_bounds: null time_counter_bounds: null parallel: true preprocess: !!python/name:core.load.prep '' description: SEDNA NEMO outputs from different xios server driver: intake_xarray.netcdf.NetCDFSource metadata: catalog_dir: /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/../lib/ > took 85.0255937576294 seconds 3 merging gridV ['vomecrty'] took 0.3382694721221924 seconds param mask2d will be included in data param nav_lon will be included in data param mask will be included in data param depth will be included in data param nav_lat will be included in data sum_num (13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 
12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12) start rechunking with (130, 122, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 48) end of y_rechunk CPU times: user 3min 10s, sys: 31.1 s, total: 3min 41s Wall time: 7min 29s
<xarray.Dataset> Dimensions: (t: 15, x: 6560, y: 6540, z: 150) Coordinates: * t (t) object 2004-06-16 12:00:00 ... 2004-06-30 12:00:00 * y (y) int64 1 2 3 4 5 6 7 8 ... 6534 6535 6536 6537 6538 6539 6540 * x (x) int64 1 2 3 4 5 6 7 8 ... 6554 6555 6556 6557 6558 6559 6560 * z (z) int64 1 2 3 4 5 6 7 8 9 ... 143 144 145 146 147 148 149 150 mask2d (y, x) bool dask.array<chunksize=(130, 6560), meta=np.ndarray> nav_lon (y, x) float32 dask.array<chunksize=(130, 6560), meta=np.ndarray> mask (z, y, x) bool dask.array<chunksize=(150, 130, 6560), meta=np.ndarray> depth (z, y, x) float64 dask.array<chunksize=(150, 130, 6560), meta=np.ndarray> nav_lat (y, x) float32 dask.array<chunksize=(130, 6560), meta=np.ndarray> Data variables: vosaline (t, z, y, x) float32 dask.array<chunksize=(1, 150, 130, 6560), meta=np.ndarray> votemper (t, z, y, x) float32 dask.array<chunksize=(1, 150, 130, 6560), meta=np.ndarray> vozocrtx (t, z, y, x) float32 dask.array<chunksize=(1, 150, 130, 6560), meta=np.ndarray> vomecrty (t, z, y, x) float32 dask.array<chunksize=(1, 150, 130, 6560), meta=np.ndarray>
array([cftime.DatetimeNoLeap(2004, 6, 16, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 17, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 18, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 19, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 20, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 21, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 22, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 23, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 24, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 25, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 26, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 27, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 28, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 29, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 30, 12, 0, 0, 0)], dtype=object)
array([ 1, 2, 3, ..., 6538, 6539, 6540])
array([ 1, 2, 3, ..., 6558, 6559, 6560])
array([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150])
|
|
|
|
|
|
|
|
|
%%time
# Run the full monitoring pipeline for each row of df:
# zoom (Zone) -> compute (Equation, if calcswitch) -> save (if saveswitch)
# -> plot (if plotswitch), writing HTML figures to outputpath when savefig.
monitor.auto(df,data,savefig,daskreport,outputpath,file_exp='SEDNA'
)
switch:calcswitch,saveswitch,plotswitch True False True data= zoom.FramS(data) #2 Zooming Data
<xarray.Dataset> Dimensions: (t: 15, x: 554, z: 103) Coordinates: * t (t) object 2004-06-16 12:00:00 ... 2004-06-30 12:00:00 y int64 2609 * x (x) int64 3749 3750 3751 3752 3753 ... 4298 4299 4300 4301 4302 * z (z) int64 1 2 3 4 5 6 7 8 9 10 ... 95 96 97 98 99 100 101 102 103 mask2d (x) bool dask.array<chunksize=(554,), meta=np.ndarray> nav_lon (x) float32 dask.array<chunksize=(554,), meta=np.ndarray> mask (z, x) bool dask.array<chunksize=(103, 554), meta=np.ndarray> depth (z, x) float64 dask.array<chunksize=(103, 554), meta=np.ndarray> nav_lat (x) float32 dask.array<chunksize=(554,), meta=np.ndarray> Data variables: vosaline (t, z, x) float32 dask.array<chunksize=(1, 103, 554), meta=np.ndarray> votemper (t, z, x) float32 dask.array<chunksize=(1, 103, 554), meta=np.ndarray> vozocrtx (t, z, x) float32 dask.array<chunksize=(1, 103, 554), meta=np.ndarray> vomecrty (t, z, x) float32 dask.array<chunksize=(1, 103, 554), meta=np.ndarray>
array([cftime.DatetimeNoLeap(2004, 6, 16, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 17, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 18, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 19, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 20, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 21, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 22, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 23, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 24, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 25, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 26, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 27, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 28, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 29, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 30, 12, 0, 0, 0)], dtype=object)
array(2609)
array([3749, 3750, 3751, ..., 4300, 4301, 4302])
array([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103])
|
|
|
|
|
|
|
|
|
dtaa= data.drop_vars('vozocrtx') #3 Start computing count: <xarray.Dataset> Dimensions: () Coordinates: y int64 2609 Data variables: vosaline int64 dask.array<chunksize=(), meta=np.ndarray> votemper int64 dask.array<chunksize=(), meta=np.ndarray> vozocrtx int64 dask.array<chunksize=(), meta=np.ndarray> vomecrty int64 dask.array<chunksize=(), meta=np.ndarray>
<xarray.Dataset> Dimensions: () Coordinates: y int64 2609 Data variables: vosaline int64 dask.array<chunksize=(), meta=np.ndarray> votemper int64 dask.array<chunksize=(), meta=np.ndarray> vozocrtx int64 dask.array<chunksize=(), meta=np.ndarray> vomecrty int64 dask.array<chunksize=(), meta=np.ndarray>
array(2609)
|
|
|
|
nbytes: 10795088 count: <xarray.Dataset> Dimensions: () Coordinates: y int64 2609 Data variables: vosaline int64 dask.array<chunksize=(), meta=np.ndarray> votemper int64 dask.array<chunksize=(), meta=np.ndarray> vomecrty int64 dask.array<chunksize=(), meta=np.ndarray>
<xarray.Dataset> Dimensions: (t: 15, x: 554, z: 103) Coordinates: * t (t) object 2004-06-16 12:00:00 ... 2004-06-30 12:00:00 y int64 2609 * x (x) int64 3749 3750 3751 3752 3753 ... 4298 4299 4300 4301 4302 * z (z) int64 1 2 3 4 5 6 7 8 9 10 ... 95 96 97 98 99 100 101 102 103 mask2d (x) bool dask.array<chunksize=(554,), meta=np.ndarray> nav_lon (x) float32 dask.array<chunksize=(554,), meta=np.ndarray> mask (z, x) bool dask.array<chunksize=(103, 554), meta=np.ndarray> depth (z, x) float64 dask.array<chunksize=(103, 554), meta=np.ndarray> nav_lat (x) float32 dask.array<chunksize=(554,), meta=np.ndarray> Data variables: vosaline (t, z, x) float32 dask.array<chunksize=(1, 103, 554), meta=np.ndarray> votemper (t, z, x) float32 dask.array<chunksize=(1, 103, 554), meta=np.ndarray> vomecrty (t, z, x) float32 dask.array<chunksize=(1, 103, 554), meta=np.ndarray>
array([cftime.DatetimeNoLeap(2004, 6, 16, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 17, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 18, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 19, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 20, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 21, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 22, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 23, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 24, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 25, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 26, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 27, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 28, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 29, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 30, 12, 0, 0, 0)], dtype=object)
array(2609)
array([3749, 3750, 3751, ..., 4300, 4301, 4302])
array([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103])
|
|
|
|
|
|
|
|
plots.section(data,path=outputpath,filename=filename,save=savefig,cmap=cmap,clim=clim,clabel=clabel) #5 Plotting SEDNA_section_FramS_Section ../results/rome_SEDNA_ALPHA_MONITOR/22/SEDNA_section_FramS_Section_20040616-20040630.html starts plotting ../results/rome_SEDNA_ALPHA_MONITOR/22/SEDNA_section_FramS_Section_20040616-20040630.html created
data= zoom.BFGS(data) #2 Zooming Data
<xarray.Dataset> Dimensions: (t: 15, y: 2420, z: 95) Coordinates: * t (t) object 2004-06-16 12:00:00 ... 2004-06-30 12:00:00 * y (y) int64 3494 3495 3496 3497 3498 ... 5909 5910 5911 5912 5913 x int64 2281 * z (z) int64 1 2 3 4 5 6 7 8 9 10 ... 86 87 88 89 90 91 92 93 94 95 mask2d (y) bool dask.array<chunksize=(119,), meta=np.ndarray> nav_lon (y) float32 dask.array<chunksize=(119,), meta=np.ndarray> mask (z, y) bool dask.array<chunksize=(95, 119), meta=np.ndarray> depth (z, y) float64 dask.array<chunksize=(95, 119), meta=np.ndarray> nav_lat (y) float32 dask.array<chunksize=(119,), meta=np.ndarray> Data variables: vosaline (t, z, y) float32 dask.array<chunksize=(1, 95, 119), meta=np.ndarray> votemper (t, z, y) float32 dask.array<chunksize=(1, 95, 119), meta=np.ndarray> vozocrtx (t, z, y) float32 dask.array<chunksize=(1, 95, 119), meta=np.ndarray> vomecrty (t, z, y) float32 dask.array<chunksize=(1, 95, 119), meta=np.ndarray>
array([cftime.DatetimeNoLeap(2004, 6, 16, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 17, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 18, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 19, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 20, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 21, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 22, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 23, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 24, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 25, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 26, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 27, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 28, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 29, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 30, 12, 0, 0, 0)], dtype=object)
array([3494, 3495, 3496, ..., 5911, 5912, 5913])
array(2281)
array([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95])
|
|
|
|
|
|
|
|
|
dtaa= data.drop_vars('vomecrty') #3 Start computing count: <xarray.Dataset> Dimensions: () Coordinates: x int64 2281 Data variables: vosaline int64 dask.array<chunksize=(), meta=np.ndarray> votemper int64 dask.array<chunksize=(), meta=np.ndarray> vozocrtx int64 dask.array<chunksize=(), meta=np.ndarray> vomecrty int64 dask.array<chunksize=(), meta=np.ndarray>
<xarray.Dataset> Dimensions: () Coordinates: x int64 2281 Data variables: vosaline int64 dask.array<chunksize=(), meta=np.ndarray> votemper int64 dask.array<chunksize=(), meta=np.ndarray> vozocrtx int64 dask.array<chunksize=(), meta=np.ndarray> vomecrty int64 dask.array<chunksize=(), meta=np.ndarray>
array(2281)
|
|
|
|
nbytes: 43493128 count: <xarray.Dataset> Dimensions: () Coordinates: x int64 2281 Data variables: vosaline int64 dask.array<chunksize=(), meta=np.ndarray> votemper int64 dask.array<chunksize=(), meta=np.ndarray> vozocrtx int64 dask.array<chunksize=(), meta=np.ndarray>
<xarray.Dataset> Dimensions: (t: 15, y: 2420, z: 95) Coordinates: * t (t) object 2004-06-16 12:00:00 ... 2004-06-30 12:00:00 * y (y) int64 3494 3495 3496 3497 3498 ... 5909 5910 5911 5912 5913 x int64 2281 * z (z) int64 1 2 3 4 5 6 7 8 9 10 ... 86 87 88 89 90 91 92 93 94 95 mask2d (y) bool dask.array<chunksize=(119,), meta=np.ndarray> nav_lon (y) float32 dask.array<chunksize=(119,), meta=np.ndarray> mask (z, y) bool dask.array<chunksize=(95, 119), meta=np.ndarray> depth (z, y) float64 dask.array<chunksize=(95, 119), meta=np.ndarray> nav_lat (y) float32 dask.array<chunksize=(119,), meta=np.ndarray> Data variables: vosaline (t, z, y) float32 dask.array<chunksize=(1, 95, 119), meta=np.ndarray> votemper (t, z, y) float32 dask.array<chunksize=(1, 95, 119), meta=np.ndarray> vozocrtx (t, z, y) float32 dask.array<chunksize=(1, 95, 119), meta=np.ndarray>
array([cftime.DatetimeNoLeap(2004, 6, 16, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 17, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 18, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 19, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 20, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 21, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 22, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 23, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 24, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 25, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 26, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 27, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 28, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 29, 12, 0, 0, 0), cftime.DatetimeNoLeap(2004, 6, 30, 12, 0, 0, 0)], dtype=object)
array([3494, 3495, 3496, ..., 5911, 5912, 5913])
array(2281)
array([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95])
|
|
|
|
|
|
|
|
plots.section(data,path=outputpath,filename=filename,save=savefig,cmap=cmap,clim=clim,clabel=clabel) #5 Plotting SEDNA_section_BFGS_Section
distributed.nanny - WARNING - Restarting worker distributed.stealing - ERROR - Unexpected task state: error Traceback (most recent call last): File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/stealing.py", line 259, in move_task_confirm raise ValueError("Unexpected task state: %s" % state) ValueError: Unexpected task state: error tornado.application - ERROR - Exception in callback functools.partial(<bound method IOLoop._discard_future_result of <zmq.eventloop.ioloop.ZMQIOLoop object at 0x2b051f64be90>>, <Task finished coro=<WorkStealing.move_task_confirm() done, defined at /ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/stealing.py:189> exception=ValueError('Unexpected task state: error')>) Traceback (most recent call last): File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/tornado/ioloop.py", line 741, in _run_callback ret = callback() File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/tornado/ioloop.py", line 765, in _discard_future_result future.result() File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/stealing.py", line 259, in move_task_confirm raise ValueError("Unexpected task state: %s" % state) ValueError: Unexpected task state: error distributed.stealing - ERROR - Unexpected task state: error Traceback (most recent call last): File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/stealing.py", line 259, in move_task_confirm raise ValueError("Unexpected task state: %s" % state) ValueError: Unexpected task state: error tornado.application - ERROR - Exception in callback functools.partial(<bound method IOLoop._discard_future_result of <zmq.eventloop.ioloop.ZMQIOLoop object at 0x2b051f64be90>>, <Task finished coro=<WorkStealing.move_task_confirm() done, defined at 
/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/stealing.py:189> exception=ValueError('Unexpected task state: error')>) Traceback (most recent call last): File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/tornado/ioloop.py", line 741, in _run_callback ret = callback() File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/tornado/ioloop.py", line 765, in _discard_future_result future.result() File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/stealing.py", line 259, in move_task_confirm raise ValueError("Unexpected task state: %s" % state) ValueError: Unexpected task state: error distributed.nanny - WARNING - Restarting worker distributed.stealing - ERROR - Unexpected task state: error Traceback (most recent call last): File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/stealing.py", line 259, in move_task_confirm raise ValueError("Unexpected task state: %s" % state) ValueError: Unexpected task state: error tornado.application - ERROR - Exception in callback functools.partial(<bound method IOLoop._discard_future_result of <zmq.eventloop.ioloop.ZMQIOLoop object at 0x2b051f64be90>>, <Task finished coro=<WorkStealing.move_task_confirm() done, defined at /ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/stealing.py:189> exception=ValueError('Unexpected task state: error')>) Traceback (most recent call last): File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/tornado/ioloop.py", line 741, in _run_callback ret = callback() File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/tornado/ioloop.py", line 765, in _discard_future_result future.result() File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/stealing.py", line 259, in move_task_confirm raise ValueError("Unexpected task state: %s" % state) 
ValueError: Unexpected task state: error distributed.nanny - WARNING - Worker exceeded 99% memory budget. Restarting distributed.nanny - WARNING - Worker exceeded 99% memory budget. Restarting distributed.nanny - WARNING - Restarting worker distributed.nanny - WARNING - Restarting worker distributed.nanny - WARNING - Worker exceeded 99% memory budget. Restarting distributed.nanny - WARNING - Restarting worker distributed.nanny - WARNING - Worker exceeded 99% memory budget. Restarting distributed.nanny - WARNING - Restarting worker distributed.nanny - WARNING - Restarting worker distributed.nanny - WARNING - Worker exceeded 99% memory budget. Restarting distributed.nanny - WARNING - Restarting worker distributed.nanny - WARNING - Restarting worker distributed.nanny - WARNING - Worker exceeded 99% memory budget. Restarting distributed.nanny - WARNING - Restarting worker distributed.nanny - WARNING - Restarting worker distributed.nanny - WARNING - Restarting worker distributed.core - ERROR - Exception while handling op heartbeat_worker Traceback (most recent call last): File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/core.py", line 496, in handle_comm result = handler(comm, **msg) File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in heartbeat_worker parent._tasks[key]: duration for key, duration in executing.items() File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in <dictcomp> parent._tasks[key]: duration for key, duration in executing.items() KeyError: "('open_dataset-concatenate-b971f738f01c36749236262f2e41e115', 11, 0, 311, 0)" distributed.core - ERROR - Exception while handling op heartbeat_worker Traceback (most recent call last): File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/core.py", line 496, in handle_comm result = handler(comm, **msg) File 
"/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in heartbeat_worker parent._tasks[key]: duration for key, duration in executing.items() File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in <dictcomp> parent._tasks[key]: duration for key, duration in executing.items() KeyError: "('open_dataset-concatenate-0342a91fdd85385b86ab799ba0d3c19d', 9, 0, 306, 0)" distributed.core - ERROR - Exception while handling op heartbeat_worker Traceback (most recent call last): File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/core.py", line 496, in handle_comm result = handler(comm, **msg) File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in heartbeat_worker parent._tasks[key]: duration for key, duration in executing.items() File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in <dictcomp> parent._tasks[key]: duration for key, duration in executing.items() KeyError: "('open_dataset-concatenate-e6da983b4b8e7d8bb703b99d70cdba60', 2, 0, 476, 0)" distributed.core - ERROR - Exception while handling op heartbeat_worker Traceback (most recent call last): File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/core.py", line 496, in handle_comm result = handler(comm, **msg) File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in heartbeat_worker parent._tasks[key]: duration for key, duration in executing.items() File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in <dictcomp> parent._tasks[key]: duration for key, duration in executing.items() KeyError: "('open_dataset-concatenate-0342a91fdd85385b86ab799ba0d3c19d', 9, 0, 490, 0)" distributed.core - 
ERROR - Exception while handling op heartbeat_worker Traceback (most recent call last): File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/core.py", line 496, in handle_comm result = handler(comm, **msg) File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in heartbeat_worker parent._tasks[key]: duration for key, duration in executing.items() File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in <dictcomp> parent._tasks[key]: duration for key, duration in executing.items() KeyError: "('open_dataset-concatenate-e6da983b4b8e7d8bb703b99d70cdba60', 12, 0, 384, 0)" distributed.core - ERROR - Exception while handling op heartbeat_worker Traceback (most recent call last): File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/core.py", line 496, in handle_comm result = handler(comm, **msg) File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in heartbeat_worker parent._tasks[key]: duration for key, duration in executing.items() File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in <dictcomp> parent._tasks[key]: duration for key, duration in executing.items() KeyError: "('open_dataset-concatenate-0342a91fdd85385b86ab799ba0d3c19d', 4, 0, 335, 0)" distributed.core - ERROR - Exception while handling op heartbeat_worker Traceback (most recent call last): File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/core.py", line 496, in handle_comm result = handler(comm, **msg) File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in heartbeat_worker parent._tasks[key]: duration for key, duration in executing.items() File 
"/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in <dictcomp> parent._tasks[key]: duration for key, duration in executing.items() KeyError: "('open_dataset-concatenate-e6da983b4b8e7d8bb703b99d70cdba60', 9, 0, 349, 0)" distributed.core - ERROR - Exception while handling op heartbeat_worker Traceback (most recent call last): File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/core.py", line 496, in handle_comm result = handler(comm, **msg) File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in heartbeat_worker parent._tasks[key]: duration for key, duration in executing.items() File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in <dictcomp> parent._tasks[key]: duration for key, duration in executing.items() KeyError: "('open_dataset-concatenate-e6da983b4b8e7d8bb703b99d70cdba60', 11, 0, 336, 0)" distributed.core - ERROR - Exception while handling op heartbeat_worker Traceback (most recent call last): File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/core.py", line 496, in handle_comm result = handler(comm, **msg) File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in heartbeat_worker parent._tasks[key]: duration for key, duration in executing.items() File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in <dictcomp> parent._tasks[key]: duration for key, duration in executing.items() KeyError: "('open_dataset-concatenate-b971f738f01c36749236262f2e41e115', 0, 0, 418, 0)" distributed.core - ERROR - Exception while handling op heartbeat_worker Traceback (most recent call last): File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/core.py", line 496, in 
handle_comm result = handler(comm, **msg) File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in heartbeat_worker parent._tasks[key]: duration for key, duration in executing.items() File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in <dictcomp> parent._tasks[key]: duration for key, duration in executing.items() KeyError: "('open_dataset-concatenate-e6da983b4b8e7d8bb703b99d70cdba60', 6, 0, 387, 0)" distributed.core - ERROR - Exception while handling op heartbeat_worker Traceback (most recent call last): File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/core.py", line 496, in handle_comm result = handler(comm, **msg) File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in heartbeat_worker parent._tasks[key]: duration for key, duration in executing.items() File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in <dictcomp> parent._tasks[key]: duration for key, duration in executing.items() KeyError: "('open_dataset-concatenate-e6da983b4b8e7d8bb703b99d70cdba60', 6, 0, 406, 0)" distributed.core - ERROR - Exception while handling op heartbeat_worker Traceback (most recent call last): File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/core.py", line 496, in handle_comm result = handler(comm, **msg) File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in heartbeat_worker parent._tasks[key]: duration for key, duration in executing.items() File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in <dictcomp> parent._tasks[key]: duration for key, duration in executing.items() KeyError: 
"('open_dataset-concatenate-b971f738f01c36749236262f2e41e115', 3, 0, 402, 0)" distributed.core - ERROR - Exception while handling op heartbeat_worker Traceback (most recent call last): File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/core.py", line 496, in handle_comm result = handler(comm, **msg) File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in heartbeat_worker parent._tasks[key]: duration for key, duration in executing.items() File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in <dictcomp> parent._tasks[key]: duration for key, duration in executing.items() KeyError: "('open_dataset-concatenate-b971f738f01c36749236262f2e41e115', 11, 0, 450, 0)" distributed.core - ERROR - Exception while handling op heartbeat_worker Traceback (most recent call last): File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/core.py", line 496, in handle_comm result = handler(comm, **msg) File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in heartbeat_worker parent._tasks[key]: duration for key, duration in executing.items() File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in <dictcomp> parent._tasks[key]: duration for key, duration in executing.items() KeyError: "('open_dataset-concatenate-0342a91fdd85385b86ab799ba0d3c19d', 6, 0, 302, 0)" distributed.core - ERROR - Exception while handling op heartbeat_worker Traceback (most recent call last): File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/core.py", line 496, in handle_comm result = handler(comm, **msg) File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in heartbeat_worker parent._tasks[key]: 
duration for key, duration in executing.items() File "/ccc/cont003/home/ifremer/odakatin/monitor/lib/python3.7/site-packages/distributed/scheduler.py", line 3431, in <dictcomp> parent._tasks[key]: duration for key, duration in executing.items() KeyError: "('open_dataset-concatenate-0342a91fdd85385b86ab799ba0d3c19d', 0, 0, 414, 0)"
--------------------------------------------------------------------------- KilledWorker Traceback (most recent call last) <timed eval> in <module> /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py in auto(df, val, savefig, daskreport, outputpath, file_exp) 64 print(command, '#5 Plotting',filename ) 65 with performance_report(filename=daskreport+"_plot_"+step.Value+".html"): ---> 66 filename=eval(command ) 67 print(filename,'created') 68 display(IFrame(filename, width=1000, height=500)) /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py in <module> /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/plots.py in section(ds, path, filename, save, cmap, clim, clabel) 113 cmap={'vosaline': 'Spectral_r', 'votemper': 'jet', 'vomecrty': 'bwr', 'vozocrtx':'bwr'} 114 ds=ds.assign_coords(new_lon=(ds.depth*0+ds.nav_lon)) --> 115 ds=ds.load() 116 plot_list = [ds[varname].where(ds.mask).hvplot.quadmesh( 117 x='new_lon',y='depth' ~/monitor/lib/python3.7/site-packages/xarray/core/dataset.py in load(self, **kwargs) 739 740 # evaluate all the dask arrays simultaneously --> 741 evaluated_data = da.compute(*lazy_data.values(), **kwargs) 742 743 for k, data in zip(lazy_data, evaluated_data): ~/monitor/lib/python3.7/site-packages/dask/base.py in compute(*args, **kwargs) 561 postcomputes.append(x.__dask_postcompute__()) 562 --> 563 results = schedule(dsk, keys, **kwargs) 564 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)]) 565 ~/monitor/lib/python3.7/site-packages/distributed/client.py in get(self, dsk, keys, workers, allow_other_workers, resources, sync, asynchronous, direct, retries, priority, fifo_timeout, actors, **kwargs) 2653 should_rejoin = False 2654 try: -> 2655 results = self.gather(packed, asynchronous=asynchronous, direct=direct) 2656 finally: 2657 for f in futures.values(): ~/monitor/lib/python3.7/site-packages/distributed/client.py in gather(self, futures, errors, direct, asynchronous) 1968 
direct=direct, 1969 local_worker=local_worker, -> 1970 asynchronous=asynchronous, 1971 ) 1972 ~/monitor/lib/python3.7/site-packages/distributed/client.py in sync(self, func, asynchronous, callback_timeout, *args, **kwargs) 837 else: 838 return sync( --> 839 self.loop, func, *args, callback_timeout=callback_timeout, **kwargs 840 ) 841 ~/monitor/lib/python3.7/site-packages/distributed/utils.py in sync(loop, func, callback_timeout, *args, **kwargs) 338 if error[0]: 339 typ, exc, tb = error[0] --> 340 raise exc.with_traceback(tb) 341 else: 342 return result[0] ~/monitor/lib/python3.7/site-packages/distributed/utils.py in f() 322 if callback_timeout is not None: 323 future = asyncio.wait_for(future, callback_timeout) --> 324 result[0] = yield future 325 except Exception as exc: 326 error[0] = sys.exc_info() ~/monitor/lib/python3.7/site-packages/tornado/gen.py in run(self) 760 761 try: --> 762 value = future.result() 763 except Exception: 764 exc_info = sys.exc_info() ~/monitor/lib/python3.7/site-packages/distributed/client.py in _gather(self, futures, errors, direct, local_worker) 1827 exc = CancelledError(key) 1828 else: -> 1829 raise exception.with_traceback(traceback) 1830 raise exc 1831 if errors == "skip": KilledWorker: ("('open_dataset-concatenate-b971f738f01c36749236262f2e41e115', 3, 0, 367, 0)", <Worker 'tcp://127.0.0.1:42940', name: 1, memory: 0, processing: 458>)