%matplotlib inline
import pandas as pd
import socket
host = socket.getfqdn()
from core import load, zoom, calc, save,plots,monitor
#reload funcs after updating ./core/*.py
import importlib
importlib.reload(load)
importlib.reload(zoom)
importlib.reload(calc)
importlib.reload(save)
importlib.reload(plots)
importlib.reload(monitor)
<module 'core.monitor' from '/ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py'>
# 'month' = 'JOBID': almost the month, but not exactly,
# if you submit the job with a job scheduler (see above).
#Below is a list of environment variables one can pass.
#%env local='2'
# local : if True, run a dask local cluster; if not True, the number of workers
# is set to the value given in 'local'
# if no 'local' is given, local defaults to 'True'
#%env ychunk='2'
#%env tchunk='2'
# controls chunk. 'False' sets no modification from original netcdf file's chunk.
# ychunk=10 will group the original netcdf file to 10 by 10
# tchunk=1 will chunk the time coordinate one by one
#%env control=FWC_SSH
# name of control file to be used for computation/plots/save/
#%env file_exp=
# 'file_exp': Which 'experiment' name is it?
#. this corresponds to the intake catalog name without path and .yaml
#%env year=
# for Validation, this corresponds to the year in path/year/month
# for monitoring, this corresponds to 'date'; a * means do all files in the monitoring directory
# setting it as *0[0-9] &*1[0-9]& *[2-3][0-9], the job can be separated in three lots.
#%env month=
# for monitoring this corresponds to file path path-XIOS.{month}/
#
#%env save= proceed saving? True or False , Default is set to True
#%env plot= proceed plotting? True or False , Default is set to True
#%env calc= proceed computation? or just load computed result? True or False , Default is set to True
#%env save=False
#%env lazy=False
%%time
# 'savefig': Do we save output in html? or not. keep it true.
savefig=True
# Read the runtime configuration from the environment variables documented
# above: dask client/cluster handles, control-file name, intake catalog URL,
# target month/year, and the output / dask-performance-report paths.
client,cluster,control,catalog_url,month,year,daskreport,outputpath = load.set_control(host)
# Make sure the result and dask-report directories exist before anything writes to them.
!mkdir -p $outputpath
!mkdir -p $daskreport
# Display the dask client summary (workers, threads, memory, dashboard URL).
client
local True using host= irene5428.c-irene.mg1.tgcc.ccc.cea.fr starting dask cluster on local= True workers 16 10000000000 False rome local cluster starting This code is running on irene5428.c-irene.mg1.tgcc.ccc.cea.fr using SEDNA_DELTA_MONITOR file experiment, read from ../lib/SEDNA_DELTA_MONITOR.yaml on year= 2012 on month= 02 outputpath= ../results/SEDNA_DELTA_MONITOR/ daskreport= ../results/dask/6413746irene5428.c-irene.mg1.tgcc.ccc.cea.fr_SEDNA_DELTA_MONITOR_02M_IceThick/ CPU times: user 557 ms, sys: 127 ms, total: 684 ms Wall time: 20.9 s
Client-01e7918b-1344-11ed-ad96-080038b94039
Connection method: Cluster object | Cluster type: distributed.LocalCluster |
Dashboard: http://127.0.0.1:8787/status |
17728407
Dashboard: http://127.0.0.1:8787/status | Workers: 16 |
Total threads: 128 | Total memory: 251.06 GiB |
Status: running | Using processes: True |
Scheduler-28555c69-3f13-4e45-868f-3d200653027c
Comm: tcp://127.0.0.1:37406 | Workers: 16 |
Dashboard: http://127.0.0.1:8787/status | Total threads: 128 |
Started: Just now | Total memory: 251.06 GiB |
Comm: tcp://127.0.0.1:35748 | Total threads: 8 |
Dashboard: http://127.0.0.1:33635/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:39145 | |
Local directory: /tmp/dask-worker-space/worker-qsn1y4l9 |
Comm: tcp://127.0.0.1:44627 | Total threads: 8 |
Dashboard: http://127.0.0.1:33239/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:35520 | |
Local directory: /tmp/dask-worker-space/worker-6uc6th20 |
Comm: tcp://127.0.0.1:36276 | Total threads: 8 |
Dashboard: http://127.0.0.1:34414/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:43439 | |
Local directory: /tmp/dask-worker-space/worker-1m286qrv |
Comm: tcp://127.0.0.1:34253 | Total threads: 8 |
Dashboard: http://127.0.0.1:40280/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:33297 | |
Local directory: /tmp/dask-worker-space/worker-3tylkuzl |
Comm: tcp://127.0.0.1:44630 | Total threads: 8 |
Dashboard: http://127.0.0.1:33545/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:38867 | |
Local directory: /tmp/dask-worker-space/worker-mfjwyuak |
Comm: tcp://127.0.0.1:40066 | Total threads: 8 |
Dashboard: http://127.0.0.1:38231/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:44260 | |
Local directory: /tmp/dask-worker-space/worker-tf0ewf2g |
Comm: tcp://127.0.0.1:35954 | Total threads: 8 |
Dashboard: http://127.0.0.1:37260/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:44179 | |
Local directory: /tmp/dask-worker-space/worker-sc40o1pv |
Comm: tcp://127.0.0.1:46880 | Total threads: 8 |
Dashboard: http://127.0.0.1:35206/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:42876 | |
Local directory: /tmp/dask-worker-space/worker-_8pjlwly |
Comm: tcp://127.0.0.1:45518 | Total threads: 8 |
Dashboard: http://127.0.0.1:34247/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:36254 | |
Local directory: /tmp/dask-worker-space/worker-jsjyjv7r |
Comm: tcp://127.0.0.1:45823 | Total threads: 8 |
Dashboard: http://127.0.0.1:43548/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:34396 | |
Local directory: /tmp/dask-worker-space/worker-y4fw7z5b |
Comm: tcp://127.0.0.1:38558 | Total threads: 8 |
Dashboard: http://127.0.0.1:35276/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:45233 | |
Local directory: /tmp/dask-worker-space/worker-sbbnnrj9 |
Comm: tcp://127.0.0.1:39550 | Total threads: 8 |
Dashboard: http://127.0.0.1:35290/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:34398 | |
Local directory: /tmp/dask-worker-space/worker-4c62wad9 |
Comm: tcp://127.0.0.1:41251 | Total threads: 8 |
Dashboard: http://127.0.0.1:39590/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:44626 | |
Local directory: /tmp/dask-worker-space/worker-uf8jbml7 |
Comm: tcp://127.0.0.1:33408 | Total threads: 8 |
Dashboard: http://127.0.0.1:42250/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:40086 | |
Local directory: /tmp/dask-worker-space/worker-npc99ky9 |
Comm: tcp://127.0.0.1:36117 | Total threads: 8 |
Dashboard: http://127.0.0.1:33362/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:36983 | |
Local directory: /tmp/dask-worker-space/worker-ov_z28cx |
Comm: tcp://127.0.0.1:37893 | Total threads: 8 |
Dashboard: http://127.0.0.1:34163/status | Memory: 15.69 GiB |
Nanny: tcp://127.0.0.1:35020 | |
Local directory: /tmp/dask-worker-space/worker-dhx7vlhf |
# Load the control table: one row per monitored quantity
# (Value, Inputs, Equation, Zone, Plot, Colourmap, MinMax, Unit, ...).
df=load.controlfile(control)
#Take out 'later' tagged computations
#df=df[~df['Value'].str.contains('later')]
# Display the control table.
df
Value | Inputs | Equation | Zone | Plot | Colourmap | MinMax | Unit | Oldname | Unnamed: 10 | |
---|---|---|---|---|---|---|---|---|---|---|
IceThickness | icemod.sivolu | (data.sivolu.where(data.sivolu >0)).to_dataset... | ALL | maps | Spectral | (0,5) | m | M-4 |
Each computation consists of
%%time
import os
calcswitch=os.environ.get('calc', 'True')
lazy=os.environ.get('lazy','False' )
loaddata=((df.Inputs != '').any())
print('calcswitch=',calcswitch,'df.Inputs != nothing',loaddata, 'lazy=',lazy)
data = load.datas(catalog_url,df.Inputs,month,year,daskreport,lazy=lazy) if ((calcswitch=='True' )*loaddata) else 0
data
calcswitch= True df.Inputs != nothing True lazy= False ../lib/SEDNA_DELTA_MONITOR.yaml using param_xios reading ../lib/SEDNA_DELTA_MONITOR.yaml using param_xios reading <bound method DataSourceBase.describe of sources: param_xios: args: combine: nested concat_dim: y urlpath: /ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/SEDNA-I/SEDNA_Domain_cfg_Tgt_20210423_tsh10m_L1/param_f32/x_*.nc xarray_kwargs: compat: override coords: minimal data_vars: minimal parallel: true description: SEDNA NEMO parameters from MPI output nav_lon lat fails driver: intake_xarray.netcdf.NetCDFSource metadata: catalog_dir: /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/../lib/ > {'name': 'param_xios', 'container': 'xarray', 'plugin': ['netcdf'], 'driver': ['netcdf'], 'description': 'SEDNA NEMO parameters from MPI output nav_lon lat fails', 'direct_access': 'forbid', 'user_parameters': [{'name': 'path', 'description': 'file coordinate', 'type': 'str', 'default': '/ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/MESH/SEDNA_mesh_mask_Tgt_20210423_tsh10m_L1/param'}], 'metadata': {}, 'args': {'urlpath': '/ccc/work/cont003/gen7420/odakatin/CONFIGS/SEDNA/SEDNA-I/SEDNA_Domain_cfg_Tgt_20210423_tsh10m_L1/param_f32/x_*.nc', 'combine': 'nested', 'concat_dim': 'y'}} 0 read icemod ['sivolu'] using load_data_xios_kerchunk reading icemod using load_data_xios_kerchunk reading <bound method DataSourceBase.describe of sources: data_xios_kerchunk: args: consolidated: false storage_options: fo: file:////ccc/cont003/home/ra5563/ra5563/catalogue/DELTA/201202/icemod_0[0-5][0-9][0-9].json target_protocol: file urlpath: reference:// description: CREG025 NEMO outputs from different xios server in kerchunk format driver: intake_xarray.xzarr.ZarrSource metadata: catalog_dir: /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/../lib/ > took 31.52891778945923 seconds 0 merging icemod ['sivolu'] param nav_lat will be included in data param nav_lon will be included in data param mask2d will be included 
in data ychunk= 10 calldatas_y_rechunk sum_num (13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 
12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12) start rechunking with (130, 122, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 48) end of y_rechunk CPU times: user 33.1 s, sys: 2.56 s, total: 35.7 s Wall time: 55.3 s
<xarray.Dataset> Dimensions: (t: 28, y: 6540, x: 6560) Coordinates: nav_lat (y, x) float32 dask.array<chunksize=(130, 6560), meta=np.ndarray> nav_lon (y, x) float32 dask.array<chunksize=(130, 6560), meta=np.ndarray> time_centered (t) object dask.array<chunksize=(1,), meta=np.ndarray> * t (t) object 2012-02-01 12:00:00 ... 2012-02-28 12:00:00 * y (y) int64 1 2 3 4 5 6 7 ... 6535 6536 6537 6538 6539 6540 * x (x) int64 1 2 3 4 5 6 7 ... 6555 6556 6557 6558 6559 6560 mask2d (y, x) bool dask.array<chunksize=(130, 6560), meta=np.ndarray> Data variables: sivolu (t, y, x) float32 dask.array<chunksize=(1, 130, 6560), meta=np.ndarray> Attributes: (12/26) CASE: DELTA CONFIG: SEDNA Conventions: CF-1.6 DOMAIN_dimensions_ids: [2, 3] DOMAIN_halo_size_end: [0, 0] DOMAIN_halo_size_start: [0, 0] ... ... nj: 13 output_frequency: 1d start_date: 20090101 timeStamp: 2022-Jan-18 16:51:17 GMT title: ice variables uuid: 56b165e2-bdda-4b33-a2e9-04a59f3d06e9
%%time
# Run the full monitoring pipeline (compute -> save -> plot) for every row
# of the control table df, using the lazily-loaded dataset `data`.
# NOTE(review): the captured traceback below shows this run failed inside
# core/save.py (UnboundLocalError: 'slices' referenced before assignment in
# to_mfnetcdf_map — presumably the per-dimension slice loop never ran for
# one dim); the fix belongs in core/save.py, not in this cell.
monitor.auto(df,data,savefig,daskreport,outputpath,file_exp='SEDNA'
)
#calc= True #save= True #plot= False monitor.optimize_dataset(data) Value='IceThickness' Zone='ALL' Plot='maps' cmap='Spectral' clabel='m' clim= (0, 5) outputpath='../results/SEDNA_DELTA_MONITOR/' nc_outputpath='../nc_results/SEDNA_DELTA_MONITOR/' filename='SEDNA_maps_ALL_IceThickness' #3 Start computing dtaa= (data.sivolu.where(data.sivolu >0)).to_dataset(name='sivolu').chunk({ 't': -1 }).unify_chunks().persist() monitor.optimize_dataset(dtaa)
<xarray.Dataset> Dimensions: (y: 6540, x: 6560, t: 28) Coordinates: nav_lat (y, x) float32 dask.array<chunksize=(130, 6560), meta=np.ndarray> nav_lon (y, x) float32 dask.array<chunksize=(130, 6560), meta=np.ndarray> time_centered (t) object dask.array<chunksize=(28,), meta=np.ndarray> * t (t) object 2012-02-01 12:00:00 ... 2012-02-28 12:00:00 * y (y) int64 1 2 3 4 5 6 7 ... 6535 6536 6537 6538 6539 6540 * x (x) int64 1 2 3 4 5 6 7 ... 6555 6556 6557 6558 6559 6560 mask2d (y, x) bool dask.array<chunksize=(130, 6560), meta=np.ndarray> Data variables: sivolu (t, y, x) float32 dask.array<chunksize=(28, 130, 6560), meta=np.ndarray>
#4 Saving SEDNA_maps_ALL_IceThickness dtaa=save.datas(data,plot=Plot,path=nc_outputpath,filename=filename) start saving data saving data in a file
--------------------------------------------------------------------------- UnboundLocalError Traceback (most recent call last) File <timed eval>:1, in <module> File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/monitor.py:84, in auto(df, val, savefig, daskreport, outputpath, file_exp) 82 print('dtaa=save.datas(data,plot=Plot,path=nc_outputpath,filename=filename)' ) 83 with performance_report(filename=daskreport+"_save_"+step.Value+".html"): ---> 84 save.datas(data,plot=Plot,path=nc_outputpath,filename=filename) 85 # 5. Plot 86 if plotswitch=='True': File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/save.py:24, in datas(data, plot, path, filename) 22 twoD(data,path,filename,nested=False) 23 else : ---> 24 twoD(data,path,filename) 25 return None File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/save.py:57, in twoD(data, path, filename, nested) 55 print('saving data in a file') 56 filesave=path+filename ---> 57 return to_mfnetcdf_map(data,prefix=filesave, nested=nested) File /ccc/work/cont003/gen7420/odakatin/monitor-sedna/notebook/core/save.py:219, in to_mfnetcdf_map(ds, prefix, nested) 217 slices.append(slice(start, stop)) 218 start = stop --> 219 chunk_slices[dim] = slices 220 for i in chunk_slices['t']: 221 print(i) UnboundLocalError: local variable 'slices' referenced before assignment