eldamwl_plot.py

author       pilar <pilar.gumaclaramunt@cnr.it>
date         Wed, 28 Aug 2024 11:28:24 +0000
changeset    4:75213aef70b8
parent       2:763a7ac22031
permissions  -rwxr-xr-x

Changed the timestamps to UTC and the filenames of the plots
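
"""
eldamwl_plot.py

Read an ELDA MWL NetCDF file (low and high resolution product groups) and plot vertical
profiles of backscatter, extinction, lidar ratio and depolarization with Plotly, saving
each figure as an HTML file.

Usage:
    ./eldamwl_plot.py <ELDAmwl NetCDF filename>
"""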

# import nc as nc
# import xarray as xr
from config import *
import plotly.graph_objects as go
from plotly.subplots import make_subplots
import netCDF4 as nc
from datetime import datetime, timezone
import numpy as np
import sys
from os.path import exists


def check_group_presence(nc_file_path, group_name):
    with nc.Dataset(nc_file_path, 'r') as dataset:
        if group_name in dataset.groups:
            print(f'{group_name} group exists')
            return True
        else:
            print(f'{group_name} group does not exist')
            return False


def read_variable_in_group(nc_file_path, group_name, variable_name):
    try:
        with nc.Dataset(nc_file_path, 'r') as dataset:
            # Find the group
            group = dataset.groups[group_name]

            # Access the variable
            try:
                variable = group.variables[variable_name]
                dimensions = variable.dimensions

                variable_metadata = \
                    {'name': variable.name.capitalize(),
                     'long_name': variable.long_name.capitalize(),
                     'units': variable.units,
                     'ndim': variable.ndim}

                # Create an index array for slicing the variable
                index_array = [slice(None)] * variable.ndim
                # Access the variable data using the index array
                variable_data = variable[index_array]

                # Fetch the dimension values
                dimension_values = {}
                for dim_name in dimensions:
                    dim_values = group.variables[dim_name][:]
                    dimension_values[dim_name] = dim_values
                    # Adjust the index array for slicing
                    index_array[variable.dimensions.index(dim_name)] = 0

                # Return the variable data, its dimension values and its metadata in a dictionary
                result = {'data': variable_data, 'dimensions': dimension_values, 'metadata': variable_metadata}
                return result

            except KeyError:
                print(f"The variable {variable_name} does not exist in the {group_name} group.")

    except FileNotFoundError:
        print(f"The file '{nc_file_path}' does not exist.")
    except Exception as e:
        print(f"The group {group_name} does not exist or could not be read: {e}")


def read_global_attribute(nc_file_path, attribute_name):
    # Open the NetCDF file in read mode
    with nc.Dataset(nc_file_path, 'r') as dataset:
        # Check if the attribute exists
        if attribute_name in dataset.ncattrs():
            # Read the attribute value
            attribute_value = dataset.getncattr(attribute_name)
            return attribute_value
        else:
            print(f"Attribute '{attribute_name}' not found in the NetCDF file.")


def read_variable_in_group2(nc_file_path, group_name, variable_name):
    try:
        with nc.Dataset(nc_file_path, 'r') as dataset:
            # Find the group
            group = dataset.groups[group_name]

            # Access the variable
            try:
                variable = group.variables[variable_name]
                dimensions = variable.dimensions

                variable_metadata = \
                    {'name': variable.name.capitalize(),
                     'long_name': variable.long_name.capitalize(),
                     'units': variable.units,
                     'ndim': variable.ndim}

                # Create an index array for slicing the variable
                index_array = [slice(None)] * variable.ndim
                # Access the variable data using the index array
                variable_data = variable[index_array]

                # Fetch the dimension values
                dimension_values = {}
                for dim_name in dimensions:
                    dim_values = group.variables[dim_name][:]
                    dimension_values[dim_name] = dim_values.tolist()
                    # Adjust the index array for slicing
                    index_array[variable.dimensions.index(dim_name)] = 0

                # Return the variable data, its dimension values and its metadata separately
                return variable_data, dimension_values, variable_metadata

            except KeyError:
                print(f"The variable {variable_name} does not exist in the {group_name} group.")
    except FileNotFoundError:
        print(f"The file {nc_file_path} does not exist.")
    except Exception as e:
        print(f"The group {group_name} does not exist or could not be read: {e}")
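
# Illustrative example (not executed) of how the reader helpers above are typically used;
# the file name below is a placeholder:
#
#     bsc, dims, meta = read_variable_in_group2('example_ELDAmwl.nc', 'lowres_products', 'backscatter')
#     # For the ELDA MWL product variables, bsc is indexed as [wavelength, time, level],
#     # dims maps each dimension name to its coordinate values (as lists), and
#     # meta holds 'name', 'long_name', 'units' and 'ndim'.

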
def plot_vertical_profiles_plotly(variables, data, dimensions, y_array, positive_error_array, negative_error_array,
                                  xaxis_label, yaxis_label, title, timeinrows):
    """
    Plot vertical profiles with Plotly, one column per variable.

    Parameters:
    - variables: list of variable names to plot (keys of `data`)
    - data: dict mapping each variable name to an array indexed as [wavelength, time, level]
    - dimensions: dict with the 'wavelength' and 'time' coordinate values
    - y_array: altitude variable as returned by read_variable_in_group ('data' and 'metadata' keys)
    - positive_error_array, negative_error_array: dicts of error arrays with the same shape as `data`
    - xaxis_label: list of x-axis labels, one per variable
    - yaxis_label: y-axis (altitude) label
    - title: figure title
    - timeinrows: if True, plot each temporal profile in a separate row

    Returns:
    - fig: plotly.graph_objects.Figure
    """

    # Set number of columns, number of rows, and subtitles if needed
    ncols = len(variables)
    subtitles = None
    if timeinrows is True:
        nrows = len(dimensions['time'])
        if nrows > 1:
            for t in range(nrows):
                for v in range(ncols):
                    # Timestamps in UTC, consistent with the rest of the script
                    if subtitles is None:
                        subtitles = [str(datetime.fromtimestamp(dimensions['time'][t], tz=timezone.utc))]
                    else:
                        subtitles += [str(datetime.fromtimestamp(dimensions['time'][t], tz=timezone.utc))]
    else:
        nrows = 1
        subtitles = ''

    # Create figure
    fig = make_subplots(rows=nrows, cols=ncols,
                        subplot_titles=subtitles,
                        shared_yaxes=True)

    # Define colours
    clrs = [['rgb(0,35,255)', 'rgb(0,20,150)', 'rgb(0,170,250)'],    # UV
            ['rgb(90,210,50)', 'rgb(40,90,25)', 'rgb(120,250,80)'],  # VIS
            ['rgb(250,30,30)', 'rgb(200,25,30)', 'rgb(250,125,0)']]  # IR
    clrs = np.array(clrs)

    # Plot
    for v in range(len(variables)):
        for t in range(len(dimensions['time'])):
            for w in range(len(dimensions['wavelength'])):
                tn = str(datetime.fromtimestamp(dimensions['time'][t], tz=timezone.utc))
                wn = str(dimensions['wavelength'][w])
                if timeinrows:
                    lgnd = wn + ' nm'
                    rown = t + 1
                    colorind = 0
                else:
                    lgnd = wn + ' nm - ' + tn
                    rown = 1
                    colorind = t

                # Colour definition
                if abs(dimensions['wavelength'][w] - 1064) < 1:
                    clr = clrs[2, colorind]
                elif abs(dimensions['wavelength'][w] - 532) < 1:
                    clr = clrs[1, colorind]
                elif abs(dimensions['wavelength'][w] - 355) < 1:
                    clr = clrs[0, colorind]
                else:
                    clr = 'rgb(0,0,0)'

                data_values = data[variables[v]][w, t, :]
                if np.average(np.ma.masked_array(data_values, np.isnan(data_values))) is not np.ma.masked:
                    vertical_levels = y_array["data"][t, :]
                    error_values = positive_error_array[variables[v]][w, t, :]

                    # Add trace
                    fig.add_trace(go.Scatter(
                        x=data_values,
                        y=vertical_levels,
                        error_x=dict(array=error_values, width=0, thickness=0.1),
                        # ToDo Include distinction between positive and negative errors
                        mode='lines',
                        line=dict(width=2, color=clr),
                        name=lgnd),
                        row=rown, col=v + 1,
                    )

                    # ToDo Check formatting options (showlegend, mapbox)
                    fig.update_xaxes({"title": xaxis_label[v]}, row=rown, col=v + 1)
                    fig.update_xaxes({"mirror": True, "ticks": 'outside', "showline": True, "color": "black"},
                                     row=rown, col=v + 1)

                    fig.update_xaxes(ticks="outside")
                    fig.update_yaxes(ticks="outside", row=rown, col=v + 1)
                    # ToDo check tickformat (i.e. tickformat=":.2e")
                    if v == 0:
                        fig.update_yaxes({"title": yaxis_label}, row=rown, col=v + 1)

                    # ToDo add boxes around the subplots

    # Set axis labels and title
    fig.update_layout(
        yaxis_title=yaxis_label,
        title=title,
        template="seaborn",  # You can change the template as per your preference
        # ['ggplot2', 'seaborn', 'simple_white', 'plotly', 'plotly_white', 'plotly_dark', 'presentation',
        #  'xgridoff', 'ygridoff', 'gridon', 'none']
        height=max(600, 450 * rown), width=min(max(400 * len(variables), 750), 1400),
    )

    return fig


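# The variant below plots low and high resolution products together; it expects the merged
# 'wavelength_res' labels (e.g. '532.0 (LR)', '532.0 (HR)') built further down in this script.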
def plot_vertical_profiles_plotly_allres(variables, data, dimensions, y_array, positive_error_array,
                                         negative_error_array, xaxis_label, yaxis_label, title):
    """
    Plot vertical profiles with Plotly, one column per variable, combining low and high
    resolution products in the same subplot.

    Parameters:
    - variables: list of variable names to plot (keys of `data`)
    - data: dict mapping each variable name to an array indexed as [wavelength_res, time, level]
    - dimensions: dict with the 'wavelength_res' labels (e.g. '532.0 (LR)') and 'time' values
    - y_array: altitude variable as returned by read_variable_in_group ('data' and 'metadata' keys)
    - positive_error_array, negative_error_array: dicts of error arrays with the same shape as `data`
    - xaxis_label: list of x-axis labels, one per variable
    - yaxis_label: y-axis (altitude) label
    - title: figure title

    Returns:
    - fig: plotly.graph_objects.Figure
    """

    # Set number of columns, number of rows, and subtitles if needed
    ncols = len(variables)
    nrows = 1
    subtitles = ''

    # Create figure
    fig = make_subplots(rows=nrows, cols=ncols,
                        subplot_titles=subtitles,
                        shared_yaxes=True)

    # Define colours
    clrs = [['rgb(0,35,255)', 'rgb(0,20,150)'],    # UV
            ['rgb(90,210,50)', 'rgb(40,90,25)'],   # VIS
            ['rgb(250,30,30)', 'rgb(130,30,10)']]  # IR
    clrs = np.array(clrs)

    # Plot
    for v in range(len(variables)):
        for t in range(len(dimensions['time'])):
            for w in range(len(dimensions['wavelength_res'])):
                wl = float(dimensions['wavelength_res'][w].split(' ')[0])
                wn = str(dimensions['wavelength_res'][w])
                lgnd = wn.replace(' ', ' nm ')
                rown = 1
                if wn.find('LR') > 0:
                    colorind = 0
                else:
                    colorind = 1

                # Colour definition
                if abs(wl - 1064) < 1:
                    clr = clrs[2, colorind]
                elif abs(wl - 532) < 1:
                    clr = clrs[1, colorind]
                elif abs(wl - 355) < 1:
                    clr = clrs[0, colorind]
                else:
                    clr = 'rgb(0,0,0)'

                data_values = data[variables[v]][w, t, :]
                if np.average(np.ma.masked_array(data_values, np.isnan(data_values))) is not np.ma.masked:
                    vertical_levels = y_array["data"][t, :]
                    error_values = positive_error_array[variables[v]][w, t, :]
                    # Add trace
                    fig.add_trace(go.Scatter(
                        x=data_values,
                        y=vertical_levels,
                        error_x=dict(array=error_values, width=0, thickness=0.1),
                        # ToDo Include distinction between positive and negative errors
                        mode='lines',
                        line=dict(width=2, color=clr),
                        name=lgnd),
                        row=rown, col=v + 1,
                    )

                    # ToDo Check formatting options (showlegend, mapbox)
                    fig.update_xaxes({"title": xaxis_label[v]}, row=rown, col=v + 1)
                    fig.update_xaxes({"mirror": True, "ticks": 'outside', "showline": True, "color": "black"},
                                     row=rown, col=v + 1)

                    fig.update_xaxes(ticks="outside")
                    fig.update_yaxes(ticks="outside", row=rown, col=v + 1)
                    # ToDo check tickformat (i.e. tickformat=":.2e")
                    if v == 0:
                        fig.update_yaxes({"title": yaxis_label}, row=rown, col=v + 1)

                    # ToDo add boxes around the subplots

    # Set axis labels and title
    fig.update_layout(
        yaxis_title=yaxis_label,
        title=title,
        template="seaborn",  # You can change the template as per your preference
        # ['ggplot2', 'seaborn', 'simple_white', 'plotly', 'plotly_white', 'plotly_dark', 'presentation',
        #  'xgridoff', 'ygridoff', 'gridon', 'none']
        height=max(600, 450 * rown), width=min(max(400 * len(variables), 750), 1400),
    )

    return fig


# Files for testing
# Backscatter and extinction, no error:
# netcdf_file = 'hpb_012_0000598_201810172100_201810172300_20181017oh00_ELDAmwl_v0.0.1.nc'
# Extinction with error:
# netcdf_file = 'hpb_012_0000627_202308240200_202308240400_20230824hpb0200_ELDAmwl_v0.0.1.nc'
# v0.0.3:
# netcdf_file = 'pot_012_0000721_202406081503_202406081601_20240608pot150N_ELDAmwl_v0.0.3.nc'


try:
    netcdf_file = sys.argv[1]
except IndexError:
    print('Syntax is incorrect, you need to pass the filename of the file to be plotted.\n'
          'Example: ./eldamwl_plot.py hpb_012_0000598_201810172100_201810172300_20181017oh00_ELDAmwl_v0.0.1.nc')
    exit()

# Print help
if netcdf_file in ('-h', '--h', '--help'):
    print('Syntax: ./eldamwl_plot.py hpb_012_0000598_201810172100_201810172300_20181017oh00_ELDAmwl_v0.0.1.nc')
    exit()

netcdf_file_path = netcdf_path + netcdf_file

file_exists = exists(netcdf_file_path)
if not file_exists:
    print('The file that you provided does not exist.')
    exit()

# Groups of data in the NetCDF file (low and high resolution)
groups = ['lowres_products', 'highres_products']
groups_names = ['Low resolution', 'High resolution']

# Variables that we want to plot
variable = ['backscatter', 'extinction', 'lidarratio', 'volumedepolarization',
            'particledepolarization']  # ToDo Test particledepolarization when available
# Errors associated to the variables
variable_error = ['error_backscatter', 'error_extinction', 'error_lidarratio',
                  'positive_systematic_error_volumedepolarization', 'error_particledepolarization']
variable_negative_error_volumedepol = 'negative_systematic_error_volumedepolarization'
# ToDo Consider adding other variables:
# cloud_mask (time, level)
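# Note: each product variable read below is an array indexed as [wavelength, time, level];
# the associated error variables share the same shape.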

# Read global attributes
station_id = read_global_attribute(netcdf_file_path, 'station_ID')
station_location = read_global_attribute(netcdf_file_path, 'location')

# Read data
h = 0
for g in range(len(groups)):
    if check_group_presence(netcdf_file_path, groups[g]):
        # Read the altitude variable
        altitude = read_variable_in_group(netcdf_file_path, groups[g], 'altitude')

        if altitude['metadata']['units'] == 'm':
            altitude['data'] = altitude['data'] / 1000.0
            altitude['metadata']['units'] = 'km'

        # Initialize variables
        data = None
        variables_with_data = None
        variables_axis = None

        # Read the data
        for v in range(len(variable)):
            # Initialize var
            var = None

            try:
                var, dim, met = read_variable_in_group2(netcdf_file_path, groups[g], variable[v])
            except TypeError:
                print(f"The variable {variable[v]} is not present (read_variable_in_group2 returned None).")

            if var is not None:  # If the variable that we're trying to read exists
                if var.max() != var.min():  # If there is data in the array
                    if variables_with_data is None:
                        variables_with_data = [variable[v]]
                        variables_axis = [met['long_name'] + ' [ ' + met['units'] + ' ]']
                    else:
                        variables_with_data += [variable[v]]
                        variables_axis += [met['long_name'] + ' [ ' + met['units'] + ' ]']

                    var_error, dim_error, met_error = read_variable_in_group2(netcdf_file_path, groups[g],
                                                                              variable_error[v])
                    if variable[v] == 'volumedepolarization':
                        var_error_negative, dim, met = read_variable_in_group2(netcdf_file_path, groups[g],
                                                                               variable_negative_error_volumedepol)
                    else:
                        var_error_negative = var_error

                    # Add read data to the data variable to have it all grouped
                    if data is None:
                        # Start populating data and data_error
                        data = {variable[v]: var}
                        data_error_positive = {variable[v]: var_error}
                        data_error_negative = {variable[v]: var_error_negative}
                    else:
                        # Add more data to data and data_error
                        data[variable[v]] = var
                        data_error_positive[variable[v]] = var_error
                        data_error_negative[variable[v]] = var_error_negative
                else:
                    print(f'There is no data for {variable[v]} in the {groups_names[g]} group.')

                dimensions = dim

        # Save lowres and highres in variables to plot them together afterwards
        if groups[g] == 'lowres_products':
            altitude_lr = altitude
            dimensions_lr = dim
            data_lr = data
            data_error_positive_lr = data_error_positive
            data_error_negative_lr = data_error_negative
            variables_with_data_lr = variables_with_data
            variables_axis_lr = variables_axis
        if groups[g] == 'highres_products':
            altitude_hr = altitude
            dimensions_hr = dim
            data_hr = data
            data_error_positive_hr = data_error_positive
            data_error_negative_hr = data_error_negative
            variables_with_data_hr = variables_with_data
            variables_axis_hr = variables_axis

        # If interactive (config.py) is True, ask the user whether to plot the different temporal
        # profiles on separate plots or on separate rows; otherwise proceed with the default values
        if h == 0:
            # Initialize
            timeinplots = True
            timeinrows = False
            if interactive and len(dimensions['time']) > 1:
                answer = input(
                    f"There are {len(dimensions['time'])} temporal profiles, "
                    f"do you want to plot them on different plots [y/n]? ")
                if answer[0:1].lower() == 'n':
                    timeinplots = False
                if not timeinplots:
                    answer = input(
                        f"There are {len(dimensions['time'])} temporal profiles, "
                        f"do you want to plot them on different rows [y/n]? ")
                    if answer[0:1].lower() == 'y':
                        timeinrows = True

        if variables_with_data is not None:
            timeiter = dimensions['time']
            if timeinplots:
                # Default plotting: one plot per temporal profile
                dimensions_t = dimensions
                for t in range(len(dimensions['time'])):
                    dimensions_t['time'] = [timeiter[t]]
                    for vr in range(len(variables_with_data)):
                        if vr == 0:
                            data_t = {variables_with_data[vr]: data[variables_with_data[vr]][:, t:t + 1, :]}
                        else:
                            data_t[variables_with_data[vr]] = data[variables_with_data[vr]][:, t:t + 1, :]

                    title = station_location + " (" + station_id.upper() + ") - " + str(
                        datetime.fromtimestamp(dimensions_t['time'][0], tz=timezone.utc).strftime(
                            '%d/%m/%Y %H:%M:%S')) + " - ELDA MWL " + groups_names[g]

                    # Timestamps for the filename
                    t0 = datetime.fromtimestamp(timeiter[t], tz=timezone.utc).strftime('%Y%m%d%H%M')
                    if t + 1 < len(timeiter):
                        t1 = datetime.fromtimestamp(
                            timeiter[t + 1], tz=timezone.utc).strftime('%Y%m%d%H%M')
                    else:
                        t1 = netcdf_file[29:41]

                    filename = "ELDAmwl_" + netcdf_file[0:3].upper() + "_" + t0 + "-" + t1 + "_" + \
                               groups[g] + ".html"

                    fig = plot_vertical_profiles_plotly(
                        variables=variables_with_data,
                        data=data_t,
                        dimensions=dimensions_t,
                        y_array=altitude,
                        positive_error_array=data_error_positive,
                        negative_error_array=data_error_negative,
                        xaxis_label=variables_axis,
                        yaxis_label=altitude['metadata']['long_name'] + ' [' + altitude['metadata']['units'] + ']',
                        title=title,
                        timeinrows=True
                    )

                    # Show the plot (in Jupyter Notebook or Jupyter Lab)
                    # fig.show()

                    # Save the plot
                    fig.write_html(html_path + filename)

            else:
                # Plotting in one plot (even if there are different time profiles)
                fig = plot_vertical_profiles_plotly(
                    variables_with_data,
                    data,
                    dimensions,
                    altitude,
                    data_error_positive,
                    data_error_negative,
                    xaxis_label=variables_axis,
                    yaxis_label=altitude['metadata']['long_name'] + ' [' + altitude['metadata']['units'] + ']',
                    title=station_location + " (" + station_id.upper() + ") - " + str(
                        datetime.fromtimestamp(dimensions['time'][0], tz=timezone.utc).strftime(
                            '%d/%m/%Y')) + " - ELDA MWL " + groups_names[g],
                    timeinrows=timeinrows
                )

                # Show the plot (in Jupyter Notebook or Jupyter Lab)
                # fig.show()

                # Save the plot
                if timeinrows:
                    filename = "ELDAmwl_" + station_id.upper() + "_" + \
                               datetime.fromtimestamp(dimensions['time'][0], tz=timezone.utc).strftime('%Y%m%d%H%M') + \
                               '-' + datetime.fromtimestamp(dimensions['time'][-1], tz=timezone.utc).strftime('%Y%m%d%H%M') + \
                               "_" + groups[g] + "_timeinrows.html"
                else:
                    filename = "ELDAmwl_" + station_id.upper() + "_" + \
                               datetime.fromtimestamp(dimensions['time'][0], tz=timezone.utc).strftime('%Y%m%d%H%M') + \
                               '-' + datetime.fromtimestamp(dimensions['time'][-1], tz=timezone.utc).strftime('%Y%m%d%H%M') + \
                               "_" + groups[g] + ".html"

                fig.write_html(html_path + filename)

            dimensions['time'] = timeiter

        else:
            print(f'There is no data to be plotted in the {groups_names[g]} group.')

        h = h + 1

# Plotting high and low resolution together
if 'variables_with_data_lr' in locals() and 'variables_with_data_hr' in locals():
    if len(variables_with_data_lr) > 0 and len(variables_with_data_hr) > 0:
        print("We have low and high resolution data, let's combine it!")

        # Variables with data
        variables_with_data = list(variables_with_data_lr)
        variables_with_data.extend(x for x in variables_with_data_hr if x not in variables_with_data)
        print(f'Variables with data: {variables_with_data}')

        # Variables axis
        variables_axis = list(variables_axis_lr)
        variables_axis.extend(x for x in variables_axis_hr if x not in variables_axis)

        # Joining dimensions for both resolutions
        # Wavelength
        wav = list(dimensions_lr['wavelength'])
        dhrw = list(dimensions_hr['wavelength'])
        wav.extend(x for x in dimensions_hr['wavelength'] if x not in wav)
        dimensions['wavelength'] = wav
        # Add LR/HR to the wavelength labels
        wlr = list(dimensions_lr['wavelength'])
        for r in range(len(wlr)):
            wlr[r] = str(wlr[r]) + ' (LR)'
        whr = list(dhrw)
        for r in range(len(whr)):
            whr[r] = str(whr[r]) + ' (HR)'
        dimensions['wavelength_res'] = wlr + whr
        dimensions_hr['wavelength'] = dhrw
        # Time
        tim = list(dimensions_lr['time'])
        tim.extend(x for x in dimensions_hr['time'] if x not in tim)
        dimensions['time'] = tim
        # Level
        lev = list(dimensions_lr['level'])
        lev.extend(x for x in dimensions_hr['level'] if x not in lev)
        dimensions['level'] = lev

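        # The joint arrays below are indexed as [wavelength_res, time, level]; wavelength/time
        # combinations without data stay masked, so they are skipped when plotting.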
        # Populate the arrays with data and errors
        for v in variables_with_data:
            # Create arrays to store the joint data
            # data_var = np.ma.masked_all((len(dimensions['wavelength_res']), len(tim), len(lev)))
            data_var = np.ma.zeros((len(dimensions['wavelength_res']), len(tim), len(lev)))
            data_error_positive_var = np.ma.zeros((len(dimensions['wavelength_res']), len(tim), len(lev)))
            data_error_negative_var = np.ma.zeros((len(dimensions['wavelength_res']), len(tim), len(lev)))
            # ToDo check if needed
            data_var = np.ma.masked_values(data_var, 0)
            data_error_positive_var = np.ma.masked_values(data_error_positive_var, 0)
            data_error_negative_var = np.ma.masked_values(data_error_negative_var, 0)
            for w in range(len(dimensions['wavelength_res'])):
                for t in range(len(dimensions['time'])):
                    # Incorporate data into the main array
                    print(f"{v} - Wavelength: {dimensions['wavelength_res'][w]}. Time: {dimensions['time'][t]}.")

                    # High resolution
                    if dimensions['wavelength_res'][w].find('HR') >= 0:
                        # Is there HR data?
                        if v in data_hr:
                            try:
                                # Find position of w and t
                                wl = float(dimensions['wavelength_res'][w].split(' ')[0])
                                wp = dhrw.index(wl)
                                tp = dimensions_hr['time'].index(dimensions['time'][t])
                                # Check if there is data
                                a = data_hr[v][wp, tp, :]
                                d = np.average(np.ma.masked_array(a, np.isnan(a)))
                                if np.ma.min(d) is not np.ma.masked:
                                    print(f'\tWe have HR data for {v}')
                                    # Save data into the common structure
                                    data_var[w, t, :] = a
                                    if np.average(np.ma.masked_array(data_error_positive_hr[v][wp, tp, :], np.isnan(
                                            data_error_positive_hr[v][wp, tp, :]))) is not np.ma.masked:
                                        data_error_positive_var[w, t, :] = data_error_positive_hr[v][wp, tp, :]
                                    if np.average(np.ma.masked_array(data_error_negative_hr[v][wp, tp, :], np.isnan(
                                            data_error_negative_hr[v][wp, tp, :]))) is not np.ma.masked:
                                        data_error_negative_var[w, t, :] = data_error_negative_hr[v][wp, tp, :]
                            except ValueError:
                                pass

                    # Low resolution
                    if dimensions['wavelength_res'][w].find('LR') >= 0:
                        # Is there LR data?
                        if v in data_lr:
                            # Find position of w and t
                            try:
                                wl = float(dimensions['wavelength_res'][w].split(' ')[0])
                                if wl in dimensions_lr['wavelength']:
                                    wp = dimensions_lr['wavelength'].index(wl)
                                else:
                                    wp = -1
                                tp = dimensions_lr['time'].index(dimensions['time'][t])
                                if wp > -1 and tp > -1:
                                    # Check if there is data
                                    a = data_lr[v][wp, tp, :]
                                    d = np.average(np.ma.masked_array(a, np.isnan(a)))
                                    if np.ma.min(d) is not np.ma.masked:
                                        print(f'\tWe have LR data for {v}')
                                        # Save data into the common structure
                                        data_var[w, t, :] = a
                                        if np.average(np.ma.masked_array(data_error_positive_lr[v][wp, tp, :], np.isnan(
                                                data_error_positive_lr[v][wp, tp, :]))) is not np.ma.masked:
                                            data_error_positive_var[w, t, :] = data_error_positive_lr[v][wp, tp, :]
                                        if np.average(np.ma.masked_array(data_error_negative_lr[v][wp, tp, :], np.isnan(
                                                data_error_negative_lr[v][wp, tp, :]))) is not np.ma.masked:
                                            data_error_negative_var[w, t, :] = data_error_negative_lr[v][wp, tp, :]
                            except ValueError:
                                # e.g. no 1064 nm wavelength in dimensions_lr
                                pass

            # Store data in the global matrices
            if variables_with_data.index(v) == 0:
                data_all = {v: data_var}
                data_error_positive = {v: data_error_positive_var}
                data_error_negative = {v: data_error_negative_var}
            else:
                data_all[v] = data_var
                data_error_positive[v] = data_error_positive_var
                data_error_negative[v] = data_error_negative_var

        # Plot
        timeiter = dimensions['time']
        # Default plotting: one plot per temporal profile
        dimensions_t = dimensions
        for t in range(len(dimensions['time'])):
            print(f'plotting hr & lr for {timeiter[t]}')
            dimensions_t['time'] = [timeiter[t]]
            for vr in range(len(variables_with_data)):
                if vr == 0:
                    data_t = {variables_with_data[vr]: data_all[variables_with_data[vr]][:, t:t + 1, :]}
                else:
                    data_t[variables_with_data[vr]] = data_all[variables_with_data[vr]][:, t:t + 1, :]

            fig = plot_vertical_profiles_plotly_allres(
                variables=variables_with_data,
                data=data_t,
                dimensions=dimensions_t,
                y_array=altitude,
                positive_error_array=data_error_positive,
                negative_error_array=data_error_negative,
                xaxis_label=variables_axis,
                yaxis_label=altitude['metadata']['long_name'] + ' [' + altitude['metadata']['units'] + ']',
                title=station_location + " (" + station_id.upper() + ") - " + str(
                    datetime.fromtimestamp(dimensions_t['time'][0], tz=timezone.utc).strftime(
                        '%d/%m/%Y %H:%M:%S')) + " - ELDA MWL - High and low resolution")

            # Show the plot (in Jupyter Notebook or Jupyter Lab)
            # fig.show()

            # Save the plot
            # Timestamps for the filename
            t0 = datetime.fromtimestamp(timeiter[t], tz=timezone.utc).strftime('%Y%m%d%H%M')
            if t + 1 < len(timeiter):
                t1 = datetime.fromtimestamp(
                    timeiter[t + 1], tz=timezone.utc).strftime('%Y%m%d%H%M')
            else:
                t1 = netcdf_file[29:41]

            filename = "ELDAmwl_" + netcdf_file[0:3].upper() + "_" + t0 + "-" + t1 + "_all.html"
            fig.write_html(html_path + filename)
