Wed, 28 Aug 2024 11:28:24 +0000
Changed the timestamps to UTC and the filenames of the plots
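In practice both the plot titles and the timestamps embedded in the output filenames are now derived with datetime.fromtimestamp(..., tz=timezone.utc). A minimal sketch of the pattern used in the script below (the epoch value is a placeholder, purely for illustration):

    from datetime import datetime, timezone

    ts = 1700000000  # placeholder epoch seconds, for illustration only
    stamp = datetime.fromtimestamp(ts, tz=timezone.utc).strftime('%Y%m%d%H%M')              # filename timestamps
    title_stamp = datetime.fromtimestamp(ts, tz=timezone.utc).strftime('%d/%m/%Y %H:%M:%S')  # plot titles
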
# import nc as nc
# import xarray as xr
from config import *
import plotly.graph_objects as go
from plotly.subplots import make_subplots
import netCDF4 as nc
from datetime import datetime, timezone
import numpy as np
import sys
from os.path import exists


def check_group_presence(nc_file_path, group_name):
    with nc.Dataset(nc_file_path, 'r') as dataset:
        if group_name in dataset.groups:
            print(f'{group_name} group exists')
            return True
        else:
            print(f'{group_name} group does not exist')
            return False


def read_variable_in_group(nc_file_path, group_name, variable_name):
    try:
        with nc.Dataset(nc_file_path, 'r') as dataset:
            # Find the group
            group = dataset.groups[group_name]
            # Access the variable
            try:
                variable = group.variables[variable_name]
                dimensions = variable.dimensions
                # print("Dimensions:", dimensions)
                # variable_metadata = { variable.name, variable.long_name, variable.units, variable.ndim}
                # variable_metadata.add = variable.long_name
                variable_metadata = \
                    {'name': variable.name.capitalize(),
                     'long_name': variable.long_name.capitalize(),
                     'units': variable.units,
                     'ndim': variable.ndim}

                # Create an index array for slicing the variable
                index_array = [slice(None)] * variable.ndim

                # Access the variable data using the index array
                variable_data = variable[index_array]

                # Fetch the dimension values
                dimension_values = {}
                for dim_name in dimensions:
                    dim_values = group.variables[dim_name][:]
                    dimension_values[dim_name] = dim_values
                    # Adjust the index array for slicing
                    index_array[variable.dimensions.index(dim_name)] = 0

                # Now you have the variable data and its associated dimensions in a dictionary
                result = {'data': variable_data, 'dimensions': dimension_values, 'metadata': variable_metadata}
                # Print or use the result as needed
                # print(result)
                return result
            except KeyError:
                print("The specified variable does not exist in the group.")
    except FileNotFoundError:
        print(f"The file '{nc_file_path}' does not exist.")
    except Exception as e:
        print(f"The group {e} does not exist.")


def read_global_attribute(nc_file_path, attribute_name):
    # Open the NetCDF file in read mode
    with nc.Dataset(nc_file_path, 'r') as dataset:
        # with netCDF4.Dataset(nc_file_path, 'r') as nc:
        # Check if the attribute exists
        if attribute_name in dataset.ncattrs():
            # Read the attribute value
            attribute_value = dataset.getncattr(attribute_name)
            return attribute_value
        else:
            print(f"Attribute '{attribute_name}' not found in the NetCDF file.")


def read_variable_in_group2(nc_file_path, group_name, variable_name):
    try:
        with nc.Dataset(nc_file_path, 'r') as dataset:
            # Find the group
            group = dataset.groups[group_name]
            # Access the variable
            try:
                variable = group.variables[variable_name]
                dimensions = variable.dimensions
                # print("Dimensions:", dimensions)
                # variable_metadata = { variable.name, variable.long_name, variable.units, variable.ndim}
                # variable_metadata.add = variable.long_name
                variable_metadata = \
                    {'name': variable.name.capitalize(),
                     'long_name': variable.long_name.capitalize(),
                     'units': variable.units,
                     'ndim': variable.ndim}

                # Create an index array for slicing the variable
                index_array = [slice(None)] * variable.ndim

                # Access the variable data using the index array
                variable_data = variable[index_array]

                # Fetch the dimension values
                dimension_values = {}
                for dim_name in dimensions:
                    dim_values = group.variables[dim_name][:]
                    dimension_values[dim_name] = dim_values.tolist()
                    # Adjust the index array for slicing
                    index_array[variable.dimensions.index(dim_name)] = 0

                # Now you have the variable data and its associated dimensions in a dictionary
                # result = {'data': variable_data, 'dimensions': dimension_values, 'metadata': variable_metadata}
                # Print or use the result as needed
                # print(result)
                return variable_data, dimension_values, variable_metadata
            except KeyError:
                print(f"The variable {variable_name} does not exist in the {group_name} group.")
    except FileNotFoundError:
        print(f"The file {nc_file_path} does not exist.")
    except Exception as e:
        print(f"The group {e} does not exist.")


def plot_vertical_profiles_plotly(variables, data, dimensions, y_array, positive_error_array,
                                  negative_error_array, xaxis_label, yaxis_label, title, timeinrows):
    """
    ToDo Complete
    Plot vertical profiles using plotly

    Parameters:
    -

    Returns:
    - fig: html
    """
    # Set number of columns, number of rows, and subtitles if needed
    ncols = len(variables)
    subtitles = None
    if timeinrows is True:
        nrows = len(dimensions['time'])
        if nrows > 1:
            for t in range(nrows):
                for v in range(ncols):
                    if subtitles is None:
                        subtitles = [str(datetime.fromtimestamp(dimensions['time'][t]))]
                    else:
                        subtitles += [str(datetime.fromtimestamp(dimensions['time'][t]))]
    else:
        nrows = 1
        subtitles = ''

    # Create figure
    fig = make_subplots(rows=nrows, cols=ncols, subplot_titles=subtitles, shared_yaxes=True)

    # Define colours
    clrs = [['rgb(0,35,255)', 'rgb(0,20,150)', 'rgb(0,170,250)'],    # UV
            ['rgb(90,210,50)', 'rgb(40,90,25)', 'rgb(120,250,80)'],  # VIS
            ['rgb(250,30,30)', 'rgb(200,25,30)', 'rgb(250,125,0)']]  # IR
    clrs = np.array(clrs)

    # Plot
    for v in range(len(variables)):
        for t in range(len(dimensions['time'])):
            for w in range(len(dimensions['wavelength'])):
                tn = str(datetime.fromtimestamp(dimensions['time'][t]))
                wn = str(dimensions['wavelength'][w])
                if timeinrows:
                    lgnd = wn + ' nm'
                    rown = t + 1
                    colorind = 0
                    # subtitle = str(datetime.fromtimestamp(dimensions['time'][t]))
                else:
                    lgnd = wn + ' nm - ' + tn
                    rown = 1
                    colorind = t

                # colour definition
                if abs(dimensions['wavelength'][w] - 1064) < 1:
                    clr = clrs[2, colorind]
                    # clr = 'rgb(235,70,20)'  # 'rgba(215,27,96,0.4)'
                elif abs(dimensions['wavelength'][w] - 532) < 1:
                    clr = clrs[1, colorind]
                elif abs(dimensions['wavelength'][w] - 355) < 1:
                    clr = clrs[0, colorind]
                else:
                    clr = 'rgb(0,0,0)'

                data_values = data[variables[v]][w, t, :]
                if np.average(np.ma.masked_array(data_values, np.isnan(data_values))) is not np.ma.masked:
                    vertical_levels = y_array["data"][t, :]
                    error_values = positive_error_array[variables[v]][w, t, :]

                    # Add trace
                    fig.add_trace(go.Scatter(
                        x=data_values,
                        y=vertical_levels,
                        error_x=dict(array=error_values, width=0, thickness=0.1),
                        # ToDo Include distinction between positive and negative errors
                        mode='lines',
                        line=dict(width=2, color=clr),
                        name=lgnd),
                        row=rown, col=v + 1,
                    )
                    # ToDo Check formatting options (showlegend, mapbox)

                    fig.update_xaxes({"title": xaxis_label[v]}, row=rown, col=v + 1)
                    fig.update_xaxes({"mirror": True, "ticks": 'outside', "showline": True, "color": "black"},
                                     row=rown, col=v + 1)
                    fig.update_xaxes(ticks="outside")
                    fig.update_yaxes(ticks="outside", row=rown, col=v + 1)
                    # ToDo check tickformat (i.e. tickformat=":.2e")

                    if v == 0:
                        fig.update_yaxes({"title": yaxis_label}, row=rown, col=v + 1)

    # ToDo add boxes
    a = fig.layout['xaxis']['domain']
    # print(a)
    b = fig.layout['yaxis']['domain']
    # print(b)
    # fig.add_hline(y=b[0], line=dict(color="yellow", width=2), row=rown, col=v+1)
    # fig.add_hline(y=b[1], line=dict(color="purple", width=2), row=rown, col=v+1)
    # fig.add_vline(x=a[0], line=dict(color="orange", width=2), row=rown, col=v+1)
    # fig.add_vline(x=a[1], line=dict(color="pink", width=2), row=rown, col=v+1)
    # fig.add_shape(type="line", xref="paper", yref="paper", x0=a[0], y0=b[0], x1=a[1], y1=b[1],
    #               line=dict(color="grey", width=2, ), row=rown, col=v+1)
    # fig.add_shape(type="line", xref="paper", yref="paper", x0=1, y0=1, x1=1, y1=0,
    #               line=dict(color="blue", width=2, ), row=rown, col=v+1)
    # fig.add_shape(type="line", xref="paper", yref="paper", x0=0, y0=0, x1=1, y1=0,
    #               line=dict(color="red", width=2, ), row=rown, col=v+1)
    # fig.add_shape(type="line", xref="paper", yref="paper", x0=0, y0=0, x1=0, y1=1,
    #               line=dict(color="green", width=2, ), row=rown, col=v+1)
    # fig.add_hline(y=0, line=dict(color="yellow", width=2))
    # print(fig.data[0]['x'])

    # Set axis labels and title
    fig.update_layout(
        yaxis_title=yaxis_label,
        title=title,
        template="seaborn",
        # You can change the template as per your preference
        # ['ggplot2', 'seaborn', 'simple_white', 'plotly', 'plotly_white', 'plotly_dark', 'presentation',
        #  'xgridoff', 'ygridoff', 'gridon', 'none']
        height=max(600, 450 * rown),
        width=min(max(400 * len(variables), 750), 1400),
    )

    return fig


def plot_vertical_profiles_plotly_allres(variables, data, dimensions, y_array, positive_error_array,
                                         negative_error_array, xaxis_label, yaxis_label, title):
    """
    ToDo Complete
    Plot vertical profiles using plotly

    Parameters:
    -

    Returns:
    - fig: html
    """
    # Set number of columns, number of rows, and subtitles if needed
    ncols = len(variables)
    nrows = 1
    subtitles = ''

    # Create figure
    fig = make_subplots(rows=nrows, cols=ncols, subplot_titles=subtitles, shared_yaxes=True)

    # Define colours
    clrs = [['rgb(0,35,255)', 'rgb(0,20,150)'],    # UV
            ['rgb(90,210,50)', 'rgb(40,90,25)'],   # VIS
            ['rgb(250,30,30)', 'rgb(130,30,10)']]  # IR
    clrs = np.array(clrs)

    # Plot
    for v in range(len(variables)):
        for t in range(len(dimensions['time'])):
            for w in range(len(dimensions['wavelength_res'])):
                wl = float(dimensions['wavelength_res'][w].split(' ')[0])
                wn = str(dimensions['wavelength_res'][w])
                lgnd = wn.replace(' ', ' nm ')
                rown = 1
                if wn.find('LR') > 0:
                    colorind = 0
                else:
                    colorind = 1

                # colour definition
                if abs(wl - 1064) < 1:
                    clr = clrs[2, colorind]
                elif abs(wl - 532) < 1:
                    clr = clrs[1, colorind]
                elif abs(wl - 355) < 1:
                    clr = clrs[0, colorind]
                else:
                    clr = 'rgb(0,0,0)'

                data_values = data[variables[v]][w, t, :]
                if np.average(np.ma.masked_array(data_values, np.isnan(data_values))) is not np.ma.masked:
                    vertical_levels = y_array["data"][t, :]
                    error_values = positive_error_array[variables[v]][w, t, :]

                    # Add trace
                    fig.add_trace(go.Scatter(
                        x=data_values,
                        y=vertical_levels,
                        error_x=dict(array=error_values, width=0, thickness=0.1),
                        # ToDo Include distinction between positive and negative errors
                        mode='lines',
                        line=dict(width=2, color=clr),
                        name=lgnd),
                        row=rown, col=v + 1,
                    )
                    # ToDo Check formatting options (showlegend, mapbox)

                    fig.update_xaxes({"title": xaxis_label[v]}, row=rown, col=v + 1)
                    fig.update_xaxes({"mirror": True, "ticks": 'outside', "showline": True, "color": "black"},
                                     row=rown, col=v + 1)
                    fig.update_xaxes(ticks="outside")
                    fig.update_yaxes(ticks="outside", row=rown, col=v + 1)
                    # ToDo check tickformat (i.e. tickformat=":.2e")

                    if v == 0:
                        fig.update_yaxes({"title": yaxis_label}, row=rown, col=v + 1)

    # ToDo add boxes
    a = fig.layout['xaxis']['domain']
    # print(a)
    b = fig.layout['yaxis']['domain']
    # print(b)
    # fig.add_hline(y=b[0], line=dict(color="yellow", width=2), row=rown, col=v+1)
    # fig.add_hline(y=b[1], line=dict(color="purple", width=2), row=rown, col=v+1)
    # fig.add_vline(x=a[0], line=dict(color="orange", width=2), row=rown, col=v+1)
    # fig.add_vline(x=a[1], line=dict(color="pink", width=2), row=rown, col=v+1)
    # fig.add_shape(type="line", xref="paper", yref="paper", x0=a[0], y0=b[0], x1=a[1], y1=b[1],
    #               line=dict(color="grey", width=2, ), row=rown, col=v+1)
    # fig.add_shape(type="line", xref="paper", yref="paper", x0=1, y0=1, x1=1, y1=0,
    #               line=dict(color="blue", width=2, ), row=rown, col=v+1)
    # fig.add_shape(type="line", xref="paper", yref="paper", x0=0, y0=0, x1=1, y1=0,
    #               line=dict(color="red", width=2, ), row=rown, col=v+1)
    # fig.add_shape(type="line", xref="paper", yref="paper", x0=0, y0=0, x1=0, y1=1,
    #               line=dict(color="green", width=2, ), row=rown, col=v+1)
    # fig.add_hline(y=0, line=dict(color="yellow", width=2))
    # print(fig.data[0]['x'])

    # Set axis labels and title
    fig.update_layout(
        yaxis_title=yaxis_label,
        title=title,
        template="seaborn",
        # You can change the template as per your preference
        # ['ggplot2', 'seaborn', 'simple_white', 'plotly', 'plotly_white', 'plotly_dark', 'presentation',
        #  'xgridoff', 'ygridoff', 'gridon', 'none']
        height=max(600, 450 * rown),
        width=min(max(400 * len(variables), 750), 1400),
    )

    return fig


# Files for testing
# Backscatter and extinction, no error:
# netcdf_file = 'hpb_012_0000598_201810172100_201810172300_20181017oh00_ELDAmwl_v0.0.1.nc'
# Extinction with error:
# netcdf_file = 'hpb_012_0000627_202308240200_202308240400_20230824hpb0200_ELDAmwl_v0.0.1.nc'
# v0.0.3
# pot_012_0000721_202406081503_202406081601_20240608pot150N_ELDAmwl_v0.0.3.nc

try:
    netcdf_file = sys.argv[1]
except IndexError:
    print('Syntax is incorrect, you need to pass the filename of the file to be plotted.\n'
          'Example: ./eldamwl_plot.py hpb_012_0000598_201810172100_201810172300_20181017oh00_ELDAmwl_v0.0.1.nc')
    exit()

# Print help
if netcdf_file == '-h' or netcdf_file == '--h':
    print('Syntax: ./eldamwl_plot.py hpb_012_0000598_201810172100_201810172300_20181017oh00_ELDAmwl_v0.0.1.nc')
    exit()

netcdf_file_path = netcdf_path + netcdf_file

file_exists = exists(netcdf_file_path)
if not file_exists:
    print('The file that you provided does not exist.')
    exit()

# Groups of data in the NetCDF file (low and high resolution)
groups = ['lowres_products', 'highres_products']
groups_names = ['Low resolution', 'High resolution']

# Variables that we want to plot
variable = ['backscatter', 'extinction', 'lidarratio', 'volumedepolarization', 'particledepolarization']
# ToDo Test particledepolarization when available

# Errors associated to the variables
variable_error = ['error_backscatter', 'error_extinction', 'error_lidarratio',
                  'positive_systematic_error_volumedepolarization', 'error_particledepolarization']
variable_negative_error_volumedepol = 'negative_systematic_error_volumedepolarization'

# ToDo Consider adding other variables:
#   cloud_mask (time, level)

# Read global attributes
station_id = read_global_attribute(netcdf_file_path, 'station_ID')
station_location = read_global_attribute(netcdf_file_path, 'location')

# Read data
h = 0
for g in range(len(groups)):
    if check_group_presence(netcdf_file_path, groups[g]):
        # Read the altitude variable
        altitude = read_variable_in_group(netcdf_file_path, groups[g], 'altitude')
        if altitude['metadata']['units'] == 'm':
            altitude['data'] = altitude['data'] / 1000.0
            altitude['metadata']['units'] = 'km'

        # Initialize variables
        data = None
        variables_with_data = None
        variables_axis = None

        # Read the data
        for v in range(len(variable)):
            # Initialize var
            var = None
            try:
                var, dim, met = read_variable_in_group2(netcdf_file_path, groups[g], variable[v])
            except TypeError:
                print(f"The variable {variable[v]} is not present. Cannot unpack non-iterable NoneType object")

            if var is not None:  # If the variable that we're trying to read exists
                if var.max() != var.min():  # If there is data in the array
                    if variables_with_data is None:
                        variables_with_data = [variable[v]]
                        variables_axis = [met['long_name'] + ' [ ' + met['units'] + ' ]']
                    else:
                        variables_with_data += [variable[v]]
                        variables_axis += [met['long_name'] + ' [ ' + met['units'] + ' ]']

                    var_error, dim_error, met_error = read_variable_in_group2(netcdf_file_path, groups[g],
                                                                              variable_error[v])
                    if variable[v] == 'volumedepolarization':
                        var_error_negative, dim, met = read_variable_in_group2(netcdf_file_path, groups[g],
                                                                               variable_negative_error_volumedepol)
                    else:
                        var_error_negative = var_error

                    # Add read data to the data variable to have it all grouped
                    if data is None:
                        # Start populating data and data_error
                        data = {variable[v]: var}
                        data_error_positive = {variable[v]: var_error}
                        data_error_negative = {variable[v]: var_error_negative}
                    else:
                        # Add more data to data and data_error
                        data[variable[v]] = var
                        data_error_positive[variable[v]] = var_error
                        data_error_negative[variable[v]] = var_error_negative
                else:
                    print(f'There is no data for {variable[v]} in the {groups_names[g]} group.')

                dimensions = dim

        # Save lowres and highres in variables to plot them together afterwards
        if groups[g] == 'lowres_products':
            altitude_lr = altitude
            dimensions_lr = dim
            data_lr = data
            data_error_positive_lr = data_error_positive
            data_error_negative_lr = data_error_negative
            variables_with_data_lr = variables_with_data
            variables_axis_lr = variables_axis
        if groups[g] == 'highres_products':
            altitude_hr = altitude
            dimensions_hr = dim
            data_hr = data
            data_error_positive_hr = data_error_positive
            data_error_negative_hr = data_error_negative
            variables_with_data_hr = variables_with_data
            variables_axis_hr = variables_axis

        # If interactive is set to True, ask user if he/she wants to plot different temporal profiles in different rows
        # If interactive is set to False, proceed with default values
        if h == 0:
            # Initialize
            timeinplots = True
            timeinrows = False
            # If interactive (config.py) = True, ask user how he/she wants the plot
            if interactive and len(dimensions['time']) > 1:
                answer = input(
                    f"There are {len(dimensions['time'])} temporal profiles, "
                    f"do you want to plot them on different plots [y/n]? ")
                if answer[0:1].lower() == 'n':
                    timeinplots = False
                if not timeinplots:
                    answer = input(
                        f"There are {len(dimensions['time'])} temporal profiles, "
                        f"do you want to plot them on different rows [y/n]? ")
                    if answer[0:1].lower() == 'y':
                        timeinrows = True

        if variables_with_data is not None:
            timeiter = dimensions['time']
            if timeinplots:
                # Default plotting
                dimensions_t = dimensions
                for t in range(len(dimensions['time'])):
                    dimensions_t['time'] = [timeiter[t]]
                    for vr in range(len(variables_with_data)):
                        if vr == 0:
                            data_t = {variables_with_data[vr]: data[variables_with_data[vr]][:, t:t + 1, :]}
                        else:
                            data_t[variables_with_data[vr]] = data[variables_with_data[vr]][:, t:t + 1, :]

                    title = station_location + " (" + station_id.upper() + ") - " + str(
                        datetime.fromtimestamp(dimensions_t['time'][0], tz=timezone.utc).strftime(
                            '%d/%m/%Y %H:%M:%S')) + " - ELDA MWL " + groups_names[g]

                    # Timestamps for the filename
                    t0 = datetime.fromtimestamp(timeiter[t], tz=timezone.utc).strftime('%Y%m%d%H%M')
                    if t + 1 < len(timeiter):
                        t1 = datetime.fromtimestamp(timeiter[t + 1], tz=timezone.utc).strftime('%Y%m%d%H%M')
                    else:
                        t1 = netcdf_file[29:41]
                    filename = "ELDAmwl_" + netcdf_file[0:3].upper() + "_" + t0 + "-" + t1 + "_" + \
                               groups[g] + ".html"

                    fig = plot_vertical_profiles_plotly(
                        variables=variables_with_data,
                        data=data_t,
                        dimensions=dimensions_t,
                        y_array=altitude,
                        positive_error_array=data_error_positive,
                        negative_error_array=data_error_negative,
                        xaxis_label=variables_axis,
                        yaxis_label=altitude['metadata']['long_name'] + ' [' + altitude['metadata']['units'] + ']',
                        title=title,
                        timeinrows=True
                    )

                    # Show the plot (in Jupyter Notebook or Jupyter Lab)
                    # fig.show()
                    # Save the plot
                    fig.write_html(html_path + filename)
            else:
                # Plotting in one plot (even if there are different time profiles)
                fig = plot_vertical_profiles_plotly(
                    variables_with_data,
                    data,
                    dimensions,
                    altitude,
                    data_error_positive,
                    data_error_negative,
                    xaxis_label=variables_axis,
                    yaxis_label=altitude['metadata']['long_name'] + ' [' + altitude['metadata']['units'] + ']',
                    title=station_location + " (" + station_id.upper() + ") - " + str(
                        datetime.fromtimestamp(dimensions['time'][0], tz=timezone.utc).strftime(
                            '%d/%m/%Y')) + " - ELDA MWL " + groups_names[g],
                    timeinrows=timeinrows
                )

                # Show the plot (in Jupyter Notebook or Jupyter Lab)
                # fig.show()

                # Save the plot
                if timeinrows:
                    filename = "ELDAmwl_" + station_id.upper() + "_" + datetime.fromtimestamp(
                        dimensions['time'][0], tz=timezone.utc).strftime('%Y%m%d%H%M') + '-' + datetime.fromtimestamp(
                        dimensions['time'][-1], tz=timezone.utc).strftime('%Y%m%d%H%M') + "_" + \
                        groups[g] + "_timeinrows.html"
                else:
                    filename = "ELDAmwl_" + station_id.upper() + "_" + datetime.fromtimestamp(
                        dimensions['time'][0], tz=timezone.utc).strftime('%Y%m%d%H%M') + '-' + datetime.fromtimestamp(
                        dimensions['time'][-1], tz=timezone.utc).strftime('%Y%m%d%H%M') + "_" + groups[g] + ".html"

                fig.write_html(html_path + filename)

            dimensions['time'] = timeiter
        else:
            print(f'There is no data to be plotted in the {groups_names[g]} group.')

    h = h + 1

# Plotting high and low resolution together
if 'variables_with_data_lr' in locals() and 'variables_with_data_hr' in locals():
    if len(variables_with_data_lr) > 0 and len(variables_with_data_hr) > 0:
        print("We have low and high resolution data, let's combine it!")

        # Variables with data
        variables_with_data = list(variables_with_data_lr)
        variables_with_data.extend(x for x in variables_with_data_hr if x not in variables_with_data)
        print(f'Variables with data: {variables_with_data}')

        # Variables axis
        variables_axis = list(variables_axis_lr)
        variables_axis.extend(x for x in variables_axis_hr if x not in variables_axis)

        # Joining dimensions for both resolutions
        # Wavelength
        wav = list(dimensions_lr['wavelength'])
        dhrw = list(dimensions_hr['wavelength'])
        wav.extend(x for x in dimensions_hr['wavelength'] if x not in wav)
        dimensions['wavelength'] = wav

        # Add LR/HR to the wavelength
        wlr = list(dimensions_lr['wavelength'])
        for r in range(len(wlr)):
            wlr[r] = str(wlr[r]) + ' (LR)'
        whr = list(dhrw)
        for r in range(len(whr)):
            whr[r] = str(whr[r]) + ' (HR)'
        dimensions['wavelength_res'] = wlr + whr
        dimensions_hr['wavelength'] = dhrw
        dimensions['wavelength'] = wav

        # Time
        tim = list(dimensions_lr['time'])
        tim.extend(x for x in dimensions_hr['time'] if x not in tim)
        dimensions['time'] = tim

        # Level
        lev = list(dimensions_lr['level'])
        lev.extend(x for x in dimensions_hr['level'] if x not in lev)
        dimensions['level'] = lev

        # Populate the arrays with data and errors
        for v in variables_with_data:
            # Create array to store the joint data
            # data_var = np.ma.masked_all((len(dimensions['wavelength_res']), len(tim), len(lev)))
            data_var = np.ma.zeros((len(dimensions['wavelength_res']), len(tim), len(lev)))
            data_error_positive_var = np.ma.zeros((len(dimensions['wavelength_res']), len(tim), len(lev)))
            data_error_negative_var = np.ma.zeros((len(dimensions['wavelength_res']), len(tim), len(lev)))
            # ToDo check if needed
            data_var = np.ma.masked_values(data_var, 0)
            data_error_positive_var = np.ma.masked_values(data_error_positive_var, 0)
            data_error_negative_var = np.ma.masked_values(data_error_negative_var, 0)

            for w in range(len(dimensions['wavelength_res'])):
                for t in range(len(dimensions['time'])):
                    # Incorporate data into the main array
                    print(f"{v} - Wavelength: {dimensions['wavelength_res'][w]}. Time: {dimensions['time'][t]}.")

                    # High resolution
                    if dimensions['wavelength_res'][w].find('HR') >= 0:
                        # Is there HR data?
                        if v in data_hr:
                            try:
                                # Find position of w and t
                                wl = float(dimensions['wavelength_res'][w].split(' ')[0])
                                wp = dimensions_hr['wavelength'].index(wl)
                                wp = dhrw.index(wl)  # ToDo test
                                tp = dimensions_hr['time'].index(dimensions['time'][t])

                                # Check if there is data
                                a = data_hr[v][wp, tp, :]
                                d = np.average(np.ma.masked_array(a, np.isnan(a)))
                                if np.ma.min(d) is not np.ma.masked:
                                    print(f'\tWe have HR data for {v}')
                                    # Save data into common structure
                                    data_var[w, t, :] = a
                                    if np.average(np.ma.masked_array(
                                            data_error_positive_hr[v][wp, tp, :],
                                            np.isnan(data_error_positive_hr[v][wp, tp, :]))) is not np.ma.masked:
                                        data_error_positive_var[w, t, :] = data_error_positive_hr[v][wp, tp, :]
                                    if np.average(np.ma.masked_array(
                                            data_error_negative_hr[v][wp, tp, :],
                                            np.isnan(data_error_negative_hr[v][wp, tp, :]))) is not np.ma.masked:
                                        data_error_negative_var[w, t, :] = data_error_negative_hr[v][wp, tp, :]
                            except ValueError:
                                pass

                    # Low resolution
                    if dimensions['wavelength_res'][w].find('LR') >= 0:
                        # Is there LR data?
                        if v in data_lr:
                            # Find position of w and t
                            try:
                                wl = float(dimensions['wavelength_res'][w].split(' ')[0])
                                # wp = dimensions_hr['wavelength'].index(wl)  # ToDo check
                                if wl in dimensions_lr['wavelength']:
                                    wp = dimensions_lr['wavelength'].index(dimensions['wavelength'][w])
                                else:
                                    wp = -1
                                # if dimensions['time'][t] in dimensions_lr['time']:
                                tp = dimensions_lr['time'].index(dimensions['time'][t])
                                # else:
                                #     tp = -1

                                if wp > -1 and tp > -1:
                                    # Check if there is data
                                    a = data_lr[v][wp, tp, :]
                                    d = np.average(np.ma.masked_array(a, np.isnan(a)))
                                    if np.ma.min(d) is not np.ma.masked:
                                        print(f'\tWe have LR data for {v}')
                                        # Save data into common structure
                                        data_var[w, t, :] = a
                                        if np.average(np.ma.masked_array(
                                                data_error_positive_lr[v][wp, tp, :],
                                                np.isnan(data_error_positive_lr[v][wp, tp, :]))) is not np.ma.masked:
                                            data_error_positive_var[w, t, :] = data_error_positive_lr[v][wp, tp, :]
                                        if np.average(np.ma.masked_array(
                                                data_error_negative_lr[v][wp, tp, :],
                                                np.isnan(data_error_negative_lr[v][wp, tp, :]))) is not np.ma.masked:
                                            data_error_negative_var[w, t, :] = data_error_negative_lr[v][wp, tp, :]
                            except ValueError:
                                # i.e. no 1064 wavelength in dimensions_lr
                                pass
                            # except IndexError:
                            #     pass

            # Store data in the global matrices
            if variables_with_data.index(v) == 0:
                data_all = {v: data_var}
                data_error_positive = {v: data_error_positive_var}
                data_error_negative = {v: data_error_negative_var}
            else:
                data_all[v] = data_var
                data_error_positive[v] = data_error_positive_var
                data_error_negative[v] = data_error_negative_var

        # Plot
        timeiter = dimensions['time']
        # Default plotting
        dimensions_t = dimensions
        for t in range(len(dimensions['time'])):
            print(f'plotting hr & lr for {timeiter[t]}')
            dimensions_t['time'] = [timeiter[t]]
            for vr in range(len(variables_with_data)):
                if vr == 0:
                    data_t = {variables_with_data[vr]: data_all[variables_with_data[vr]][:, t:t + 1, :]}
                else:
                    data_t[variables_with_data[vr]] = data_all[variables_with_data[vr]][:, t:t + 1, :]

            fig = plot_vertical_profiles_plotly_allres(
                variables=variables_with_data,
                data=data_t,
                dimensions=dimensions_t,
                y_array=altitude,
                positive_error_array=data_error_positive,
                negative_error_array=data_error_negative,
                xaxis_label=variables_axis,
                yaxis_label=altitude['metadata']['long_name'] + ' [' + altitude['metadata']['units'] + ']',
                title=station_location + " (" + station_id.upper() + ") - " + str(
                    datetime.fromtimestamp(dimensions_t['time'][0], tz=timezone.utc).strftime(
                        '%d/%m/%Y %H:%M:%S')) + " - ELDA MWL - High and low resolution")

            # Show the plot (in Jupyter Notebook or Jupyter Lab)
            # fig.show()

            # Save the plot
            # Timestamps for the filename
            t0 = datetime.fromtimestamp(timeiter[t], tz=timezone.utc).strftime('%Y%m%d%H%M')
            if t + 1 < len(timeiter):
                t1 = datetime.fromtimestamp(timeiter[t + 1], tz=timezone.utc).strftime('%Y%m%d%H%M')
            else:
                t1 = netcdf_file[29:41]
            filename = "ELDAmwl_" + netcdf_file[0:3].upper() + "_" + t0 + "-" + t1 + "_all.html"

            fig.write_html(html_path + filename)
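
# Standalone usage sketch for the reader helpers above (assumes the product file exists under
# netcdf_path and contains the given group and variable; the names below are the same ones
# this script already uses):
#
#     var, dims, meta = read_variable_in_group2(netcdf_path + netcdf_file, 'lowres_products', 'backscatter')
#     print(meta['long_name'], meta['units'], var.shape)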