from config import *
import plotly.graph_objects as go
from plotly.subplots import make_subplots
import netCDF4 as nc
from datetime import datetime
import numpy as np
import sys
from os.path import exists


def read_variable_in_group(nc_file_path, group_name, variable_name):
    try:
        with nc.Dataset(nc_file_path, 'r') as dataset:
            # Find the group
            group = dataset.groups[group_name]

            # Access the variable
            try:
                variable = group.variables[variable_name]
                dimensions = variable.dimensions
                variable_metadata = {'name': variable.name.capitalize(),
                                     'long_name': variable.long_name.capitalize(),
                                     'units': variable.units,
                                     'ndim': variable.ndim}

                # Create an index array for slicing the variable and read the full data
                index_array = [slice(None)] * variable.ndim
                variable_data = variable[index_array]

                # Fetch the dimension values
                dimension_values = {}
                for dim_name in dimensions:
                    dimension_values[dim_name] = group.variables[dim_name][:]

                # Return the variable data, its dimensions and its metadata in a dictionary
                return {'data': variable_data, 'dimensions': dimension_values, 'metadata': variable_metadata}

            except KeyError:
                print(f"The variable {variable_name} does not exist in the {group_name} group.")

    except FileNotFoundError:
        print(f"The file '{nc_file_path}' does not exist.")
    except Exception as e:
        print(f"The group {e} does not exist.")


def read_global_attribute(nc_file_path, attribute_name):
    # Open the NetCDF file in read mode
    with nc.Dataset(nc_file_path, 'r') as dataset:
        # Check if the attribute exists
        if attribute_name in dataset.ncattrs():
            # Read and return the attribute value
            return dataset.getncattr(attribute_name)
        else:
            print(f"Attribute '{attribute_name}' not found in the NetCDF file.")
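
# Illustrative use of the two helpers above (a sketch only; the group, attribute and
# returned values are examples, not taken from a real run):
#
#   location = read_global_attribute(netcdf_file_path, 'location')
#   altitude = read_variable_in_group(netcdf_file_path, 'lowres_products', 'altitude')
#   # altitude -> {'data': <masked array>,
#   #              'dimensions': {<dim_name>: <values>, ...},
#   #              'metadata': {'name': ..., 'long_name': ..., 'units': ..., 'ndim': ...}}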


def read_variable_in_group2(nc_file_path, group_name, variable_name):
    try:
        with nc.Dataset(nc_file_path, 'r') as dataset:
            # Find the group
            group = dataset.groups[group_name]

            # Access the variable
            try:
                variable = group.variables[variable_name]
                dimensions = variable.dimensions
                variable_metadata = {'name': variable.name.capitalize(),
                                     'long_name': variable.long_name.capitalize(),
                                     'units': variable.units,
                                     'ndim': variable.ndim}

                # Create an index array for slicing the variable and read the full data
                index_array = [slice(None)] * variable.ndim
                variable_data = variable[index_array]

                # Fetch the dimension values as plain lists
                dimension_values = {}
                for dim_name in dimensions:
                    dimension_values[dim_name] = group.variables[dim_name][:].tolist()

                return variable_data, dimension_values, variable_metadata

            except KeyError:
                print(f"The variable {variable_name} does not exist in the {group_name} group.")
    except FileNotFoundError:
        print(f"The file {nc_file_path} does not exist.")
    except Exception as e:
        print(f"The group {e} does not exist.")
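
# Note (descriptive, based on how the arrays are indexed below): for the product variables,
# read_variable_in_group2 is expected to return a masked array laid out as
# [wavelength, time, level], i.e. the plotting code accesses one profile as data[variable][w, t, :].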


def plot_vertical_profiles_plotly(variables, data, dimensions, y_array, positive_error_array, negative_error_array,
                                  xaxis_label, yaxis_label, title, timeinrows):
    """
    Plot vertical profiles using plotly.

    Parameters:
    - variables: list of variable names to plot (one column per variable)
    - data: dict mapping each variable name to an array indexed [wavelength, time, level]
    - dimensions: dict with the 'time' and 'wavelength' coordinate values
    - y_array: altitude dictionary (as returned by read_variable_in_group)
    - positive_error_array / negative_error_array: dicts with the error arrays per variable
    - xaxis_label: list of x-axis labels (one per variable)
    - yaxis_label: label for the y axis
    - title: figure title
    - timeinrows: if True, each temporal profile is plotted in its own row

    Returns:
    - fig: plotly figure
    """

    # Set number of columns, number of rows, and subtitles if needed
    ncols = len(variables)
    subtitles = None
    if timeinrows:
        nrows = len(dimensions['time'])
        if nrows > 1:
            for t in range(nrows):
                for v in range(ncols):
                    if subtitles is None:
                        subtitles = [str(datetime.fromtimestamp(dimensions['time'][t]))]
                    else:
                        subtitles += [str(datetime.fromtimestamp(dimensions['time'][t]))]
    else:
        nrows = 1
        subtitles = ''

    # Create figure
    fig = make_subplots(rows=nrows, cols=ncols,
                        subplot_titles=subtitles,
                        shared_yaxes=True)

    # Define colours
    clrs = [['rgb(0,35,255)', 'rgb(0,20,150)', 'rgb(0,170,250)'],    # UV
            ['rgb(90,210,50)', 'rgb(40,90,25)', 'rgb(120,250,80)'],  # VIS
            ['rgb(250,30,30)', 'rgb(200,25,30)', 'rgb(250,125,0)']]  # IR
    clrs = np.array(clrs)

    # Plot
    for v in range(len(variables)):
        for t in range(len(dimensions['time'])):
            for w in range(len(dimensions['wavelength'])):
                tn = str(datetime.fromtimestamp(dimensions['time'][t]))
                wn = str(dimensions['wavelength'][w])
                if timeinrows:
                    lgnd = wn + ' nm'
                    rown = t + 1
                    colorind = 0
                else:
                    lgnd = wn + ' nm - ' + tn
                    rown = 1
                    colorind = t

                # Colour definition (wavelengths matched with a 1 nm tolerance)
                if abs(dimensions['wavelength'][w] - 1064) < 1:
                    clr = clrs[2, colorind]
                elif abs(dimensions['wavelength'][w] - 532) < 1:
                    clr = clrs[1, colorind]
                elif abs(dimensions['wavelength'][w] - 355) < 1:
                    clr = clrs[0, colorind]
                else:
                    clr = 'rgb(0,0,0)'

                data_values = data[variables[v]][w, t, :]
                # Skip profiles that contain only NaN values (their masked average is np.ma.masked)
                if np.average(np.ma.masked_array(data_values, np.isnan(data_values))) is not np.ma.masked:
                    vertical_levels = y_array["data"][t, :]
                    error_values = positive_error_array[variables[v]][w, t, :]

                    # Add trace
                    fig.add_trace(go.Scatter(
                        x=data_values,
                        y=vertical_levels,
                        error_x=dict(array=error_values, width=0, thickness=0.1),
                        # ToDo Include distinction between positive and negative errors
                        mode='lines',
                        line=dict(width=2, color=clr),
                        name=lgnd),
                        row=rown, col=v + 1,
                    )

                    # ToDo Check formatting options (showlegend, mapbox)
                    fig.update_xaxes({"title": xaxis_label[v]}, row=rown, col=v + 1)
                    fig.update_xaxes({"mirror": True, "ticks": 'outside', "showline": True, "color": "black"},
                                     row=rown, col=v + 1)

                    fig.update_xaxes(ticks="outside")
                    fig.update_yaxes(ticks="outside", row=rown, col=v + 1)
                    # ToDo check tickformat (i.e. tickformat=":.2e")
                    if v == 0:
                        fig.update_yaxes({"title": yaxis_label}, row=rown, col=v + 1)

                    # ToDo add boxes around the subplots
                    a = fig.layout['xaxis']['domain']
                    b = fig.layout['yaxis']['domain']
                    # Commented-out experiments for drawing the boxes:
                    # fig.add_hline(y=b[0], line=dict(color="yellow", width=2), row=rown, col=v+1)
                    # fig.add_hline(y=b[1], line=dict(color="purple", width=2), row=rown, col=v+1)
                    # fig.add_vline(x=a[0], line=dict(color="orange", width=2), row=rown, col=v+1)
                    # fig.add_vline(x=a[1], line=dict(color="pink", width=2), row=rown, col=v+1)
                    # fig.add_shape(type="line", xref="paper", yref="paper", x0=a[0], y0=b[0], x1=a[1], y1=b[1],
                    #               line=dict(color="grey", width=2), row=rown, col=v+1)
                    # fig.add_shape(type="line", xref="paper", yref="paper", x0=1, y0=1, x1=1, y1=0,
                    #               line=dict(color="blue", width=2), row=rown, col=v+1)
                    # fig.add_shape(type="line", xref="paper", yref="paper", x0=0, y0=0, x1=1, y1=0,
                    #               line=dict(color="red", width=2), row=rown, col=v+1)
                    # fig.add_shape(type="line", xref="paper", yref="paper", x0=0, y0=0, x1=0, y1=1,
                    #               line=dict(color="green", width=2), row=rown, col=v+1)

    # Set axis labels and title
    fig.update_layout(
        yaxis_title=yaxis_label,
        title=title,
        template="seaborn",  # You can change the template as per your preference
        # ['ggplot2', 'seaborn', 'simple_white', 'plotly', 'plotly_white', 'plotly_dark', 'presentation',
        #  'xgridoff', 'ygridoff', 'gridon', 'none']
        height=max(600, 450 * nrows), width=min(max(400 * len(variables), 750), 1400),
    )

    return fig
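
# Minimal illustrative call of plot_vertical_profiles_plotly (a sketch only; the variable name,
# units and timestamp are examples, and bsc/bsc_err stand for arrays shaped [wavelength, time, level]):
#
#   fig = plot_vertical_profiles_plotly(
#       variables=['backscatter'],
#       data={'backscatter': bsc},
#       dimensions={'time': [1539810000], 'wavelength': [355.0, 532.0, 1064.0]},
#       y_array=altitude,
#       positive_error_array={'backscatter': bsc_err},
#       negative_error_array={'backscatter': bsc_err},
#       xaxis_label=['Backscatter [ 1/(m*sr) ]'],
#       yaxis_label='Altitude [km]',
#       title='Station - ELDA MWL',
#       timeinrows=True)
#   fig.write_html('backscatter_profile.html')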


def plot_vertical_profiles_plotly_allres(variables, data, dimensions, y_array, positive_error_array,
                                         negative_error_array, xaxis_label, yaxis_label, title):
    """
    Plot vertical profiles using plotly, combining the low and high resolution products in one figure.

    Parameters:
    - variables: list of variable names to plot (one column per variable)
    - data: dict mapping each variable name to an array indexed [wavelength_res, time, level]
    - dimensions: dict that includes 'time' and the combined 'wavelength_res' labels
    - y_array: altitude dictionary (as returned by read_variable_in_group)
    - positive_error_array / negative_error_array: dicts with the error arrays per variable
    - xaxis_label: list of x-axis labels (one per variable)
    - yaxis_label: label for the y axis
    - title: figure title

    Returns:
    - fig: plotly figure
    """

    # Set number of columns, number of rows, and subtitles if needed
    ncols = len(variables)
    nrows = 1
    subtitles = ''

    # Create figure
    fig = make_subplots(rows=nrows, cols=ncols,
                        subplot_titles=subtitles,
                        shared_yaxes=True)

    # Define colours (first column for LR, second for HR)
    clrs = [['rgb(0,35,255)', 'rgb(0,20,150)'],    # UV
            ['rgb(90,210,50)', 'rgb(40,90,25)'],   # VIS
            ['rgb(250,30,30)', 'rgb(130,30,10)']]  # IR
    clrs = np.array(clrs)

    # Plot
    for v in range(len(variables)):
        for t in range(len(dimensions['time'])):
            for w in range(len(dimensions['wavelength_res'])):
                wl = float(dimensions['wavelength_res'][w].split(' ')[0])
                wn = str(dimensions['wavelength_res'][w])
                lgnd = wn.replace(' ', ' nm ')
                rown = 1
                if wn.find('LR') > 0:
                    colorind = 0
                else:
                    colorind = 1

                # Colour definition (wavelengths matched with a 1 nm tolerance)
                if abs(wl - 1064) < 1:
                    clr = clrs[2, colorind]
                elif abs(wl - 532) < 1:
                    clr = clrs[1, colorind]
                elif abs(wl - 355) < 1:
                    clr = clrs[0, colorind]
                else:
                    clr = 'rgb(0,0,0)'

                data_values = data[variables[v]][w, t, :]
                # Skip profiles that contain only NaN values (their masked average is np.ma.masked)
                if np.average(np.ma.masked_array(data_values, np.isnan(data_values))) is not np.ma.masked:
                    vertical_levels = y_array["data"][t, :]
                    error_values = positive_error_array[variables[v]][w, t, :]
                    # Add trace
                    fig.add_trace(go.Scatter(
                        x=data_values,
                        y=vertical_levels,
                        error_x=dict(array=error_values, width=0, thickness=0.1),
                        # ToDo Include distinction between positive and negative errors
                        mode='lines',
                        line=dict(width=2, color=clr),
                        name=lgnd),
                        row=rown, col=v + 1,
                    )

                    # ToDo Check formatting options (showlegend, mapbox)
                    fig.update_xaxes({"title": xaxis_label[v]}, row=rown, col=v + 1)
                    fig.update_xaxes({"mirror": True, "ticks": 'outside', "showline": True, "color": "black"},
                                     row=rown, col=v + 1)

                    fig.update_xaxes(ticks="outside")
                    fig.update_yaxes(ticks="outside", row=rown, col=v + 1)
                    # ToDo check tickformat (i.e. tickformat=":.2e")
                    if v == 0:
                        fig.update_yaxes({"title": yaxis_label}, row=rown, col=v + 1)

                    # ToDo add boxes around the subplots (same commented-out experiments as above)
                    a = fig.layout['xaxis']['domain']
                    b = fig.layout['yaxis']['domain']

    # Set axis labels and title
    fig.update_layout(
        yaxis_title=yaxis_label,
        title=title,
        template="seaborn",  # You can change the template as per your preference
        # ['ggplot2', 'seaborn', 'simple_white', 'plotly', 'plotly_white', 'plotly_dark', 'presentation',
        #  'xgridoff', 'ygridoff', 'gridon', 'none']
        height=max(600, 450 * nrows), width=min(max(400 * len(variables), 750), 1400),
    )

    return fig
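
# Note (descriptive): plot_vertical_profiles_plotly_allres expects dimensions['wavelength_res'] to
# contain the combined-resolution labels built further below, e.g. '355.0 (LR)' or '532.0 (HR)'
# (illustrative values); the numeric part selects the colour and the LR/HR suffix the shade.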


# Files for testing
# Backscatter and extinction, no error:
# netcdf_file = 'hpb_012_0000598_201810172100_201810172300_20181017oh00_ELDAmwl_v0.0.1.nc'
# Extinction with error:
# netcdf_file = 'hpb_012_0000627_202308240200_202308240400_20230824hpb0200_ELDAmwl_v0.0.1.nc'


try:
    netcdf_file = sys.argv[1]
except IndexError:
    print('Syntax is incorrect, you need to pass the filename of the file to be plotted.\n'
          'Example: ./eldamwl_plot.py hpb_012_0000598_201810172100_201810172300_20181017oh00_ELDAmwl_v0.0.1.nc')
    exit()

# Print help
if netcdf_file == '-h' or netcdf_file == '--h':
    print('Syntax: ./eldamwl_plot.py hpb_012_0000598_201810172100_201810172300_20181017oh00_ELDAmwl_v0.0.1.nc')
    exit()

# Full path to the file (netcdf_path comes from config)
netcdf_file_path = netcdf_path + netcdf_file

file_exists = exists(netcdf_file_path)
if not file_exists:
    print('The file that you provided does not exist.')
    exit()

# Groups of data in the NetCDF file (low and high resolution)
groups = ['lowres_products', 'highres_products']
groups_names = ['Low resolution', 'High resolution']

# Variables that we want to plot
variable = ['backscatter', 'extinction', 'lidarratio', 'volumedepolarization',
            'particledepolarization']  # ToDo Test particledepolarization when available
# Errors associated with the variables
variable_error = ['error_backscatter', 'error_extinction', 'error_lidarratio',
                  'positive_systematic_error_volumedepolarization', 'error_particledepolarization']
variable_negative_error_volumedepol = 'negative_systematic_error_volumedepolarization'
# ToDo Consider adding other variables:
# cloud_mask (time, level)

# Read global attributes
station_id = read_global_attribute(netcdf_file_path, 'station_ID')
station_location = read_global_attribute(netcdf_file_path, 'location')
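
# Note (descriptive): the two lists above are index-aligned, i.e. variable[i] is read together with
# variable_error[i]; only volumedepolarization has a separate negative systematic error variable.
# netcdf_path, html_path and interactive are expected to be provided by the wildcard import of config.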

# Read data
for g in range(len(groups)):
    # Read the altitude variable
    altitude = read_variable_in_group(netcdf_file_path, groups[g], 'altitude')

    if altitude['metadata']['units'] == 'm':
        altitude['data'] = altitude['data'] / 1000.0
        altitude['metadata']['units'] = 'km'

    # Initialize variables
    data = None
    variables_with_data = None
    variables_axis = None

    # Read the data
    for v in range(len(variable)):
        # Initialize var
        var = None

        try:
            var, dim, met = read_variable_in_group2(netcdf_file_path, groups[g], variable[v])
        except TypeError:
            print(f"The variable {variable[v]} is not present in the {groups_names[g]} group.")

        if var is not None:  # If the variable that we're trying to read exists
            if var.max() != var.min():  # If there is data in the array
                if variables_with_data is None:
                    variables_with_data = [variable[v]]
                    variables_axis = [met['long_name'] + ' [ ' + met['units'] + ' ]']
                else:
                    variables_with_data += [variable[v]]
                    variables_axis += [met['long_name'] + ' [ ' + met['units'] + ' ]']

                var_error, dim_error, met_error = read_variable_in_group2(netcdf_file_path, groups[g],
                                                                          variable_error[v])
                if variable[v] == 'volumedepolarization':
                    var_error_negative, dim, met = read_variable_in_group2(netcdf_file_path, groups[g],
                                                                           variable_negative_error_volumedepol)
                else:
                    var_error_negative = var_error

                # Add the data that was just read, to have it all grouped
                if data is None:
                    # Start populating data and data_error
                    data = {variable[v]: var}
                    data_error_positive = {variable[v]: var_error}
                    data_error_negative = {variable[v]: var_error_negative}
                else:
                    # Add more data to data and data_error
                    data[variable[v]] = var
                    data_error_positive[variable[v]] = var_error
                    data_error_negative[variable[v]] = var_error_negative
            else:
                print(f'There is no data for {variable[v]} in the {groups_names[g]} group.')

    dimensions = dim

    # Save lowres and highres in separate variables to plot them together afterwards
    if groups[g] == 'lowres_products':
        altitude_lr = altitude
        dimensions_lr = dim
        data_lr = data
        data_error_positive_lr = data_error_positive
        data_error_negative_lr = data_error_negative
        variables_with_data_lr = variables_with_data
        variables_axis_lr = variables_axis
    if groups[g] == 'highres_products':
        altitude_hr = altitude
        dimensions_hr = dim
        data_hr = data
        data_error_positive_hr = data_error_positive
        data_error_negative_hr = data_error_negative
        variables_with_data_hr = variables_with_data
        variables_axis_hr = variables_axis

    # If interactive (set in config.py) is True, ask the user whether to plot the different temporal
    # profiles in different plots/rows; if it is False, proceed with the default values
    if g == 0:
        # Initialize
        timeinplots = True
        timeinrows = False
        if interactive and len(dimensions['time']) > 1:
            answer = input(
                f"There are {len(dimensions['time'])} temporal profiles, "
                f"do you want to plot them on different plots [y/n]? ")
            if answer[0:1].lower() == 'n':
                timeinplots = False
            if not timeinplots:
                answer = input(
                    f"There are {len(dimensions['time'])} temporal profiles, "
                    f"do you want to plot them on different rows [y/n]? ")
                if answer[0:1].lower() == 'y':
                    timeinrows = True

    if variables_with_data is not None:
        timeiter = dimensions['time']
        if timeinplots:
            # Default plotting: one plot per temporal profile
            dimensions_t = dimensions
            for t in range(len(dimensions['time'])):
                dimensions_t['time'] = [timeiter[t]]
                for vr in range(len(variables_with_data)):
                    if vr == 0:
                        data_t = {variables_with_data[vr]: data[variables_with_data[vr]][:, t:t + 1, :]}
                    else:
                        data_t[variables_with_data[vr]] = data[variables_with_data[vr]][:, t:t + 1, :]

                fig = plot_vertical_profiles_plotly(
                    variables=variables_with_data,
                    data=data_t,
                    dimensions=dimensions_t,
                    y_array=altitude,
                    positive_error_array=data_error_positive,
                    negative_error_array=data_error_negative,
                    xaxis_label=variables_axis,
                    yaxis_label=altitude['metadata']['long_name'] + ' [' + altitude['metadata']['units'] + ']',
                    title=station_location + " (" + station_id.upper() + ") - " +
                          datetime.fromtimestamp(dimensions_t['time'][0]).strftime('%d/%m/%Y %H:%M:%S') +
                          " - ELDA MWL " + groups_names[g],
                    timeinrows=True
                )

                # Show the plot (in Jupyter Notebook or Jupyter Lab)
                # fig.show()

                # Save the plot
                filename = "ELDAmwl_" + netcdf_file[0:3].upper() + "_" + datetime.fromtimestamp(
                    dimensions['time'][0]).strftime('%Y%m%d-%H%M%S') + "_" + groups[g] + ".html"
                fig.write_html(html_path + filename)

        else:
            # Plotting in one plot (even if there are different time profiles)
            fig = plot_vertical_profiles_plotly(
                variables_with_data,
                data,
                dimensions,
                altitude,
                data_error_positive,
                data_error_negative,
                xaxis_label=variables_axis,
                yaxis_label=altitude['metadata']['long_name'] + ' [' + altitude['metadata']['units'] + ']',
                title=station_location + " (" + station_id.upper() + ") - " +
                      datetime.fromtimestamp(dimensions['time'][0]).strftime('%d/%m/%Y') +
                      " - ELDA MWL " + groups_names[g],
                timeinrows=timeinrows
            )

            # Show the plot (in Jupyter Notebook or Jupyter Lab)
            # fig.show()

            # Save the plot
            if timeinrows:
                filename = "ELDAmwl_" + station_id.upper() + "_" + datetime.fromtimestamp(
                    dimensions['time'][0]).strftime('%Y%m%d%H%M%S') + '-' + datetime.fromtimestamp(
                    dimensions['time'][-1]).strftime('%Y%m%d%H%M%S') + "_" + groups[g] + "_timeinrows.html"
            else:
                filename = "ELDAmwl_" + station_id.upper() + "_" + datetime.fromtimestamp(
                    dimensions['time'][0]).strftime('%Y%m%d%H%M%S') + '-' + datetime.fromtimestamp(
                    dimensions['time'][-1]).strftime('%Y%m%d%H%M%S') + "_" + groups[g] + ".html"

            fig.write_html(html_path + filename)

    else:
        print(f'There is no data to be plotted in the {groups_names[g]} group.')

# Plotting high and low resolution together
if variables_with_data_lr and variables_with_data_hr:
    print("We have low and high resolution data, let's combine it!")

    # Variables with data
    variables_with_data = list(variables_with_data_lr)
    variables_with_data.extend(x for x in variables_with_data_hr if x not in variables_with_data)
    print(f'Variables with data: {variables_with_data}')

    # Variables axis
    variables_axis = list(variables_axis_lr)
    variables_axis.extend(x for x in variables_axis_hr if x not in variables_axis)

    # Joining dimensions for both resolutions
    # Wavelength
    wav = list(dimensions_lr['wavelength'])
    wav.extend(x for x in dimensions_hr['wavelength'] if x not in wav)
    dimensions['wavelength'] = wav
    # Add the resolution (LR/HR) to the wavelength labels
    wlr = list(dimensions_lr['wavelength'])
    for r in range(len(wlr)):
        wlr[r] = str(wlr[r]) + ' (LR)'
    whr = list(dimensions_hr['wavelength'])
    for r in range(len(whr)):
        whr[r] = str(whr[r]) + ' (HR)'
    dimensions['wavelength_res'] = wlr + whr
    # Time
    tim = list(dimensions_lr['time'])
    tim.extend(x for x in dimensions_hr['time'] if x not in tim)
    dimensions['time'] = tim
    # Level
    lev = list(dimensions_lr['level'])
    lev.extend(x for x in dimensions_hr['level'] if x not in lev)
    dimensions['level'] = lev

    # Populate the arrays with data and errors
    for v in variables_with_data:
        # Create masked arrays to store the joint data (masked where there is no value)
        data_var = np.ma.zeros((len(dimensions['wavelength_res']), len(tim), len(lev)))
        data_error_positive_var = np.ma.zeros((len(dimensions['wavelength_res']), len(tim), len(lev)))
        data_error_negative_var = np.ma.zeros((len(dimensions['wavelength_res']), len(tim), len(lev)))
        # ToDo check if needed
        data_var = np.ma.masked_values(data_var, 0)
        data_error_positive_var = np.ma.masked_values(data_error_positive_var, 0)
        data_error_negative_var = np.ma.masked_values(data_error_negative_var, 0)
        for w in range(len(dimensions['wavelength_res'])):
            for t in range(len(dimensions['time'])):
                # Incorporate data into the main array
                print(f"{v} - Wavelength: {dimensions['wavelength_res'][w]}. Time: {dimensions['time'][t]}.")

                # High resolution
                if dimensions['wavelength_res'][w].find('HR') >= 0:
                    # Is there HR data?
                    if v in data_hr:
                        try:
                            # Find the position of w and t in the high resolution arrays
                            wl = float(dimensions['wavelength_res'][w].split(' ')[0])
                            wp = dimensions_hr['wavelength'].index(wl)
                            tp = dimensions_hr['time'].index(dimensions['time'][t])
                            # Check if there is data
                            a = data_hr[v][wp, tp, :]
                            d = np.average(np.ma.masked_array(a, np.isnan(a)))
                            if np.ma.min(d) is not np.ma.masked:
                                print(f'\tWe have HR data for {v}')
                                # Save data into the common structure
                                data_var[w, t, :] = a
                                if np.average(np.ma.masked_array(data_error_positive_hr[v][wp, tp, :], np.isnan(
                                        data_error_positive_hr[v][wp, tp, :]))) is not np.ma.masked:
                                    data_error_positive_var[w, t, :] = data_error_positive_hr[v][wp, tp, :]
                                if np.average(np.ma.masked_array(data_error_negative_hr[v][wp, tp, :], np.isnan(
                                        data_error_negative_hr[v][wp, tp, :]))) is not np.ma.masked:
                                    data_error_negative_var[w, t, :] = data_error_negative_hr[v][wp, tp, :]
                        except ValueError:
                            pass

                # Low resolution
                if dimensions['wavelength_res'][w].find('LR') >= 0:
                    # Is there LR data?
                    if v in data_lr:
                        try:
                            # Find the position of w and t in the low resolution arrays
                            wl = float(dimensions['wavelength_res'][w].split(' ')[0])
                            if wl in dimensions_lr['wavelength']:
                                wp = dimensions_lr['wavelength'].index(wl)
                            else:
                                wp = -1
                            tp = dimensions_lr['time'].index(dimensions['time'][t])
                            if wp > -1 and tp > -1:
                                # Check if there is data
                                a = data_lr[v][wp, tp, :]
                                d = np.average(np.ma.masked_array(a, np.isnan(a)))
                                if np.ma.min(d) is not np.ma.masked:
                                    print(f'\tWe have LR data for {v}')
                                    # Save data into the common structure
                                    data_var[w, t, :] = a
                                    if np.average(np.ma.masked_array(data_error_positive_lr[v][wp, tp, :], np.isnan(
                                            data_error_positive_lr[v][wp, tp, :]))) is not np.ma.masked:
                                        data_error_positive_var[w, t, :] = data_error_positive_lr[v][wp, tp, :]
                                    if np.average(np.ma.masked_array(data_error_negative_lr[v][wp, tp, :], np.isnan(
                                            data_error_negative_lr[v][wp, tp, :]))) is not np.ma.masked:
                                        data_error_negative_var[w, t, :] = data_error_negative_lr[v][wp, tp, :]
                        except ValueError:
                            # e.g. no 1064 nm wavelength in dimensions_lr
                            pass

        # Store data in the global matrices
        if variables_with_data.index(v) == 0:
            data_all = {v: data_var}
            data_error_positive = {v: data_error_positive_var}
            data_error_negative = {v: data_error_negative_var}
        else:
            data_all[v] = data_var
            data_error_positive[v] = data_error_positive_var
            data_error_negative[v] = data_error_negative_var

    # Plot
    timeiter = dimensions['time']
    # Default plotting: one plot per temporal profile
    dimensions_t = dimensions
    for t in range(len(dimensions['time'])):
        print(f'plotting hr & lr for {timeiter[t]}')
        dimensions_t['time'] = [timeiter[t]]
        for vr in range(len(variables_with_data)):
            if vr == 0:
                data_t = {variables_with_data[vr]: data_all[variables_with_data[vr]][:, t:t + 1, :]}
            else:
                data_t[variables_with_data[vr]] = data_all[variables_with_data[vr]][:, t:t + 1, :]

        # Note: the altitude read last (high resolution group) is used as the vertical axis here
        fig = plot_vertical_profiles_plotly_allres(
            variables=variables_with_data,
            data=data_t,
            dimensions=dimensions_t,
            y_array=altitude,
            positive_error_array=data_error_positive,
            negative_error_array=data_error_negative,
            xaxis_label=variables_axis,
            yaxis_label=altitude['metadata']['long_name'] + ' [' + altitude['metadata']['units'] + ']',
            title=station_location + " (" + station_id.upper() + ") - " +
                  datetime.fromtimestamp(dimensions_t['time'][0]).strftime('%d/%m/%Y %H:%M:%S') +
                  " - ELDA MWL - High and low resolution")

        # Show the plot (in Jupyter Notebook or Jupyter Lab)
        fig.show()

        # Save the plot
        filename = "ELDAmwl_" + netcdf_file[0:3].upper() + "_" + datetime.fromtimestamp(
            dimensions['time'][0]).strftime('%Y%m%d-%H%M%S') + "_all.html"
        fig.write_html(html_path + filename)