#!/usr/bin/env python
"""Plot ELDA MWL vertical profiles (backscatter, extinction, ...) from a
NetCDF file as interactive Plotly HTML pages.

Usage:
    ./eldamwl_plot.py hpb_012_0000598_201810172100_201810172300_20181017oh00_ELDAmwl_v0.0.1.nc

Reads the 'lowres_products' and 'highres_products' groups, plots each
resolution separately and, when both are present, a combined plot.
Output HTML files are written to ``html_path`` (from config).
"""

# import nc as nc
# import xarray as xr
from config import *  # provides netcdf_path, html_path, interactive
import plotly.graph_objects as go
from plotly.subplots import make_subplots
import netCDF4 as nc
from datetime import datetime, timezone
import numpy as np
import sys
from os.path import exists


def check_group_presence(nc_file_path, group_name):
    """Return True (and print a notice) if *group_name* exists in the file."""
    with nc.Dataset(nc_file_path, 'r') as dataset:
        if group_name in dataset.groups:
            print(f'{group_name} group exists')
            return True
        else:
            print(f'{group_name} group does not exist')
            return False


def read_variable_in_group(nc_file_path, group_name, variable_name):
    """Read one variable (data + dimension values + metadata) from a group.

    Returns a dict ``{'data': ..., 'dimensions': {dim_name: values},
    'metadata': {'name', 'long_name', 'units', 'ndim'}}`` or None (with a
    printed message) when the file/group/variable is missing.
    """
    try:
        with nc.Dataset(nc_file_path, 'r') as dataset:
            # Find the group
            group = dataset.groups[group_name]

            # Access the variable
            try:
                variable = group.variables[variable_name]
                dimensions = variable.dimensions
                variable_metadata = \
                    {'name': variable.name.capitalize(),
                     'long_name': variable.long_name.capitalize(),
                     'units': variable.units,
                     'ndim': variable.ndim}

                # Full slice over every dimension of the variable
                index_array = [slice(None)] * variable.ndim
                variable_data = variable[index_array]

                # Fetch the dimension values
                dimension_values = {}
                for dim_name in dimensions:
                    dim_values = group.variables[dim_name][:]
                    dimension_values[dim_name] = dim_values
                    # NOTE(review): leftover adjustment of the index array;
                    # variable_data is already extracted above, so this has
                    # no effect on the returned data.
                    index_array[variable.dimensions.index(dim_name)] = 0

                # Variable data and its associated dimensions in one dict
                result = {'data': variable_data,
                          'dimensions': dimension_values,
                          'metadata': variable_metadata}
                return result

            except KeyError:
                print("The specified variable does not exist in the group.")

    except FileNotFoundError:
        print(f"The file '{nc_file_path}' does not exist.")
    except Exception as e:
        # Broad catch kept: a missing group raises KeyError from
        # dataset.groups[...]; other errors are reported the same way.
        print(f"The group {e} does not exist.")


def read_global_attribute(nc_file_path, attribute_name):
    """Return the value of a global NetCDF attribute, or None if absent."""
    # Open the NetCDF file in read mode
    with nc.Dataset(nc_file_path, 'r') as dataset:
        # Check if the attribute exists
        if attribute_name in dataset.ncattrs():
            # Read the attribute value
            attribute_value = dataset.getncattr(attribute_name)
            return attribute_value
        else:
            print(f"Attribute '{attribute_name}' not found in the NetCDF file.")


def read_variable_in_group2(nc_file_path, group_name, variable_name):
    """Like read_variable_in_group, but returns a 3-tuple
    ``(data, dimension_values, metadata)`` and converts dimension values to
    plain Python lists. Returns None (printing a message) on any failure,
    which callers must handle (unpacking None raises TypeError).
    """
    try:
        with nc.Dataset(nc_file_path, 'r') as dataset:
            # Find the group
            group = dataset.groups[group_name]

            # Access the variable
            try:
                variable = group.variables[variable_name]
                dimensions = variable.dimensions
                variable_metadata = \
                    {'name': variable.name.capitalize(),
                     'long_name': variable.long_name.capitalize(),
                     'units': variable.units,
                     'ndim': variable.ndim}

                # Full slice over every dimension of the variable
                index_array = [slice(None)] * variable.ndim
                variable_data = variable[index_array]

                # Fetch the dimension values (as plain lists, so .index()
                # and equality tests work naturally later on)
                dimension_values = {}
                for dim_name in dimensions:
                    dim_values = group.variables[dim_name][:]
                    dimension_values[dim_name] = dim_values.tolist()
                    # NOTE(review): leftover, see read_variable_in_group.
                    index_array[variable.dimensions.index(dim_name)] = 0

                return variable_data, dimension_values, variable_metadata

            except KeyError:
                print(f"The variable {variable_name} does not exist in the {group_name} group.")
    except FileNotFoundError:
        print(f"The file {nc_file_path} does not exist.")
    except Exception as e:
        print(f"The group {e} does not exist.")


def plot_vertical_profiles_plotly(variables, data, dimensions, y_array, positive_error_array, negative_error_array,
                                  xaxis_label, yaxis_label, title, timeinrows):
    """Plot vertical profiles using Plotly.

    Parameters:
    - variables: list of variable names to plot (one subplot column each)
    - data: dict variable name -> masked array [wavelength, time, level]
    - dimensions: dict with 'time' (epoch seconds) and 'wavelength' (nm) lists
    - y_array: altitude dict as returned by read_variable_in_group
    - positive_error_array / negative_error_array: dicts of error arrays
      (ToDo: negative errors not yet drawn separately)
    - xaxis_label: list of x-axis labels, one per variable
    - yaxis_label: y-axis label (altitude)
    - title: figure title
    - timeinrows: if True, each time profile goes to its own subplot row

    Returns:
    - fig: plotly Figure
    """

    # Set number of columns, number of rows, and subtitles if needed
    ncols = len(variables)
    subtitles = None
    if timeinrows is True:
        nrows = len(dimensions['time'])
        if nrows > 1:
            # One subtitle per subplot cell (rows x cols), all showing the
            # profile timestamp of that row.
            # NOTE(review): naive local-time conversion, unlike the UTC
            # conversions used for titles/filenames below — confirm intent.
            for t in range(nrows):
                for v in range(ncols):
                    if subtitles is None:
                        subtitles = [str(datetime.fromtimestamp(dimensions['time'][t]))]
                    else:
                        subtitles += [str(datetime.fromtimestamp(dimensions['time'][t]))]
    else:
        nrows = 1
        subtitles = ''

    # Create figure
    fig = make_subplots(rows=nrows, cols=ncols,
                        subplot_titles=subtitles,
                        shared_yaxes=True)

    # Define colours: one row per spectral band, one column per time index
    clrs = [['rgb(0,35,255)', 'rgb(0,20,150)', 'rgb(0,170,250)'],  # UV
            ['rgb(90,210,50)', 'rgb(40,90,25)', 'rgb(120,250,80)'],  # VIS
            ['rgb(250,30,30)', 'rgb(200,25,30)', 'rgb(250,125,0)']]  # IR
    clrs = np.array(clrs)

    # Fallback so update_layout below cannot hit an undefined name when no
    # trace at all is plotted (previously a NameError).
    rown = 1

    # Plot
    for v in range(len(variables)):
        for t in range(len(dimensions['time'])):
            for w in range(len(dimensions['wavelength'])):
                tn = str(datetime.fromtimestamp(dimensions['time'][t]))
                wn = str(dimensions['wavelength'][w])
                if timeinrows:
                    lgnd = wn + ' nm'
                    rown = t + 1
                    colorind = 0
                else:
                    lgnd = wn + ' nm - ' + tn
                    rown = 1
                    colorind = t

                # colour definition (tolerate ~1 nm wavelength rounding)
                if abs(dimensions['wavelength'][w] - 1064) < 1:
                    clr = clrs[2, colorind]
                elif abs(dimensions['wavelength'][w] - 532) < 1:
                    clr = clrs[1, colorind]
                elif abs(dimensions['wavelength'][w] - 355) < 1:
                    clr = clrs[0, colorind]
                else:
                    clr = 'rgb(0,0,0)'

                data_values = data[variables[v]][w, t, :]
                # Skip profiles that are entirely NaN/masked
                if np.average(np.ma.masked_array(data_values, np.isnan(data_values))) is not np.ma.masked:
                    vertical_levels = y_array["data"][t, :]
                    error_values = positive_error_array[variables[v]][w, t, :]

                    # Add trace
                    # ToDo Include distinction between positive and negative errors
                    fig.add_trace(go.Scatter(
                        x=data_values,
                        y=vertical_levels,
                        error_x=dict(array=error_values, width=0, thickness=0.1),
                        mode='lines',
                        line=dict(width=2, color=clr),
                        name=lgnd),
                        row=rown, col=v + 1,
                    )

                    # ToDo Check formatting options (showlegend, mapbox)
                    fig.update_xaxes({"title": xaxis_label[v]}, row=rown, col=v + 1)
                    fig.update_xaxes({"mirror": True, "ticks": 'outside', "showline": True, "color": "black"},
                                     row=rown, col=v + 1)

                    fig.update_xaxes(ticks="outside")
                    fig.update_yaxes(ticks="outside", row=rown, col=v + 1)
                    # ToDo check tickformat (i.e. tickformat=":.2e")
                    if v == 0:
                        fig.update_yaxes({"title": yaxis_label}, row=rown, col=v + 1)

                    # ToDo add boxes (subplot outlines); earlier add_hline /
                    # add_shape experiments removed — axis domains kept for
                    # future use.
                    a = fig.layout['xaxis']['domain']
                    b = fig.layout['yaxis']['domain']

    # Set axis labels and title
    fig.update_layout(
        yaxis_title=yaxis_label,
        title=title,
        template="seaborn",  # You can change the template as per your preference
        # ['ggplot2', 'seaborn', 'simple_white', 'plotly', 'plotly_white', 'plotly_dark', 'presentation',
        # 'xgridoff', 'ygridoff', 'gridon', 'none']
        height=max(600, 450 * rown), width=min(max(400 * len(variables), 750), 1400),
    )

    return fig


def plot_vertical_profiles_plotly_allres(variables, data, dimensions, y_array, positive_error_array,
                                         negative_error_array, xaxis_label, yaxis_label, title):
    """Plot vertical profiles combining low and high resolution using Plotly.

    Same contract as plot_vertical_profiles_plotly, except the wavelength
    axis is ``dimensions['wavelength_res']``: strings like "532.0 (LR)" /
    "532.0 (HR)". All times are drawn on a single row.

    Returns:
    - fig: plotly Figure
    """

    # Set number of columns, number of rows, and subtitles if needed
    ncols = len(variables)
    nrows = 1
    subtitles = ''

    # Create figure
    fig = make_subplots(rows=nrows, cols=ncols,
                        subplot_titles=subtitles,
                        shared_yaxes=True)

    # Define colours: column 0 = LR shade, column 1 = HR shade
    clrs = [['rgb(0,35,255)', 'rgb(0,20,150)'],  # UV
            ['rgb(90,210,50)', 'rgb(40,90,25)'],  # VIS
            ['rgb(250,30,30)', 'rgb(130,30,10)']]  # IR
    clrs = np.array(clrs)

    # Fallback so update_layout cannot hit an undefined name when no trace
    # is plotted.
    rown = 1

    # Plot
    for v in range(len(variables)):
        for t in range(len(dimensions['time'])):
            for w in range(len(dimensions['wavelength_res'])):
                # "532.0 (LR)" -> numeric wavelength + legend text
                wl = float(dimensions['wavelength_res'][w].split(' ')[0])
                wn = str(dimensions['wavelength_res'][w])
                lgnd = wn.replace(' ', ' nm ')
                rown = 1
                if wn.find('LR') > 0:
                    colorind = 0
                else:
                    colorind = 1

                # colour definition
                if abs(wl - 1064) < 1:
                    clr = clrs[2, colorind]
                elif abs(wl - 532) < 1:
                    clr = clrs[1, colorind]
                elif abs(wl - 355) < 1:
                    clr = clrs[0, colorind]
                else:
                    clr = 'rgb(0,0,0)'

                data_values = data[variables[v]][w, t, :]
                # Skip profiles that are entirely NaN/masked
                if np.average(np.ma.masked_array(data_values, np.isnan(data_values))) is not np.ma.masked:
                    vertical_levels = y_array["data"][t, :]
                    error_values = positive_error_array[variables[v]][w, t, :]
                    # Add trace
                    # ToDo Include distinction between positive and negative errors
                    fig.add_trace(go.Scatter(
                        x=data_values,
                        y=vertical_levels,
                        error_x=dict(array=error_values, width=0, thickness=0.1),
                        mode='lines',
                        line=dict(width=2, color=clr),
                        name=lgnd),
                        row=rown, col=v + 1,
                    )

                    # ToDo Check formatting options (showlegend, mapbox)
                    fig.update_xaxes({"title": xaxis_label[v]}, row=rown, col=v + 1)
                    fig.update_xaxes({"mirror": True, "ticks": 'outside', "showline": True, "color": "black"},
                                     row=rown, col=v + 1)

                    fig.update_xaxes(ticks="outside")
                    fig.update_yaxes(ticks="outside", row=rown, col=v + 1)
                    # ToDo check tickformat (i.e. tickformat=":.2e")
                    if v == 0:
                        fig.update_yaxes({"title": yaxis_label}, row=rown, col=v + 1)

                    # ToDo add boxes (subplot outlines); earlier add_hline /
                    # add_shape experiments removed.
                    a = fig.layout['xaxis']['domain']
                    b = fig.layout['yaxis']['domain']

    # Set axis labels and title
    fig.update_layout(
        yaxis_title=yaxis_label,
        title=title,
        template="seaborn",  # You can change the template as per your preference
        # ['ggplot2', 'seaborn', 'simple_white', 'plotly', 'plotly_white', 'plotly_dark', 'presentation',
        # 'xgridoff', 'ygridoff', 'gridon', 'none']
        height=max(600, 450 * rown), width=min(max(400 * len(variables), 750), 1400),
    )

    return fig


# Files for testing
# Backscatter and extinction, no error:
# netcdf_file = 'hpb_012_0000598_201810172100_201810172300_20181017oh00_ELDAmwl_v0.0.1.nc'
# Extinction with error:
# netcdf_file = 'hpb_012_0000627_202308240200_202308240400_20230824hpb0200_ELDAmwl_v0.0.1.nc'
# v0.0.3
# pot_012_0000721_202406081503_202406081601_20240608pot150N_ELDAmwl_v0.0.3.nc


try:
    netcdf_file = sys.argv[1]
except IndexError:
    print('Syntax is incorrect, you need to pass the filename of the file to be plotted.\n'
          'Example: ./eldamwl_plot.py hpb_012_0000598_201810172100_201810172300_20181017oh00_ELDAmwl_v0.0.1.nc')
    exit()

# Print help
if netcdf_file == '-h' or netcdf_file == '--h':
    print('Syntax: ./eldamwl_plot.py hpb_012_0000598_201810172100_201810172300_20181017oh00_ELDAmwl_v0.0.1.nc')
    exit()

netcdf_file_path = netcdf_path + netcdf_file

file_exists = exists(netcdf_file_path)
if not file_exists:
    print('The file that you provided does not exist.')
    exit()

# Groups of data in the NetCDF file (low and high resolution)
groups = ['lowres_products', 'highres_products']
groups_names = ['Low resolution', 'High resolution']

# Variables that we want to plot
variable = ['backscatter', 'extinction', 'lidarratio', 'volumedepolarization',
            'particledepolarization']  # ToDo Test particledepolarization when available
# Errors associated to the variables (index-aligned with `variable`)
variable_error = ['error_backscatter', 'error_extinction', 'error_lidarratio',
                  'positive_systematic_error_volumedepolarization', 'error_particledepolarization']
variable_negative_error_volumedepol = 'negative_systematic_error_volumedepolarization'
# ToDo Consider adding other variables:
# cloud_mask (time, level)

# Read global attributes
station_id = read_global_attribute(netcdf_file_path, 'station_ID')
station_location = read_global_attribute(netcdf_file_path, 'location')

# Read data
h = 0  # counts processed groups; user prompts happen only on the first one
for g in range(len(groups)):
    if check_group_presence(netcdf_file_path, groups[g]):
        # Read the altitude variable
        altitude = read_variable_in_group(netcdf_file_path, groups[g], 'altitude')

        # Normalize altitude to km for plotting
        if altitude['metadata']['units'] == 'm':
            altitude['data'] = altitude['data'] / 1000.0
            altitude['metadata']['units'] = 'km'

        # Initialize variables
        data = None
        variables_with_data = None
        variables_axis = None

        # Read the data
        for v in range(len(variable)):
            # Initialize var
            var = None

            try:
                var, dim, met = read_variable_in_group2(netcdf_file_path, groups[g], variable[v])
            except TypeError:
                # read_variable_in_group2 returned None (variable missing)
                print(f"The variable {variable[v]} is not present. Cannot unpack non-iterable NoneType object")

            if var is not None:  # If the variable that we're trying to read exists
                if var.max() != var.min():  # If there is data in the array
                    if variables_with_data is None:
                        variables_with_data = [variable[v]]
                        variables_axis = [met['long_name'] + ' [ ' + met['units'] + ' ]']
                    else:
                        variables_with_data += [variable[v]]
                        variables_axis += [met['long_name'] + ' [ ' + met['units'] + ' ]']

                    var_error, dim_error, met_error = read_variable_in_group2(netcdf_file_path, groups[g],
                                                                              variable_error[v])
                    # Volume depolarization is the only variable with distinct
                    # positive/negative systematic errors
                    if variable[v] == 'volumedepolarization':
                        var_error_negative, dim, met = read_variable_in_group2(netcdf_file_path, groups[g],
                                                                               variable_negative_error_volumedepol)
                    else:
                        var_error_negative = var_error

                    # Add read data to the data variable to have it all grouped
                    if data is None:
                        # Start populating data and data_error
                        data = {variable[v]: var}
                        data_error_positive = {variable[v]: var_error}
                        data_error_negative = {variable[v]: var_error_negative}
                    else:
                        # Add more data to data and data_error
                        data[variable[v]] = var
                        data_error_positive[variable[v]] = var_error
                        data_error_negative[variable[v]] = var_error_negative
                else:
                    print(f'There is no data for {variable[v]} in the {groups_names[g]} group.')

            # Keep the most recent dimensions for this group
            # NOTE(review): assumes at least one variable was read, otherwise
            # `dim` is undefined here.
            dimensions = dim

        # Save lowres and highres in variables to plot them together afterwards
        if groups[g] == 'lowres_products':
            altitude_lr = altitude
            dimensions_lr = dim
            data_lr = data
            data_error_positive_lr = data_error_positive
            data_error_negative_lr = data_error_negative
            variables_with_data_lr = variables_with_data
            variables_axis_lr = variables_axis
        if groups[g] == 'highres_products':
            altitude_hr = altitude
            dimensions_hr = dim
            data_hr = data
            data_error_positive_hr = data_error_positive
            data_error_negative_hr = data_error_negative
            variables_with_data_hr = variables_with_data
            variables_axis_hr = variables_axis

        # If interactive is set to True, ask user if he/she wants to plot different temporal profiles in different rows
        # If interactive is set to False, proceed with default values
        if h == 0:
            # Initialize
            timeinplots = True
            timeinrows = False
            # If interactive (config.py) = True, ask user how he/she wants the plot
            if interactive and len(dimensions['time']) > 1:
                answer = input(
                    f"There are {len(dimensions['time'])} temporal profiles, "
                    f"do you want to plot them on different plots [y/n]? ")
                if answer[0:1].lower() == 'n':
                    timeinplots = False
                if not timeinplots:
                    answer = input(
                        f"There are {len(dimensions['time'])} temporal profiles, "
                        f"do you want to plot them on different rows [y/n]? ")
                    if answer[0:1].lower() == 'y':
                        timeinrows = True

        if variables_with_data is not None:
            timeiter = dimensions['time']
            if timeinplots:
                # Default plotting: one figure per temporal profile
                dimensions_t = dimensions
                for t in range(len(dimensions['time'])):
                    dimensions_t['time'] = [timeiter[t]]
                    for vr in range(len(variables_with_data)):
                        if vr == 0:
                            data_t = {variables_with_data[vr]: data[variables_with_data[vr]][:, t:t + 1, :]}
                        else:
                            data_t[variables_with_data[vr]] = data[variables_with_data[vr]][:, t:t + 1, :]

                    title = station_location + " (" + station_id.upper() + ") - " + str(
                        datetime.fromtimestamp(dimensions_t['time'][0], tz=timezone.utc).strftime(
                            '%d/%m/%Y %H:%M:%S')) + " - ELDA MWL " + groups_names[g]

                    # Timestamps for the filename
                    t0 = datetime.fromtimestamp(timeiter[t], tz=timezone.utc).strftime('%Y%m%d%H%M')
                    if t + 1 < len(timeiter):
                        t1 = datetime.fromtimestamp(
                            timeiter[t + 1], tz=timezone.utc).strftime('%Y%m%d%H%M')
                    else:
                        # Last profile: take the end timestamp from the
                        # fixed-layout input filename
                        t1 = netcdf_file[29:41]

                    filename = "ELDAmwl_" + netcdf_file[0:3].upper() + "_" + t0 + "-" + t1 + "_" + \
                               groups[g] + ".html"

                    fig = plot_vertical_profiles_plotly(
                        variables=variables_with_data,
                        data=data_t,
                        dimensions=dimensions_t,
                        y_array=altitude,
                        positive_error_array=data_error_positive,
                        negative_error_array=data_error_negative,
                        xaxis_label=variables_axis,
                        yaxis_label=altitude['metadata']['long_name'] + ' [' + altitude['metadata']['units'] + ']',
                        title=title,
                        timeinrows=True
                    )

                    # Show the plot (in Jupyter Notebook or Jupyter Lab)
                    # fig.show()

                    # Save the plot
                    fig.write_html(html_path + filename)

            else:
                # Plotting in one plot (even if there are different time profiles)
                fig = plot_vertical_profiles_plotly(
                    variables_with_data,
                    data,
                    dimensions,
                    altitude,
                    data_error_positive,
                    data_error_negative,
                    xaxis_label=variables_axis,
                    yaxis_label=altitude['metadata']['long_name'] + ' [' + altitude['metadata']['units'] + ']',
                    title=station_location + " (" + station_id.upper() + ") - " + str(
                        datetime.fromtimestamp(dimensions['time'][0], tz=timezone.utc).strftime(
                            '%d/%m/%Y')) + " - ELDA MWL " + groups_names[
                              g],
                    timeinrows=timeinrows
                )

                # Show the plot (in Jupyter Notebook or Jupyter Lab)
                # fig.show()

                # Save the plot
                if timeinrows:
                    filename = "ELDAmwl_" + station_id.upper() + "_" + datetime.fromtimestamp(
                        dimensions['time'][0], tz=timezone.utc).strftime('%Y%m%d%H%M') + '-' + datetime.fromtimestamp(
                        dimensions['time'][-1], tz=timezone.utc).strftime('%Y%m%d%H%M') + "_" + groups[
                                   g] + "_timeinrows.html"
                else:
                    filename = "ELDAmwl_" + station_id.upper() + "_" + datetime.fromtimestamp(
                        dimensions['time'][0], tz=timezone.utc).strftime('%Y%m%d%H%M') + '-' + datetime.fromtimestamp(
                        dimensions['time'][-1], tz=timezone.utc).strftime('%Y%m%d%H%M') + "_" + groups[g] + ".html"

                fig.write_html(html_path + filename)

            # Restore the full time list (the per-plot loop replaced it)
            dimensions['time'] = timeiter

        else:
            print(f'There is no data to be plotted in the {groups_names[g]} group.')

        h = h + 1

# Plotting high and low resolution together
if 'variables_with_data_lr' in locals() and 'variables_with_data_hr' in locals():
    if len(variables_with_data_lr) > 0 and len(variables_with_data_hr) > 0:
        print(f"We have low and high resolution data, let's combine it!")

        # Variables with data (union, LR order first)
        variables_with_data = list(variables_with_data_lr)
        variables_with_data.extend(x for x in variables_with_data_hr if x not in variables_with_data)
        print(f'Variables with data: {variables_with_data}')

        # Variables axis
        variables_axis = list(variables_axis_lr)
        variables_axis.extend(x for x in variables_axis_hr if x not in variables_axis)

        # Joining dimensions for both resolutions
        # Wavelength
        wav = list(dimensions_lr['wavelength'])
        dhrw = list(dimensions_hr['wavelength'])
        wav.extend(x for x in dimensions_hr['wavelength'] if x not in wav)
        dimensions['wavelength'] = wav
        # Add LR/HR to the wavelength, e.g. "532.0 (LR)"
        wlr = list(dimensions_lr['wavelength'])
        for r in range(len(wlr)):
            wlr[r] = str(wlr[r]) + ' (LR)'
        whr = list(dhrw)
        for r in range(len(whr)):
            whr[r] = str(whr[r]) + ' (HR)'
        dimensions['wavelength_res'] = wlr + whr
        dimensions_hr['wavelength'] = dhrw
        dimensions['wavelength'] = wav
        # Time
        tim = list(dimensions_lr['time'])
        tim.extend(x for x in dimensions_hr['time'] if x not in tim)
        dimensions['time'] = tim
        # Level
        lev = list(dimensions_lr['level'])
        lev.extend(x for x in dimensions_hr['level'] if x not in lev)
        dimensions['level'] = lev

        # Populate the arrays with data and errors
        for v in variables_with_data:
            # Create arrays to store the joint data; zeros then masked at 0
            # data_var = np.ma.masked_all((len(dimensions['wavelength_res']), len(tim), len(lev)))
            data_var = np.ma.zeros((len(dimensions['wavelength_res']), len(tim), len(lev)))
            data_error_positive_var = np.ma.zeros((len(dimensions['wavelength_res']), len(tim), len(lev)))
            data_error_negative_var = np.ma.zeros((len(dimensions['wavelength_res']), len(tim), len(lev)))
            # ToDo check if needed
            data_var = np.ma.masked_values(data_var, 0)
            data_error_positive_var = np.ma.masked_values(data_error_positive_var, 0)
            data_error_negative_var = np.ma.masked_values(data_error_negative_var, 0)
            for w in range(len(dimensions['wavelength_res'])):
                for t in range(len(dimensions['time'])):
                    # Incorporate data into the main array
                    print(f"{v} - Wavelength: {dimensions['wavelength_res'][w]}. Time: {dimensions['time'][t]}.")

                    # High resolution
                    if dimensions['wavelength_res'][w].find('HR') >= 0:
                        # Is there HR data?
                        if v in data_hr:
                            try:
                                # Find position of w and t in the HR arrays
                                wl = float(dimensions['wavelength_res'][w].split(' ')[0])
                                wp = dimensions_hr['wavelength'].index(wl)
                                wp = dhrw.index(wl)  # ToDo test
                                tp = dimensions_hr['time'].index(dimensions['time'][t])
                                # Check if there is data
                                a = data_hr[v][wp, tp, :]
                                d = np.average(np.ma.masked_array(a, np.isnan(a)))
                                if np.ma.min(d) is not np.ma.masked:
                                    print(f'\tWe have HR data for {v}')
                                    # Save data into common structure
                                    data_var[w, t, :] = a
                                    if np.average(np.ma.masked_array(data_error_positive_hr[v][wp, tp, :], np.isnan(
                                            data_error_positive_hr[v][wp, tp, :]))) is not np.ma.masked:
                                        data_error_positive_var[w, t, :] = data_error_positive_hr[v][wp, tp, :]
                                    if np.average(np.ma.masked_array(data_error_negative_hr[v][wp, tp, :], np.isnan(
                                            data_error_negative_hr[v][wp, tp, :]))) is not np.ma.masked:
                                        data_error_negative_var[w, t, :] = data_error_negative_hr[v][wp, tp, :]
                            except ValueError:
                                # Wavelength/time not present in HR dimensions
                                pass

                    # Low resolution
                    if dimensions['wavelength_res'][w].find('LR') >= 0:
                        # Is there LR data?
                        if v in data_lr:
                            # Find position of w and t in the LR arrays
                            try:
                                wl = float(dimensions['wavelength_res'][w].split(' ')[0])
                                if wl in dimensions_lr['wavelength']:
                                    # Look up the numeric wavelength itself;
                                    # indexing with the merged-wavelength list
                                    # here previously mixed index spaces.
                                    wp = dimensions_lr['wavelength'].index(wl)
                                else:
                                    wp = -1
                                tp = dimensions_lr['time'].index(dimensions['time'][t])
                                if wp > -1 and tp > -1:
                                    # Check if there is data
                                    a = data_lr[v][wp, tp, :]
                                    d = np.average(np.ma.masked_array(a, np.isnan(a)))
                                    if np.ma.min(d) is not np.ma.masked:
                                        print(f'\tWe have LR data for {v}')
                                        # Save data into common structure
                                        data_var[w, t, :] = a
                                        if np.average(np.ma.masked_array(data_error_positive_lr[v][wp, tp, :], np.isnan(
                                                data_error_positive_lr[v][wp, tp, :]))) is not np.ma.masked:
                                            data_error_positive_var[w, t, :] = data_error_positive_lr[v][wp, tp, :]
                                        if np.average(np.ma.masked_array(data_error_negative_lr[v][wp, tp, :], np.isnan(
                                                data_error_negative_lr[v][wp, tp, :]))) is not np.ma.masked:
                                            data_error_negative_var[w, t, :] = data_error_negative_lr[v][wp, tp, :]
                            except ValueError:
                                # i.e. no 1064 wavelength in dimensions_lr
                                pass

            # Store data in the global matrices
            if variables_with_data.index(v) == 0:
                data_all = {v: data_var}
                data_error_positive = {v: data_error_positive_var}
                data_error_negative = {v: data_error_negative_var}
            else:
                data_all[v] = data_var
                data_error_positive[v] = data_error_positive_var
                data_error_negative[v] = data_error_negative_var

        # Plot: one combined figure per temporal profile
        timeiter = dimensions['time']
        dimensions_t = dimensions
        for t in range(len(dimensions['time'])):
            print(f'plotting hr & lr for {timeiter[t]}')
            dimensions_t['time'] = [timeiter[t]]
            for vr in range(len(variables_with_data)):
                if vr == 0:
                    data_t = {variables_with_data[vr]: data_all[variables_with_data[vr]][:, t:t + 1, :]}
                else:
                    data_t[variables_with_data[vr]] = data_all[variables_with_data[vr]][:, t:t + 1, :]

            fig = plot_vertical_profiles_plotly_allres(
                variables=variables_with_data,
                data=data_t,
                dimensions=dimensions_t,
                y_array=altitude,
                positive_error_array=data_error_positive,
                negative_error_array=data_error_negative,
                xaxis_label=variables_axis,
                yaxis_label=altitude['metadata']['long_name'] + ' [' + altitude['metadata']['units'] + ']',
                title=station_location + " (" + station_id.upper() + ") - " + str(
                    datetime.fromtimestamp(dimensions_t['time'][0], tz=timezone.utc).strftime(
                        '%d/%m/%Y %H:%M:%S')) + " - ELDA MWL - High and low resolution")

            # Show the plot (in Jupyter Notebook or Jupyter Lab)
            # fig.show()

            # Save the plot

            # Timestamps for the filename
            t0 = datetime.fromtimestamp(timeiter[t], tz=timezone.utc).strftime('%Y%m%d%H%M')
            if t + 1 < len(timeiter):
                t1 = datetime.fromtimestamp(
                    timeiter[t + 1], tz=timezone.utc).strftime('%Y%m%d%H%M')
            else:
                t1 = netcdf_file[29:41]

            filename = "ELDAmwl_" + netcdf_file[0:3].upper() + "_" + t0 + "-" + t1 + "_all.html"
            fig.write_html(html_path + filename)