diff -r bd2181f8295e -r 75213aef70b8 eldamwl_plot.py --- a/eldamwl_plot.py Tue Jul 02 10:29:59 2024 +0000 +++ b/eldamwl_plot.py Wed Aug 28 11:28:24 2024 +0000 @@ -4,12 +4,22 @@ import plotly.graph_objects as go from plotly.subplots import make_subplots import netCDF4 as nc -from datetime import datetime +from datetime import datetime, timezone import numpy as np import sys from os.path import exists +def check_group_presence(nc_file_path, group_name): + with nc.Dataset(nc_file_path, 'r') as dataset: + if group_name in dataset.groups: + print(f'{group_name} group exists') + return True + else: + print(f'{group_name} group does not exist') + return False + + def read_variable_in_group(nc_file_path, group_name, variable_name): try: with nc.Dataset(nc_file_path, 'r') as dataset: @@ -365,6 +375,8 @@ # netcdf_file = 'hpb_012_0000598_201810172100_201810172300_20181017oh00_ELDAmwl_v0.0.1.nc' # Extinction with error: # netcdf_file = 'hpb_012_0000627_202308240200_202308240400_20230824hpb0200_ELDAmwl_v0.0.1.nc' +# v0.0.3 +# pot_012_0000721_202406081503_202406081601_20240608pot150N_ELDAmwl_v0.0.3.nc try: @@ -405,322 +417,355 @@ station_location = read_global_attribute(netcdf_file_path, 'location') # Read data +h = 0 for g in range(len(groups)): - # Read the altitude variable - altitude = read_variable_in_group(netcdf_file_path, groups[g], 'altitude') + if check_group_presence(netcdf_file_path, groups[g]): + # Read the altitude variable + altitude = read_variable_in_group(netcdf_file_path, groups[g], 'altitude') - if altitude['metadata']['units'] == 'm': - altitude['data'] = altitude['data'] / 1000.0 - altitude['metadata']['units'] = 'km' + if altitude['metadata']['units'] == 'm': + altitude['data'] = altitude['data'] / 1000.0 + altitude['metadata']['units'] = 'km' - # Initialize variables - data = None - variables_with_data = None - variables_axis = None + # Initialize variables + data = None + variables_with_data = None + variables_axis = None - # Read the data - for v in range(len(variable)): - # Initialize var - var = None + # Read the data + for v in range(len(variable)): + # Initialize var + var = None - try: - var, dim, met = read_variable_in_group2(netcdf_file_path, groups[g], variable[v]) - except TypeError: - print(f"The variable {variable[v]} is not present. Cannot unpack non-iterable NoneType object") + try: + var, dim, met = read_variable_in_group2(netcdf_file_path, groups[g], variable[v]) + except TypeError: + print(f"The variable {variable[v]} is not present. 
Cannot unpack non-iterable NoneType object") - if var is not None: # If the variable that we're trying to read exists - if var.max() != var.min(): # If there is data in the array - if variables_with_data is None: - variables_with_data = [variable[v]] - variables_axis = [met['long_name'] + ' [ ' + met['units'] + ' ]'] - else: - variables_with_data += [variable[v]] - variables_axis += [met['long_name'] + ' [ ' + met['units'] + ' ]'] + if var is not None: # If the variable that we're trying to read exists + if var.max() != var.min(): # If there is data in the array + if variables_with_data is None: + variables_with_data = [variable[v]] + variables_axis = [met['long_name'] + ' [ ' + met['units'] + ' ]'] + else: + variables_with_data += [variable[v]] + variables_axis += [met['long_name'] + ' [ ' + met['units'] + ' ]'] - var_error, dim_error, met_error = read_variable_in_group2(netcdf_file_path, groups[g], - variable_error[v]) - if variable[v] == 'volumedepolarization': - var_error_negative, dim, met = read_variable_in_group2(netcdf_file_path, groups[g], - variable_negative_error_volumedepol) - else: - var_error_negative = var_error + var_error, dim_error, met_error = read_variable_in_group2(netcdf_file_path, groups[g], + variable_error[v]) + if variable[v] == 'volumedepolarization': + var_error_negative, dim, met = read_variable_in_group2(netcdf_file_path, groups[g], + variable_negative_error_volumedepol) + else: + var_error_negative = var_error - # Add read data to the data variable to have it all grouped - if data is None: - # Start populating data and data_error - data = {variable[v]: var} - data_error_positive = {variable[v]: var_error} - data_error_negative = {variable[v]: var_error_negative} + # Add read data to the data variable to have it all grouped + if data is None: + # Start populating data and data_error + data = {variable[v]: var} + data_error_positive = {variable[v]: var_error} + data_error_negative = {variable[v]: var_error_negative} + else: + # Add more data to data and data_error + data[variable[v]] = var + data_error_positive[variable[v]] = var_error + data_error_negative[variable[v]] = var_error_negative else: - # Add more data to data and data_error - data[variable[v]] = var - data_error_positive[variable[v]] = var_error - data_error_negative[variable[v]] = var_error_negative - else: - print(f'There is no data for {variable[v]} in the {groups_names[g]} group.') + print(f'There is no data for {variable[v]} in the {groups_names[g]} group.') - dimensions = dim + dimensions = dim - # Save lowres and highres in variables to plot them together afterwards - if groups[g] == 'lowres_products': - altitude_lr = altitude - dimensions_lr = dim - data_lr = data - data_error_positive_lr = data_error_positive - data_error_negative_lr = data_error_negative - variables_with_data_lr = variables_with_data - variables_axis_lr = variables_axis - if groups[g] == 'highres_products': - altitude_hr = altitude - dimensions_hr = dim - data_hr = data - data_error_positive_hr = data_error_positive - data_error_negative_hr = data_error_negative - variables_with_data_hr = variables_with_data - variables_axis_hr = variables_axis + # Save lowres and highres in variables to plot them together afterwards + if groups[g] == 'lowres_products': + altitude_lr = altitude + dimensions_lr = dim + data_lr = data + data_error_positive_lr = data_error_positive + data_error_negative_lr = data_error_negative + variables_with_data_lr = variables_with_data + variables_axis_lr = variables_axis + if groups[g] == 
'highres_products': + altitude_hr = altitude + dimensions_hr = dim + data_hr = data + data_error_positive_hr = data_error_positive + data_error_negative_hr = data_error_negative + variables_with_data_hr = variables_with_data + variables_axis_hr = variables_axis - # If interactive is set to True, ask user if he/she wants to plot different temporal profiles in different rows - # If interactive is set to False, proceed with default values - if g == 0: - # Initialize - timeinplots = True - timeinrows = False - # If interactive (config.py) = True, ask user how he/she wants the plot - if interactive and len(dimensions['time']) > 1: - answer = input( - f"There are {len(dimensions['time'])} temporal profiles, " - f"do you want to plot them on different plots [y/n]? ") - if answer[0:1].lower() == 'n': - timeinplots = False - if not timeinplots: + # If interactive is set to True, ask user if he/she wants to plot different temporal profiles in different rows + # If interactive is set to False, proceed with default values + if h == 0: + # Initialize + timeinplots = True + timeinrows = False + # If interactive (config.py) = True, ask user how he/she wants the plot + if interactive and len(dimensions['time']) > 1: answer = input( f"There are {len(dimensions['time'])} temporal profiles, " - f"do you want to plot them on different rows [y/n]? ") - if answer[0:1].lower() == 'y': - timeinrows = True + f"do you want to plot them on different plots [y/n]? ") + if answer[0:1].lower() == 'n': + timeinplots = False + if not timeinplots: + answer = input( + f"There are {len(dimensions['time'])} temporal profiles, " + f"do you want to plot them on different rows [y/n]? ") + if answer[0:1].lower() == 'y': + timeinrows = True + + if variables_with_data is not None: + timeiter = dimensions['time'] + if timeinplots: + # Default plotting + dimensions_t = dimensions + for t in range(len(dimensions['time'])): + dimensions_t['time'] = [timeiter[t]] + for vr in range(len(variables_with_data)): + if vr == 0: + data_t = {variables_with_data[vr]: data[variables_with_data[vr]][:, t:t + 1, :]} + else: + data_t[variables_with_data[vr]] = data[variables_with_data[vr]][:, t:t + 1, :] + + title = station_location + " (" + station_id.upper() + ") - " + str( + datetime.fromtimestamp(dimensions_t['time'][0], tz=timezone.utc).strftime( + '%d/%m/%Y %H:%M:%S')) + " - ELDA MWL " + groups_names[g] - if variables_with_data is not None: - timeiter = dimensions['time'] - if timeinplots: - # Default plotting - dimensions_t = dimensions - for t in range(len(dimensions['time'])): - dimensions_t['time'] = [timeiter[t]] - for vr in range(len(variables_with_data)): - if vr == 0: - data_t = {variables_with_data[vr]: data[variables_with_data[vr]][:, t:t + 1, :]} + # Timestamps for the filename + t0 = datetime.fromtimestamp(timeiter[t], tz=timezone.utc).strftime('%Y%m%d%H%M') + if t + 1 < len(timeiter): + t1 = datetime.fromtimestamp( + timeiter[t+1], tz=timezone.utc).strftime('%Y%m%d%H%M') else: - data_t[variables_with_data[vr]] = data[variables_with_data[vr]][:, t:t + 1, :] + t1 = netcdf_file[29:41] + + filename = "ELDAmwl_" + netcdf_file[0:3].upper() + "_" + t0 + "-" + t1 + "_" + \ + groups[g] + ".html" + fig = plot_vertical_profiles_plotly( + variables=variables_with_data, + data=data_t, + dimensions=dimensions_t, + y_array=altitude, + positive_error_array=data_error_positive, + negative_error_array=data_error_negative, + xaxis_label=variables_axis, + yaxis_label=altitude['metadata']['long_name'] + ' [' + altitude['metadata']['units'] + ']', + 
title=title, + timeinrows=True + ) + + # Show the plot (in Jupyter Notebook or Jupyter Lab) + # fig.show() + + # Save the plot + fig.write_html(html_path + filename) + + else: + # Plotting in one plot (even if there are different time profiles) fig = plot_vertical_profiles_plotly( - variables=variables_with_data, - data=data_t, - dimensions=dimensions_t, - y_array=altitude, - positive_error_array=data_error_positive, - negative_error_array=data_error_negative, + variables_with_data, + data, + dimensions, + altitude, + data_error_positive, + data_error_negative, xaxis_label=variables_axis, yaxis_label=altitude['metadata']['long_name'] + ' [' + altitude['metadata']['units'] + ']', title=station_location + " (" + station_id.upper() + ") - " + str( - datetime.fromtimestamp(dimensions_t['time'][0]).strftime('%d/%m/%Y %H:%M:%S')) + - " - ELDA MWL " + groups_names[g], - timeinrows=True + datetime.fromtimestamp(dimensions['time'][0], tz=timezone.utc).strftime( + '%d/%m/%Y')) + " - ELDA MWL " + groups_names[ + g], + timeinrows=timeinrows ) # Show the plot (in Jupyter Notebook or Jupyter Lab) # fig.show() # Save the plot - filename = "ELDAmwl_" + netcdf_file[0:3].upper() + "_" + datetime.fromtimestamp( - dimensions['time'][0]).strftime('%Y%m%d-%H%M%S') + "_" + groups[g] + ".html" + if timeinrows: + filename = "ELDAmwl_" + station_id.upper() + "_" + datetime.fromtimestamp( + dimensions['time'][0], tz=timezone.utc).strftime('%Y%m%d%H%M') + '-' + datetime.fromtimestamp( + dimensions['time'][-1], tz=timezone.utc).strftime('%Y%m%d%H%M') + "_" + groups[ + g] + "_timeinrows.html" + else: + filename = "ELDAmwl_" + station_id.upper() + "_" + datetime.fromtimestamp( + dimensions['time'][0], tz=timezone.utc).strftime('%Y%m%d%H%M') + '-' + datetime.fromtimestamp( + dimensions['time'][-1], tz=timezone.utc).strftime('%Y%m%d%H%M') + "_" + groups[g] + ".html" + fig.write_html(html_path + filename) + dimensions['time'] = timeiter + else: - # Plotting in one plot (even if there are different time profiles) - fig = plot_vertical_profiles_plotly( - variables_with_data, - data, - dimensions, - altitude, - data_error_positive, - data_error_negative, + print(f'There is no data to be plotted in the {groups_names[g]} group.') + + h = h + 1 + +# Plotting high and low resolution together +if 'variables_with_data_lr' in locals() and 'variables_with_data_hr' in locals(): + if len(variables_with_data_lr) > 0 and len(variables_with_data_hr) > 0: + print(f'We have low and high resolution data, let''s combine it!') + + # Variables with data + variables_with_data = list(variables_with_data_lr) + variables_with_data.extend(x for x in variables_with_data_hr if x not in variables_with_data) + print(f'Variables with data: {variables_with_data}') + + # Variables axis + variables_axis = list(variables_axis_lr) + variables_axis.extend(x for x in variables_axis_hr if x not in variables_axis) + + # Joining dimensions for both resolutions + # Wavelength + wav = list(dimensions_lr['wavelength']) + dhrw = list(dimensions_hr['wavelength']) + wav.extend(x for x in dimensions_hr['wavelength'] if x not in wav) + dimensions['wavelength'] = wav + # Add LR/HR to the wavelength + wlr = list(dimensions_lr['wavelength']) + for r in range(len(wlr)): + wlr[r] = str(wlr[r]) + ' (LR)' + whr = list(dhrw) + for r in range(len(whr)): + whr[r] = str(whr[r]) + ' (HR)' + dimensions['wavelength_res'] = wlr + whr + dimensions_hr['wavelength'] = dhrw + dimensions['wavelength'] = wav + # Time + tim = list(dimensions_lr['time']) + tim.extend(x for x in 
dimensions_hr['time'] if x not in tim) + dimensions['time'] = tim + # Level + lev = list(dimensions_lr['level']) + lev.extend(x for x in dimensions_hr['level'] if x not in lev) + dimensions['level'] = lev + + # Populate the arrays with data and errors + for v in variables_with_data: + # Create array to store the joint data + # data_var = np.ma.masked_all((len(dimensions['wavelength_res']), len(tim), len(lev))) + data_var = np.ma.zeros((len(dimensions['wavelength_res']), len(tim), len(lev))) + data_error_positive_var = np.ma.zeros((len(dimensions['wavelength_res']), len(tim), len(lev))) + data_error_negative_var = np.ma.zeros((len(dimensions['wavelength_res']), len(tim), len(lev))) + # ToDo check if needed + data_var = np.ma.masked_values(data_var, 0) + data_error_positive_var = np.ma.masked_values(data_error_positive_var, 0) + data_error_negative_var = np.ma.masked_values(data_error_negative_var, 0) + for w in range(len(dimensions['wavelength_res'])): + for t in range(len(dimensions['time'])): + # Incorporate data into the main array + print(f"{v} - Wavelength: {dimensions['wavelength_res'][w]}. Time: {dimensions['time'][t]}.") + + # High resolution + if dimensions['wavelength_res'][w].find('HR') >= 0: + # Is there HR data? + if v in data_hr: + try: + # Find position of w and t + wl = float(dimensions['wavelength_res'][w].split(' ')[0]) + wp = dimensions_hr['wavelength'].index(wl) + wp = dhrw.index(wl) # ToDo test + tp = dimensions_hr['time'].index(dimensions['time'][t]) + # Check if there is data + a = data_hr[v][wp, tp, :] + d = np.average(np.ma.masked_array(a, np.isnan(a))) + if np.ma.min(d) is not np.ma.masked: + print(f'\tWe have HR data for {v}') + # Save data into common structure + data_var[w, t, :] = a + if np.average(np.ma.masked_array(data_error_positive_hr[v][wp, tp, :], np.isnan( + data_error_positive_hr[v][wp, tp, :]))) is not np.ma.masked: + data_error_positive_var[w, t, :] = data_error_positive_hr[v][wp, tp, :] + if np.average(np.ma.masked_array(data_error_negative_hr[v][wp, tp, :], np.isnan( + data_error_negative_hr[v][wp, tp, :]))) is not np.ma.masked: + data_error_negative_var[w, t, :] = data_error_negative_hr[v][wp, tp, :] + except ValueError: + pass + + # Low resolution + if dimensions['wavelength_res'][w].find('LR') >= 0: + # Is there LR data? + if v in data_lr: + # Find position of w and t + try: + wl = float(dimensions['wavelength_res'][w].split(' ')[0]) + # wp = dimensions_hr['wavelength'].index(wl) # ToDo check + if wl in dimensions_lr['wavelength']: + wp = dimensions_lr['wavelength'].index(dimensions['wavelength'][w]) + else: + wp = -1 + # if dimensions['time'][t] in dimensions_lr['time']: + tp = dimensions_lr['time'].index(dimensions['time'][t]) + # else: + # tp = -1 + if wp > -1 and tp > -1: + # Check if there is data + a = data_lr[v][wp, tp, :] + d = np.average(np.ma.masked_array(a, np.isnan(a))) + if np.ma.min(d) is not np.ma.masked: + print(f'\tWe have LR data for {v}') + # Save data into common structure + data_var[w, t, :] = a + if np.average(np.ma.masked_array(data_error_positive_lr[v][wp, tp, :], np.isnan( + data_error_positive_lr[v][wp, tp, :]))) is not np.ma.masked: + data_error_positive_var[w, t, :] = data_error_positive_lr[v][wp, tp, :] + if np.average(np.ma.masked_array(data_error_negative_lr[v][wp, tp, :], np.isnan( + data_error_negative_lr[v][wp, tp, :]))) is not np.ma.masked: + data_error_negative_var[w, t, :] = data_error_negative_lr[v][wp, tp, :] + except ValueError: + # i.e. 
no 1064 wavelength in dimensions_lr + pass + # except IndexError: + # pass + + # Store data in the global matrices + if variables_with_data.index(v) == 0: + data_all = {v: data_var} + data_error_positive = {v: data_error_positive_var} + data_error_negative = {v: data_error_negative_var} + else: + data_all[v] = data_var + data_error_positive[v] = data_error_positive_var + data_error_negative[v] = data_error_negative_var + + # Plot + timeiter = dimensions['time'] + # Default plotting + dimensions_t = dimensions + for t in range(len(dimensions['time'])): + print(f'plotting hr & lr for {timeiter[t]}') + dimensions_t['time'] = [timeiter[t]] + for vr in range(len(variables_with_data)): + if vr == 0: + data_t = {variables_with_data[vr]: data_all[variables_with_data[vr]][:, t:t + 1, :]} + else: + data_t[variables_with_data[vr]] = data_all[variables_with_data[vr]][:, t:t + 1, :] + + fig = plot_vertical_profiles_plotly_allres( + variables=variables_with_data, + data=data_t, + dimensions=dimensions_t, + y_array=altitude, + positive_error_array=data_error_positive, + negative_error_array=data_error_negative, xaxis_label=variables_axis, yaxis_label=altitude['metadata']['long_name'] + ' [' + altitude['metadata']['units'] + ']', title=station_location + " (" + station_id.upper() + ") - " + str( - datetime.fromtimestamp(dimensions['time'][0]).strftime('%d/%m/%Y')) + " - ELDA MWL " + groups_names[ - g], - timeinrows=timeinrows - ) + datetime.fromtimestamp(dimensions_t['time'][0], tz=timezone.utc).strftime( + '%d/%m/%Y %H:%M:%S')) + " - ELDA MWL - High and low resolution") # Show the plot (in Jupyter Notebook or Jupyter Lab) # fig.show() # Save the plot - if timeinrows: - filename = "ELDAmwl_" + station_id.upper() + "_" + datetime.fromtimestamp( - dimensions['time'][0]).strftime('%Y%m%d%H%M%S') + '-' + datetime.fromtimestamp( - dimensions['time'][-1]).strftime('%Y%m%d%H%M%S') + "_" + groups[g] + "_timeinrows.html" - else: - filename = "ELDAmwl_" + station_id.upper() + "_" + datetime.fromtimestamp( - dimensions['time'][0]).strftime('%Y%m%d%H%M%S') + '-' + datetime.fromtimestamp( - dimensions['time'][-1]).strftime('%Y%m%d%H%M%S') + "_" + groups[g] + ".html" - - fig.write_html(html_path + filename) - - else: - print(f'There is no data to be plotted in the {groups_names[g]} group.') - -# Plotting high and low resolution together -if len(variables_with_data_lr) > 0 and len(variables_with_data_hr) > 0: - print(f'We have low and high resolution data, let''s combine it!') - - # Variables with data - variables_with_data = list(variables_with_data_lr) - variables_with_data.extend(x for x in variables_with_data_hr if x not in variables_with_data) - print(f'Variables with data: {variables_with_data}') - - # Variables axis - variables_axis = list(variables_axis_lr) - variables_axis.extend(x for x in variables_axis_hr if x not in variables_axis) - - # Joining dimensions for both resolutions - # Wavelength - wav = list(dimensions_lr['wavelength']) - wav.extend(x for x in dimensions_hr['wavelength'] if x not in wav) - dimensions['wavelength'] = wav - # Add LR/HR to the wavelength - wlr = list(dimensions_lr['wavelength']) - for r in range(len(wlr)): - wlr[r] = str(wlr[r]) + ' (LR)' - whr = list(dimensions_hr['wavelength']) - for r in range(len(whr)): - whr[r] = str(whr[r]) + ' (HR)' - dimensions['wavelength_res'] = wlr + whr - # Time - tim = list(dimensions_lr['time']) - tim.extend(x for x in dimensions_hr['time'] if x not in tim) - dimensions['time'] = tim - # Level - lev = list(dimensions_lr['level']) - lev.extend(x for x 
in dimensions_hr['level'] if x not in lev) - dimensions['level'] = lev - - # Populate the arrays with data and errors - for v in variables_with_data: - # Create array to store the joint data - # data_var = np.ma.masked_all((len(dimensions['wavelength_res']), len(tim), len(lev))) - data_var = np.ma.zeros((len(dimensions['wavelength_res']), len(tim), len(lev))) - data_error_positive_var = np.ma.zeros((len(dimensions['wavelength_res']), len(tim), len(lev))) - data_error_negative_var = np.ma.zeros((len(dimensions['wavelength_res']), len(tim), len(lev))) - # ToDo check if needed - data_var = np.ma.masked_values(data_var, 0) - data_error_positive_var = np.ma.masked_values(data_error_positive_var, 0) - data_error_negative_var = np.ma.masked_values(data_error_negative_var, 0) - for w in range(len(dimensions['wavelength_res'])): - for t in range(len(dimensions['time'])): - # Incorporate data into the main array - print(f"{v} - Wavelength: {dimensions['wavelength_res'][w]}. Time: {dimensions['time'][t]}.") - # High resolution - if dimensions['wavelength_res'][w].find('HR') >= 0: - # Is there HR data? - if v in data_hr: - try: - # Find position of w and t - wl = float(dimensions['wavelength_res'][w].split(' ')[0]) - wp = dimensions_hr['wavelength'].index(wl) - tp = dimensions_hr['time'].index(dimensions['time'][t]) - # Check if there is data - a = data_hr[v][wp, tp, :] - d = np.average(np.ma.masked_array(a, np.isnan(a))) - if np.ma.min(d) is not np.ma.masked: - print(f'\tWe have HR data for {v}') - # Save data into common structure - data_var[w, t, :] = a - if np.average(np.ma.masked_array(data_error_positive_hr[v][wp, tp, :], np.isnan( - data_error_positive_hr[v][wp, tp, :]))) is not np.ma.masked: - data_error_positive_var[w, t, :] = data_error_positive_hr[v][wp, tp, :] - if np.average(np.ma.masked_array(data_error_negative_hr[v][wp, tp, :], np.isnan( - data_error_negative_hr[v][wp, tp, :]))) is not np.ma.masked: - data_error_negative_var[w, t, :] = data_error_negative_hr[v][wp, tp, :] - except ValueError: - pass + # Timestamps for the filename + t0 = datetime.fromtimestamp(timeiter[t], tz=timezone.utc).strftime('%Y%m%d%H%M') + if t + 1 < len(timeiter): + t1 = datetime.fromtimestamp( + timeiter[t + 1], tz=timezone.utc).strftime('%Y%m%d%H%M') + else: + t1 = netcdf_file[29:41] - # Low resolution - if dimensions['wavelength_res'][w].find('LR') >= 0: - # Is there LR data? 
- if v in data_lr: - # Find position of w and t - try: - wl = float(dimensions['wavelength_res'][w].split(' ')[0]) - wp = dimensions_hr['wavelength'].index(wl) - if wl in dimensions_lr['wavelength']: - wp = dimensions_lr['wavelength'].index(dimensions['wavelength'][w]) - else: - wp = -1 - # if dimensions['time'][t] in dimensions_lr['time']: - tp = dimensions_lr['time'].index(dimensions['time'][t]) - # else: - # tp = -1 - if wp > -1 and tp > -1: - # Check if there is data - a = data_lr[v][wp, tp, :] - d = np.average(np.ma.masked_array(a, np.isnan(a))) - if np.ma.min(d) is not np.ma.masked: - print(f'\tWe have LR data for {v}') - # Save data into common structure - data_var[w, t, :] = a - if np.average(np.ma.masked_array(data_error_positive_lr[v][wp, tp, :], np.isnan( - data_error_positive_lr[v][wp, tp, :]))) is not np.ma.masked: - data_error_positive_var[w, t, :] = data_error_positive_lr[v][wp, tp, :] - if np.average(np.ma.masked_array(data_error_negative_lr[v][wp, tp, :], np.isnan( - data_error_negative_lr[v][wp, tp, :]))) is not np.ma.masked: - data_error_negative_var[w, t, :] = data_error_negative_lr[v][wp, tp, :] - except ValueError: - # i.e. no 1064 wavelength in dimensions_lr - pass - # except IndexError: - # pass - - # Store data in the global matrices - if variables_with_data.index(v) == 0: - data_all = {v: data_var} - data_error_positive = {v: data_error_positive_var} - data_error_negative = {v: data_error_negative_var} - else: - data_all[v] = data_var - data_error_positive[v] = data_error_positive_var - data_error_negative[v] = data_error_negative_var - - # Plot - timeiter = dimensions['time'] - # Default plotting - dimensions_t = dimensions - for t in range(len(dimensions['time'])): - print(f'plotting hr & lr for {timeiter[t]}') - dimensions_t['time'] = [timeiter[t]] - for vr in range(len(variables_with_data)): - if vr == 0: - data_t = {variables_with_data[vr]: data_all[variables_with_data[vr]][:, t:t + 1, :]} - else: - data_t[variables_with_data[vr]] = data_all[variables_with_data[vr]][:, t:t + 1, :] - - fig = plot_vertical_profiles_plotly_allres( - variables=variables_with_data, - data=data_t, - dimensions=dimensions_t, - y_array=altitude, - positive_error_array=data_error_positive, - negative_error_array=data_error_negative, - xaxis_label=variables_axis, - yaxis_label=altitude['metadata']['long_name'] + ' [' + altitude['metadata']['units'] + ']', - title=station_location + " (" + station_id.upper() + ") - " + str(datetime.fromtimestamp(dimensions_t['time'][0]).strftime('%d/%m/%Y %H:%M:%S')) + " - ELDA MWL - High and low resolution") - - # Show the plot (in Jupyter Notebook or Jupyter Lab) - fig.show() - - # Save the plot - filename = "ELDAmwl_" + netcdf_file[0:3].upper() + "_" + datetime.fromtimestamp( - dimensions['time'][0]).strftime('%Y%m%d-%H%M%S') + "_all.html" - fig.write_html(html_path + filename) - + filename = "ELDAmwl_" + netcdf_file[0:3].upper() + "_" + t0 + "-" + t1 + "_all.html" + fig.write_html(html_path + filename)
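
Reviewer sketch (not part of the patch): a recurring change in this diff is replacing naive datetime.fromtimestamp(...) calls with timezone-aware ones when building plot titles and output filenames. A minimal standalone illustration follows, assuming the ELDAmwl time values are Unix epoch seconds referenced to UTC; the sample epoch value and variable names are hypothetical and only show why the tz=timezone.utc argument keeps filenames identical regardless of the local timezone of the machine running the script.

    # Illustrative sketch only; the epoch value below is a hypothetical sample.
    from datetime import datetime, timezone

    epoch_seconds = 1717858980  # hypothetical profile start time (seconds since 1970-01-01 UTC)

    # Naive conversion: the label depends on the local timezone of the host running the script.
    local_label = datetime.fromtimestamp(epoch_seconds).strftime('%Y%m%d%H%M')

    # Timezone-aware conversion, as used throughout the patch: same label on every machine.
    utc_label = datetime.fromtimestamp(epoch_seconds, tz=timezone.utc).strftime('%Y%m%d%H%M')

    print(local_label, utc_label)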