Major changes in Licel-related classes, to better expose all information to top-level classes.

author      Iannis <i.binietoglou@impworks.gr>
date        Sat, 06 Jan 2018 10:08:46 +0200
changeset   116:d9703af687aa
parent      115:c2ecdcc2bb88
child       117:44a43b0e452f

* Duration of timescales is read from the Licel files.
* Fixed bugs related to the ordering of channels when reading from Licel files.
* Fixed the V to mV conversion.
* First working conversion to a DIVA draft format.

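Below is a minimal usage sketch of the new DIVA export path. It is illustrative only and not part of this changeset: the DivaLicelMeasurement class, the glob pattern, and the output file name are assumptions.

import glob

from atmospheric_lidar.diva import DivaOutput
from atmospheric_lidar.licel import LicelLidarMeasurement


class DivaLicelMeasurement(DivaOutput, LicelLidarMeasurement):
    """ Hypothetical class combining Licel input with the draft DIVA output. """
    pass


licel_files = glob.glob('/data/rali/2018/01/06/RM*')  # Hypothetical raw Licel files
measurement = DivaLicelMeasurement(licel_files)

# Channel metadata (detector, trigger delay, etc.) is read from the YAML parameter file
measurement.save_as_diva_netcdf('diva_draft.nc',
                                'atmospheric_lidar/systems/rali/rali_diva_parameters.yaml')
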
atmospheric_lidar/diva.py
atmospheric_lidar/generic.py
atmospheric_lidar/licel.py
atmospheric_lidar/licel_depol.py
atmospheric_lidar/systems/adam2017/adam2017.py
atmospheric_lidar/systems/rali/rali.py
atmospheric_lidar/systems/rali/rali_diva_parameters.yaml
--- a/atmospheric_lidar/diva.py	Thu Jan 04 14:47:36 2018 +0200
+++ b/atmospheric_lidar/diva.py	Sat Jan 06 10:08:46 2018 +0200
@@ -4,20 +4,23 @@
 separately not to interfere with normal development.
 """
 import netCDF4 as netcdf
-import pyyaml
+import yaml
 import datetime
 import os
+import numpy as np
+
+import pytz
 
 from .generic import BaseLidarMeasurement
 
 
 class DivaOutput(BaseLidarMeasurement):
 
-    def save_as_diva(self, output_path, parameter_file):
+    def save_as_diva_netcdf(self, output_path, parameter_file):
         """ Save the current data in the 'draft' DIVA format. """
 
         with open(parameter_file, 'r') as f:
-            parameters = pyyaml.load(f)
+            parameters = yaml.load(f)
 
         global_parameters = parameters['global_parameters']  # Shortcut
         channels = parameters['channels']
@@ -96,28 +99,29 @@
             for channel_name, channel_parameters in channels.iteritems():
 
                 if channel_name not in self.channels.keys():
-                    raise ValueError('Channle name not one of {0}: {1}'.format(self.channels.keys()), channel_name)
+                    raise ValueError('Channel name not one of {0}: {1}'.format(self.channels.keys(), channel_name))
 
                 channel = self.channels[channel_name]
 
-                group_name = "channel_{0}".format(channel_name)  # Give channels groups a standard name
+                group_name = "channel_{0}".format(channel_name.replace('.', '_'))  # Give channels groups a standard name
+
                 g = f.createGroup(group_name)
                 g.long_name = channel_parameters['long_name']
-                g.detector_manufacturer = channel_parameters['detector_manifacturer']    # Optional
+                g.detector_manufacturer = channel_parameters['detector_manufacturer']    # Optional
                 g.detector_model = channel_parameters['detector_model']
                 g.daq_manufacturer = channel_parameters['daq_manufacturer']
                 g.daq_model = channel_parameters['daq_model']
 
                 # Dimensions
                 g.createDimension('profile', size=None)  # Infinite dimension
-                g.createDimension('range', len(self.range))
+                g.createDimension('range', len(channel.z))
 
                 # Variables
-                name = g.createVariable('channel_id', dimensions=('name_strlen',))
+                name = g.createVariable('channel_id', 'c', dimensions=('name_strlen',))
                 name.cf_role = 'timeseries_id'
                 name.long_name = 'channel identification'
 
-                laser_rep_rate = g.createVariable('laser_repetition_rate')
+                laser_rep_rate = g.createVariable('laser_repetition_rate', 'f4')
                 laser_rep_rate.long_name = 'nominal laser repetition rate'
                 laser_rep_rate.units = 'Hz'
 
@@ -166,44 +170,42 @@
                 polarizer_angle.units = 'degree'
                 polarizer_angle.comments = 'Optional'
 
-                dead_time_model = g.createVariable('dead_time_model', datatype='b')
-                dead_time_model.long_name = 'optimal dead time model of detection system'
-                dead_time_model.flag_values = '0b 1b 2b'
-                dead_time_model.flag_meanings = 'paralyzable non_paralyzable other'
+                if not channel.is_analog:
+                    dead_time_model = g.createVariable('dead_time_model', datatype='b')
+                    dead_time_model.long_name = 'optimal dead time model of detection system'
+                    dead_time_model.flag_values = '0b 1b 2b'
+                    dead_time_model.flag_meanings = 'paralyzable non_paralyzable other'
 
-                dead_time = g.createVariable('dead_time', datatype='f8')
-                dead_time.long_name = 'dead time value'
-                dead_time.units = 'ns'
-                dead_time.comment = "Manufacturer. Source of the value."
+                    dead_time = g.createVariable('dead_time', datatype='f8')
+                    dead_time.long_name = 'dead time value'
+                    dead_time.units = 'ns'
+                    dead_time.comment = "Manufacturer. Source of the value."
 
                 bin_length = g.createVariable('bin_length', datatype='f4')
                 bin_length.long_name = "time duration of each bin"
                 bin_length.units = 'ns'
 
-                pulse_delay = g.createVariable('pulse_delay', datatype='f4')
-                pulse_delay.long_name = "time difference between channel trigger and pulse emission"
-                pulse_delay.units = 'ns'
-                pulse_delay.comment = "Positive indicates pre-trigger channel. Negative a channle trigger delay."
+                if channel.is_analog:
+                    adc_bits = g.createVariable('adc_bits', datatype='i4')
+                    adc_bits.long_name = 'analog-to-digital converter bits'
+                    adc_bits.coordinates = "time"
 
                 detector_voltage = g.createVariable('detector_voltage', datatype='f4', dimensions=('profile',), zlib=True)
                 detector_voltage.long_name = 'detector voltage'
                 detector_voltage.units = 'V'
                 detector_voltage.coordinates = "time"
 
-                discriminator = g.createVariable('discriminator', datatype='f8', dimensions=('profiles',))
-                discriminator.long_name = 'discriminator level'
-                discriminator.units = ''
+                if channel.is_photon_counting:
+                    discriminator = g.createVariable('discriminator', datatype='f8', dimensions=('profile',))
+                    discriminator.long_name = 'discriminator level'
+                    discriminator.units = ''
 
-                adc_range = g.createVariable('adc_range', datatype='f4', dimensions=('profile',),
-                                                    zlib=True)
-                adc_range.long_name = 'analog-to-digital converter range'
-                adc_range.units = 'mV'
-                adc_range.coordinates = "time"
-
-                adc_bits = g.createVariable('adc_bits', datatype='i4', dimensions=('profile',),
-                                             zlib=True)
-                adc_bits.long_name = 'analog-to-digital converter bits'
-                adc_bits.coordinates = "time"
+                if channel.is_analog:
+                    adc_range = g.createVariable('adc_range', datatype='f4', dimensions=('profile',),
+                                                        zlib=True)
+                    adc_range.long_name = 'analog-to-digital converter range'
+                    adc_range.units = 'mV'
+                    adc_range.coordinates = "time"
 
                 pulses = g.createVariable('pulses', datatype='i4', dimensions=('profile',),
                                           zlib=True)
@@ -214,10 +216,10 @@
                 nd_filter.long_name = "neutral density filter optical depth "
                 nd_filter.coordinates = "time"
 
-                emission_delay = g.createVariable('emission_delay', datatype='f4')
-                emission_delay.long_name = "pulse emission difference from channel trigger"
-                emission_delay.units = 'ns'
-                emission_delay.comments = 'Positive values for pre-trigger systems. Negative for trigger delay.'
+                trigger_delay = g.createVariable('trigger_delay', datatype='f4')
+                trigger_delay.long_name = "channel trigger difference from pulse emission"
+                trigger_delay.units = 'ns'
+                trigger_delay.comments = 'Negative values for pre-trigger systems.'
 
                 time = g.createVariable('time', datatype='f8', dimensions=('profile',),
                                         zlib=True)
@@ -225,29 +227,36 @@
                 time.units = "seconds since 1970-01-01 00:00:00"
                 time.standard_name = "time"
                 time.bounds = "time_bnds"
-                g.createVariable('time_bnds', datatype='f8', dimensions=('profile', 'nv'), zlib=True)
+                time_bounds = g.createVariable('time_bnds', datatype='f8', dimensions=('profile', 'nv'), zlib=True)
 
-                bin_time = g.createVariable('bin_time', datatype='f4', dimensions=('range',),
-                                             zlib=True)  # Use name 'bin_range' to avoid conflict with built-in range
-                bin_time.long_name = 'bin start time since trigger'
+                bin_time = g.createVariable('bin_time', datatype='f4', dimensions=('range',), zlib=True)
+                bin_time.long_name = 'bin start time since channel trigger'
                 bin_time.units = "ns"
 
-                signal = g.createVariable('signal', datatype='i4', dimensions=('profile', 'range'),
+                if channel.is_analog:
+                    signal_units = 'mV'
+                    signal_datatype = 'f8'
+                else:
+                    signal_units = 'counts'
+                    signal_datatype = 'i8'
+
+                signal = g.createVariable('signal', datatype=signal_datatype, dimensions=('profile', 'range'),
                                           zlib=True)
                 signal.long_name = 'signal'
-                signal.units = channel_parameters['signal_units']
+                signal.units = signal_units
                 signal.coordinates = "time"
                 signal.ancillary_variables = "signal_stddev"
 
                 # If measured
-                signal_stddev = g.createVariable('signal', datatype='i4', dimensions=('profile', 'range'),
+                signal_stddev = g.createVariable('signal_stddev', datatype=signal_datatype, dimensions=('profile', 'range'),
                                           zlib=True)
                 signal_stddev.long_name = 'signal standard deviation'
-                signal_stddev.units = channel_parameters['signal_units']
+                signal_stddev.units = signal_units
                 signal_stddev.coordinates = "time"
+                signal_stddev.comments = "Only if measured. Should be removed if not."
 
                 # Assign variables
-                name[:] = channel_name
+                name[:len(channel_name)] = channel_name
                 laser_rep_rate[:] = channel_parameters['laser_repetition_rate']
                 emission_energy[:] = channel_parameters['emission_energy']
                 emission_pol[:] = self._emission_pol_flag(channel_parameters['emission_polarization'])
@@ -256,14 +265,34 @@
                 detection_mode[:] = self._detection_mode_flag(channel_parameters['detection_mode'])
                 detection_fwhm[:] = channel_parameters['filter_fwhm']
                 detection_pol[:] = self._detection_pol_flag(channel_parameters['detection_polarization'])
-                polarizer_angle[:] = channel_parameters['polarizer_angle']  # For now, assumed constant.
-                dead_time_model[:] = self._deadtime_model_flag(channel_parameters['dead_time_model'])
-                dead_time[:] = channel_parameters['dead_time']
+                polarizer_angle[:] = channel_parameters['polarizer_angle'] * np.ones(len(channel.time))  # For now, assumed constant.
+
+                if channel.is_photon_counting:
+                    dead_time_model[:] = self._deadtime_model_flag(channel_parameters['dead_time_model'])
+                    dead_time[:] = channel_parameters['dead_time']
+
                 bin_length[:] = channel_parameters['bin_length']
-                pulse_delay[:] = channel_parameters['pulse_delay']
+                trigger_delay[:] = channel_parameters['trigger_delay']
+
+                detector_voltage[:] = channel.hv
+
+                if channel.is_analog:
+                    adc_range[:] = channel.discriminator
+                    adc_bits[:] = channel.adcbits
+                else:
+                    discriminator[:] = channel.discriminator
 
-                detector_voltage[:] = channel
+                pulses[:] = channel.laser_shots
 
+                epoch = datetime.datetime(1970, 1, 1, tzinfo=pytz.utc)  # Must match the 'seconds since 1970-01-01' units
+                seconds_since_epoch = [(t - epoch).total_seconds() for t in channel.time]
+                time[:] = seconds_since_epoch
+                time_bounds[:, 0] = seconds_since_epoch
+                time_bounds[:, 1] = seconds_since_epoch + channel.get_duration()
+
+                bin_time[:] = channel.binwidth * 1e9 * np.arange(len(channel.z))  # binwidth is in s, bin_time in ns
+
+                signal[:] = channel.matrix
 
     def _deadtime_model_flag(self, model_str):
         """ Convert dead-time model string to byte flag.
@@ -364,11 +393,11 @@
         : int
            Byte encoding of polarization state
         """
-        choices = {'linaer': 0,
+        choices = {'linear': 0,
                    'circular': 1,
                    'none': 2}
 
-        if  pol_string not in choices.keys():
+        if pol_string not in choices.keys():
             raise ValueError('Emission polarization not one of {0}: {1}'.format(choices.keys(), pol_string))
 
         return choices[pol_string]
--- a/atmospheric_lidar/generic.py	Thu Jan 04 14:47:36 2018 +0200
+++ b/atmospheric_lidar/generic.py	Sat Jan 06 10:08:46 2018 +0200
@@ -1,6 +1,7 @@
 import datetime
 import logging
 from operator import itemgetter
+import itertools
 
 import matplotlib as mpl
 import netCDF4 as netcdf
@@ -87,7 +88,7 @@
         stop_time = []
         points = []
         all_time_scales = []
-        channel_name_list = []
+        channel_names = []
 
         # Get the information from all the channels
         for channel_name, channel in self.channels.items():
@@ -96,7 +97,7 @@
             stop_time.append(channel.stop_time)
             points.append(channel.points)
             all_time_scales.append(channel.time)
-            channel_name_list.append(channel_name)
+            channel_names.append(channel_name)
 
         # Find the unique time scales used in the channels
         time_scales = set(all_time_scales)
@@ -112,6 +113,17 @@
         # self.dimensions['scan angles'] = 1
         self.dimensions['nb_of_time_scales'] = len(time_scales)
 
+        # Make dictionaries to match time scales and channels
+        channel_timescales = {}
+        timescale_channels = dict((ts, []) for ts in time_scales)
+        for (channel_name, current_time_scale) in zip(channel_names, all_time_scales):
+            for (ts, n) in zip(time_scales, range(len(time_scales))):
+                if current_time_scale == ts:
+                    channel_timescales[channel_name] = n
+                    timescale_channels[ts].append(channel_name)
+
+        self.variables['id_timescale'] = channel_timescales
+
         # Update the variables dictionary
         # Write time scales in seconds
         raw_data_start_time = []
@@ -122,44 +134,16 @@
             raw_start_in_seconds = np.array([t.seconds for t in raw_start_time])  # Convert in seconds
             raw_data_start_time.append(raw_start_in_seconds)  # And add to the list
 
-            duration = self._get_duration(raw_start_in_seconds)
+            channel_name = timescale_channels[current_time_scale][0]
+            channel = self.channels[channel_name]
 
             # TODO: Define stop time for each measurement based on real data
-            raw_stop_in_seconds = raw_start_in_seconds + duration
+            raw_stop_in_seconds = raw_start_in_seconds + channel.get_duration()
             raw_data_stop_time.append(raw_stop_in_seconds)
 
         self.variables['Raw_Data_Start_Time'] = raw_data_start_time
         self.variables['Raw_Data_Stop_Time'] = raw_data_stop_time
 
-        # Make a dictionary to match time scales and channels
-        channel_timescales = []
-        for (channel_name, current_time_scale) in zip(channel_name_list, all_time_scales):
-            for (ts, n) in zip(time_scales, range(len(time_scales))):
-                if current_time_scale == ts:
-                    channel_timescales.append([channel_name, n])
-        self.variables['id_timescale'] = dict(channel_timescales)
-
-    def _get_duration(self, raw_start_in_seconds):
-        """ 
-        Return the duration for a given time scale. 
-       
-        In some files (e.g. Licel) this can be specified from the files themselves. 
-        In others this must be guessed.
-        
-        Parameters
-        ----------
-        raw_start_in_seconds : array
-           An array of floats, representing the start time of each measurement profile.
-        
-        Returns
-        -------
-        duration : float
-           Guess of the duration of each bin in the timescale
-        """
-        duration = np.diff(raw_start_in_seconds)[0]
-
-        return duration
-
     def subset_by_channels(self, channel_subset):
         """ 
         Create a measurement object containing only a subset of  channels. 
@@ -205,8 +189,8 @@
         common_channels = list(set(scc_channels).intersection(self.channels.keys()))
 
         if not common_channels:
-            logging.debug("Config channels: %s." % ','.join(scc_channels))
-            logging.debug("Licel channels: %s." % ','.join(self.channels.keys()))
+            logger.debug("Config channels: %s." % ','.join(scc_channels))
+            logger.debug("Licel channels: %s." % ','.join(self.channels.keys()))
             raise ValueError('No common channels between licel and configuration files.')
 
         return self.subset_by_channels(common_channels)
@@ -362,20 +346,20 @@
         filename : str
            Output file name. If None, <measurement_id>.nc will be used. 
         """
-        params = self.extra_netcdf_parameters
+        parameters = self.extra_netcdf_parameters
 
         # Guess measurement ID if none is provided
         if 'Measurement_ID' not in self.info:
             self.set_measurement_id()
 
         # Check if temperature and pressure are defined
-        for parameter in ['Temperature', 'Pressure']:
-            stored_value = self.info.get(parameter, None)
+        for attribute in ['Temperature', 'Pressure']:
+            stored_value = self.info.get(attribute, None)
             if stored_value is None:
                 try:
                     self.set_PT()
                 except:
-                    raise ValueError('A value needs to be specified for %s' % parameter)
+                    raise ValueError('No value specified for %s' % attribute)
 
         if not filename:
             filename = "%s.nc" % self.info['Measurement_ID']
@@ -388,25 +372,23 @@
                       'nb_of_time_scales': 1,
                       'scan_angles': 1, }  # Mandatory dimensions. Time bck not implemented
 
-        global_att = {'Measurement_ID': None,
-                      'RawData_Start_Date': None,
-                      'RawData_Start_Time_UT': None,
-                      'RawData_Stop_Time_UT': None,
-                      'RawBck_Start_Date': None,
-                      'RawBck_Start_Time_UT': None,
-                      'RawBck_Stop_Time_UT': None,
-                      'Sounding_File_Name': None,
-                      'LR_File_Name': None,
-                      'Overlap_File_Name': None,
-                      'Location': None,
-                      'System': None,
-                      'Latitude_degrees_north': None,
-                      'Longitude_degrees_east': None,
-                      'Altitude_meter_asl': None}
+        global_attributes = {'Measurement_ID': None,
+                             'RawData_Start_Date': None,
+                             'RawData_Start_Time_UT': None,
+                             'RawData_Stop_Time_UT': None,
+                             'RawBck_Start_Date': None,
+                             'RawBck_Start_Time_UT': None,
+                             'RawBck_Stop_Time_UT': None,
+                             'Sounding_File_Name': None,
+                             'LR_File_Name': None,
+                             'Overlap_File_Name': None,
+                             'Location': None,
+                             'System': None,
+                             'Latitude_degrees_north': None,
+                             'Longitude_degrees_east': None,
+                             'Altitude_meter_asl': None}
 
-        channel_variables = self._get_scc_mandatory_channel_variables()
-
-        channels = self.channels.keys()
+        channel_names = self.channels.keys()
 
         input_values = dict(self.dimensions, **self.variables)
 
@@ -416,16 +398,17 @@
         input_values['RawData_Start_Time_UT'] = self.info['start_time'].strftime('%H%M%S')
         input_values['RawData_Stop_Time_UT'] = self.info['stop_time'].strftime('%H%M%S')
 
-        # Add some optional global attributes
-        input_values['System'] = params.general_parameters['System']
-        input_values['Latitude_degrees_north'] = params.general_parameters['Latitude_degrees_north']
-        input_values['Longitude_degrees_east'] = params.general_parameters['Longitude_degrees_east']
-        input_values['Altitude_meter_asl'] = params.general_parameters['Altitude_meter_asl']
+        # Add any global attributes provided by the subclass
+        for attribute in self._get_custom_global_attributes():
+            input_values[attribute["name"]] = attribute["value"]
 
-        # Override general parameters with those provided by any subclass
-        # in a custom fashion
-        for param in self.get_custom_general_parameters():
-            input_values[param["name"]] = param["value"]
+        # Override global attributes, if provided in the settings file.
+        for attribute_name in ['System', 'Latitude_degrees_north', 'Longitude_degrees_east', 'Altitude_meter_asl']:
+            if attribute_name in parameters.general_parameters.keys():
+                if attribute_name in input_values:
+                    logger.info("Overriding {0} attribute, using the value provided in the parameter file.".format(
+                        attribute_name))
+                input_values[attribute_name] = parameters.general_parameters[attribute_name]
 
         # Open a netCDF file. The format is specified in the beginning of this module.
         with netcdf.Dataset(filename, 'w', format=NETCDF_FORMAT) as f:
@@ -436,69 +419,70 @@
                 f.createDimension(d, v)
 
             # Create global attributes
-            for (attrib, value) in global_att.iteritems():
+            for (attrib, value) in global_attributes.iteritems():
                 val = input_values.pop(attrib, value)
                 if val:
                     setattr(f, attrib, val)
 
             # Variables
-
             # Write either channel_id or string_channel_id in the file
-            first_channel_keys = params.channel_parameters.items()[0][1].keys()
+            first_channel_keys = parameters.channel_parameters.items()[0][1].keys()
             if "channel_ID" in first_channel_keys:
                 channel_var = 'channel_ID'
                 variable_type = 'i'
-            elif "channel string ID" in first_channel_keys:
-                channel_var = 'channel string ID'
+            elif "channel_string_ID" in first_channel_keys:
+                channel_var = 'channel_string_ID'
                 variable_type = str
             else:
                 raise ValueError('Channel parameters should define either "chanel_id" or "channel_string_ID".')
 
             temp_v = f.createVariable(channel_var, variable_type, ('channels',))
-            for n, channel in enumerate(channels):
-                temp_v[n] = params.channel_parameters[channel][channel_var]
+            for n, channel in enumerate(channel_names):
+                temp_v[n] = parameters.channel_parameters[channel][channel_var]
 
-            # Write the custom subclass parameters:
-            for param in self.get_custom_channel_parameters():
-                temp_v = f.createVariable(param["name"], param["type"], param["dimensions"])
-
-                for (value, n) in zip(param["values"], range(len(param["values"]))):
-                    temp_v[n] = value
+            # Write the custom subclass variables:
+            for variable in self._get_custom_variables(channel_names):
+                temp_v = f.createVariable(variable["name"], variable["type"], variable["dimensions"])
+                temp_v[:] = variable["values"]
 
             # Write the values of fixed channel parameters:
-            fill_value = -9999
-            for param in self._get_provided_extra_parameters():
-                if param in channel_variables.keys():
-                    try:
-                        temp_v = f.createVariable(param, channel_variables[param][1], channel_variables[param][0])
-                    except RuntimeError:
-                        logging.warning("NetCDF variable \"%s\" ignored because it was read from the input files!" % param)
-                        continue
-                else:
+            channel_variable_specs = self._get_scc_channel_variables()
+
+            for variable_name in self._get_provided_variable_names():
+
+                # Check if the variable already defined (e.g. from values in Licel files).
+                if variable_name in f.variables.keys():
+                    logger.warning(
+                        "Provided values of \"{0}\" were ignored because they were read from another source.".format(
+                            variable_name))
+                    continue
+
+                if variable_name not in channel_variable_specs.keys():
+                    logger.warning("Provided variable {0} is not part of the specs: {1}".format(variable_name, channel_variable_specs.keys()))
+                    continue
+
+                temp_v = f.createVariable(variable_name,
+                                          channel_variable_specs[variable_name][1],
+                                          channel_variable_specs[variable_name][0])
+
+                for n, channel_name in enumerate(channel_names):
                     try:
-                        temp_v = f.createVariable(param, 'd', ('channels',), fill_value=fill_value)
-                    except RuntimeError:
-                        logging.warning("NetCDF variable \"%s\" ignored because it was read from the input files!" % param)
-                        continue
-
-                for (channel, n) in zip(channels, range(len(channels))):
-                    try:
-                        temp_v[n] = params.channel_parameters[channel][param]
+                        temp_v[n] = parameters.channel_parameters[channel_name][variable_name]
                     except KeyError:  # The parameter was not provided for this channel so we mask the value.
-                        temp_v[n] = fill_value
+                        pass  # Default value should be a missing value  # TODO: Check this.
 
             # Write the id_timescale values
             temp_id_timescale = f.createVariable('id_timescale', 'i', ('channels',))
-            for (channel, n) in zip(channels, range(len(channels))):
+            for (channel, n) in zip(channel_names, range(len(channel_names))):
                 temp_id_timescale[n] = self.variables['id_timescale'][channel]
 
             # Laser pointing angle
             temp_v = f.createVariable('Laser_Pointing_Angle', 'd', ('scan_angles',))
-            temp_v[:] = params.general_parameters['Laser_Pointing_Angle']
+            temp_v[:] = parameters.general_parameters['Laser_Pointing_Angle']
 
             # Molecular calculation
             temp_v = f.createVariable('Molecular_Calc', 'i')
-            temp_v[:] = params.general_parameters['Molecular_Calc']
+            temp_v[:] = parameters.general_parameters['Molecular_Calc']
 
             # Laser pointing angles of profiles
             temp_v = f.createVariable('Laser_Pointing_Angle_of_Profiles', 'i', ('time', 'nb_of_time_scales'))
@@ -516,22 +500,23 @@
                 temp_raw_stop[:len(stop_time), n] = stop_time
 
             # Laser shots
-            try:
+            if "Laser_Shots" in f.variables.keys():
+                logger.warning("Provided values of \"Laser_Shots\" were ignored because they were read from another source.")
+            else:
                 temp_v = f.createVariable('Laser_Shots', 'i', ('time', 'channels'))
-                for (channel, n) in zip(channels, range(len(channels))):
+                for (channel, n) in zip(channel_names, range(len(channel_names))):
                     time_length = len(self.variables['Raw_Data_Start_Time'][self.variables['id_timescale'][channel]])
-                    # Array slicing stoped working as usual ex. temp_v[:10] = 100 does not work. ??? np.ones was added.
-                    temp_v[:time_length, n] = np.ones(time_length) * params.channel_parameters[channel]['Laser_Shots']
-            except RuntimeError:
-                logging.warning("NetCDF variable \"%s\" ignored because it was read from the input files!" % "LaserShots")
+                    # Array slicing stopped working as usual ex. temp_v[:10] = 100 does not work. ??? np.ones was added.
+                    temp_v[:time_length, n] = np.ones(time_length) * parameters.channel_parameters[channel][
+                        'Laser_Shots']
 
             # Raw lidar data
             temp_v = f.createVariable('Raw_Lidar_Data', 'd', ('time', 'channels', 'points'))
-            for (channel, n) in zip(channels, range(len(channels))):
+            for (channel, n) in zip(channel_names, range(len(channel_names))):
                 c = self.channels[channel]
                 temp_v[:len(c.time), n, :c.points] = c.matrix
 
-            self.add_dark_measurements_to_netcdf(f, channels)
+            self.add_dark_measurements_to_netcdf(f, channel_names)
 
             # Pressure at lidar station
             temp_v = f.createVariable('Pressure_at_Lidar_Station', 'd')
@@ -543,42 +528,44 @@
 
             self.save_netcdf_extra(f)
 
-    def _get_scc_mandatory_channel_variables(self):
+    def _get_scc_channel_variables(self):
         channel_variables = \
             {'Background_Low': (('channels',), 'd'),
              'Background_High': (('channels',), 'd'),
+             'Laser_Repetition_Rate': (('channels',), 'i'),
+             'Scattering_Mechanism': (('channels',), 'i'),
+             'Signal_Type': (('channels',), 'i'),
+             'Emitted_Wavelength': (('channels',), 'd'),
+             'Detected_Wavelength': (('channels',), 'd'),
+             'Raw_Data_Range_Resolution': (('channels',), 'd'),
+             'Background_Mode': (('channels',), 'i'),
+             'Dead_Time_Corr_Type': (('channels',), 'i'),
+             'Dead_Time': (('channels',), 'd'),
+             'Acquisition_Mode': (('channels',), 'i'),
+             'Trigger_Delay': (('channels',), 'd'),
+             'Pol_Calib_Range_Min': (('channels',), 'i'),
+             'Pol_Calib_Range_Max': (('channels',), 'i'),
              'LR_Input': (('channels',), 'i'),
              'DAQ_Range': (('channels',), 'd'),
              }
         return channel_variables
 
-    def _get_provided_extra_parameters(self):
-        # When looking for non-mandatory channel parameters, ignore the following parameter names:
-        ignore = ['channel_ID', 'channel string ID', 'Depolarization_Factor', 'Laser_Shots']
+    def _get_provided_variable_names(self):
+        """ Return the names of channel variables provided in the settings file. """
+        # When looking for channel parameters, ignore the following parameter names:
+        ignore = {'channel_ID', 'channel_string_ID', 'Depolarization_Factor', 'Laser_Shots'}  # Set
 
         channels = self.channels.keys()
-        params = self.extra_netcdf_parameters.channel_parameters
-        mandatory = self._get_scc_mandatory_channel_variables()
+        channel_parameters = self.extra_netcdf_parameters.channel_parameters
 
-        # Get all the provided extra parameters (both mandatory and optional):
-        provided_extra_parameters = []
-        for (channel, n) in zip(channels, range(len(channels))):
-            # Check all the mandatory parameters are provided for each of the channels:
-            for var in mandatory.keys():
-                if var not in params[channel].keys():
-                    raise ValueError("Mandatory parameter '{0}' not provided for channel {1}!".format(
-                        var,
-                        channel
-                    ))
+        # Get all the provided parameters (both mandatory and optional):
+        all_provided_variables = [channel_parameters[channel].keys() for channel in channels]
+        provided_variables = set(itertools.chain.from_iterable(all_provided_variables))
 
-            provided_extra_parameters.extend(params[channel].keys())
+        # Discard certain parameter names:
+        provided_variables -= ignore
 
-        provided_extra_parameters = set(provided_extra_parameters)
-        # Discard certain parameter names:
-        for param in ignore:
-            provided_extra_parameters.discard(param)
-
-        return provided_extra_parameters
+        return provided_variables
 
     def add_dark_measurements_to_netcdf(self, f, channels):
         """
@@ -644,19 +631,18 @@
     def get_dark_measurements(self):
         return None
 
-    def get_custom_general_parameters(self):
+    def _get_custom_global_attributes(self):
         """
-        Abstract method to provide custom NetCDF parameters  that should be included 
+        Abstract method to provide NetCDF global attributes that should be included
         in the final NetCDF file.
         
         If required, this method should be implemented by subclasses of BaseLidarMeasurement.
         """
         return []
 
-    def get_custom_channel_parameters(self):
+    def _get_custom_variables(self, channel_names=None):
         """
-        Abstract method to provide custom NetCDF parameters for the channels in this 
-        measurement that should be included in the final NetCDF file. 
+        Abstract method to provide custom NetCDF variables that should be included in the final NetCDF file.
         
         If required, this method should be implemented by subclasses of BaseLidarMeasurement.
         """
@@ -693,7 +679,29 @@
             len(channel_parameters['data'])) * self.resolution + self.resolution / 2.0  # Change: add half bin in the z
         self.points = len(channel_parameters['data'])
         self.rc = []
-        self.duration = 60
+
+    def get_duration(self):
+        """ Get an array with the duration of each profile in seconds.
+
+        If the duration property already exists, returns this.
+        If not, it tries to estimate it based on the time difference of the profiles.
+
+        Returns
+        -------
+        duration : ndarray
+           1D array containing profile durations.
+        """
+
+        current_value = getattr(self, 'duration', None)
+
+        if current_value:
+            return np.array(current_value)
+
+        time_diff = np.diff(self.time)
+        durations = [dt.seconds for dt in time_diff]
+        # Assume the last profile has the same duration as the one before it
+        durations.append(durations[-1])
+        duration = np.array(durations)
+        return duration
 
     def calculate_rc(self, idx_min=4000, idx_max=5000):
         """ Calculate range corrected signal.
@@ -717,7 +725,7 @@
         Update the time parameters and data according to the raw input data. 
         """
         self.start_time = min(self.data.keys())
-        self.stop_time = max(self.data.keys()) + datetime.timedelta(seconds=self.duration)
+        self.stop_time = max(self.data.keys()) + datetime.timedelta(seconds=self.duration[-1])
         self.time = tuple(sorted(self.data.keys()))
         sorted_data = sorted(self.data.iteritems(), key=itemgetter(0))
         self.matrix = np.array(map(itemgetter(1), sorted_data))
@@ -738,10 +746,12 @@
         profile_idx : int
            The index of the selected profile.
         """
-        margin = datetime.timedelta(seconds=self.duration * 5)
+        max_duration = np.max(self.duration)
+
+        margin = datetime.timedelta(seconds=max_duration * 5)
 
         if ((input_time + margin) < self.start_time) | ((input_time - margin) > self.stop_time):
-            logging.error("Requested date not covered in this file")
+            logger.error("Requested date not covered in this file")
             raise ValueError("Requested date not covered in this file")
         dt = np.abs(np.array(self.time) - input_time)
 
@@ -749,8 +759,8 @@
         profile_datetime = self.time[profile_idx]
 
         dt_min = dt[profile_idx]
-        if dt_min > datetime.timedelta(seconds=self.duration):
-            logging.warning("Nearest profile more than 60 seconds away. dt = %s." % dt_min)
+        if dt_min > datetime.timedelta(seconds=max_duration):
+            logger.warning("Nearest profile more than %s seconds away. dt = %s." % (max_duration, dt_min))
 
         return profile_datetime, profile_idx
 
@@ -1078,7 +1088,7 @@
             ax1.axes.xaxis.set_major_locator(hourlocator)
 
         # Get the values of the time axis
-        dt = datetime.timedelta(seconds=self.duration)
+        dt = datetime.timedelta(seconds=self.duration[-1])
 
         time_cut = self.time[zoom[2]:zoom[3]]
         time_last = time_cut[-1] + dt  # The last element needed for pcolormesh
--- a/atmospheric_lidar/licel.py	Thu Jan 04 14:47:36 2018 +0200
+++ b/atmospheric_lidar/licel.py	Sat Jan 06 10:08:46 2018 +0200
@@ -14,9 +14,12 @@
                             'LS1 Rate1 LS2 Rate2 DataSets', ]
 licel_file_channel_format = 'Active AnalogPhoton LaserUsed DataPoints 1 HV BinW Wavelength d1 d2 d3 d4 ADCbits NShots Discriminator ID'
 
+c = 299792458.0  # Speed of light
+
 
 class LicelFile:
     """ A class representing a single binary Licel file. """
+
     def __init__(self, file_path, use_id_as_name=False, licel_timezone="UTC"):
         """
         This is run when creating a new object.
@@ -101,7 +104,7 @@
         # Read second line
         second_line = f.readline()
 
-        # Many licel files don't follow the licel standard. Specifically, the
+        # Many Licel files don't follow the licel standard. Specifically, the
         # measurement site is not always 8 characters, and can include white
         # spaces. For this, the site name is detect everything before the first 
         # date. For efficiency, the first date is found by the first '/'.
@@ -110,11 +113,11 @@
         site_name = second_line.split('/')[0][:-2]
         clean_site_name = site_name.strip()
         raw_info['Site'] = clean_site_name
-        raw_info.update(match_lines(second_line[len(clean_site_name) + 1:], licel_file_header_format[1]))
+        raw_info.update(self.match_lines(second_line[len(clean_site_name) + 1:], licel_file_header_format[1]))
 
         # Read third line
         third_line = f.readline()
-        raw_info.update(match_lines(third_line, licel_file_header_format[2]))
+        raw_info.update(self.match_lines(third_line, licel_file_header_format[2]))
 
         # Update the object properties based on the raw info
         start_string = '%s %s' % (raw_info['StartDate'], raw_info['StartTime'])
@@ -140,7 +143,7 @@
 
         # Read the rest of the header.
         for c1 in range(int(raw_info['DataSets'])):
-            channel_info.append(match_lines(f.readline(), licel_file_channel_format))
+            channel_info.append(self.match_lines(f.readline(), licel_file_channel_format))
 
         self.raw_info = raw_info
         self.channel_info = channel_info
@@ -156,10 +159,24 @@
         dt = self.stop_time - self.start_time
         return dt.seconds
 
+    @staticmethod
+    def match_lines(f1, f2):
+        list1 = f1.split()
+        list2 = f2.split()
+
+        if len(list1) != len(list2):
+            logging.debug("Channel parameter list has different length from licel specifications.")
+            logging.debug("List 1: %s" % list1)
+            logging.debug("List 2: %s" % list2)
+        combined = zip(list2, list1)
+        combined = dict(combined)
+        return combined
+
 
 class LicelFileChannel:
     """ A class representing a single channel found in a single Licel file."""
-    def __init__(self, raw_info=None, raw_data=None, duration=None, use_id_as_name=False):
+
+    def __init__(self, raw_info, raw_data, duration, use_id_as_name=False):
         """
         This is run when creating a new object.
 
@@ -179,6 +196,22 @@
         self.raw_data = raw_data
         self.duration = duration
         self.use_id_as_name = use_id_as_name
+        self.adcbits = int(raw_info['ADCbits'])
+        self.active = int(raw_info['Active'])
+        self.analog_photon = raw_info['AnalogPhoton']
+        self.bin_width = float(raw_info['BinW'])
+        self.data_points = int(raw_info['DataPoints'])
+
+        self.hv = float(raw_info['HV'])
+        self.id = raw_info['ID']
+        self.laser_used = int(raw_info['LaserUsed'])
+        self.number_of_shots = int(raw_info['NShots'])
+        self.wavelength_str = raw_info['Wavelength']
+
+        if self.is_analog:
+            self.discriminator = float(raw_info['Discriminator']) * 1000  # Analog range in mV
+        else:
+            self.discriminator = float(raw_info['Discriminator'])
 
     @property
     def wavelength(self):
@@ -190,12 +223,8 @@
            The integer value describing the wavelength. If no raw_info have been provided, 
            returns None.
         """
-        if self.raw_info is not None:
-            wave_str = self.raw_info['Wavelength']
-            wavelength = wave_str.split('.')[0]
-            return int(wavelength)
-        else:
-            return None
+        wavelength = self.wavelength_str.split('.')[0]
+        return int(wavelength)
 
     @property
     def channel_name(self):
@@ -212,27 +241,22 @@
            The channel name
         """
         if self.use_id_as_name:
-            channel_name = self.raw_info['ID']
+            channel_name = self.id
         else:
-            acquisition_type = self.analog_photon_string(self.raw_info['AnalogPhoton'])
-            channel_name = "%s_%s" % (self.raw_info['Wavelength'], acquisition_type)
+            acquisition_type = self.analog_photon_string
+            channel_name = "%s_%s" % (self.wavelength_str, acquisition_type)
         return channel_name
 
-    @staticmethod
-    def analog_photon_string(analog_photon_number):
+    @property
+    def analog_photon_string(self):
         """ Convert the analog/photon flag found in the Licel file to a proper sting.
-        
-        Parameters
-        ----------
-        analog_photon_number : int
-           0 or 1 indicating analog or photon counting channel. 
-        
+
         Returns
         -------
         string : str
            'an' or 'ph' string, for analog or photon-counting channel respectively.
         """
-        if analog_photon_number == '0':
+        if self.analog_photon == '0':
             string = 'an'
         else:
             string = 'ph'
@@ -248,15 +272,13 @@
         """
         data = self.raw_data
 
-        number_of_shots = float(self.raw_info['NShots'])
-        norm = data / number_of_shots
+        norm = data / float(self.number_of_shots)
         dz = float(self.raw_info['BinW'])
 
         if self.raw_info['AnalogPhoton'] == '0':
             # If the channel is in analog mode
-            ADCb = int(self.raw_info['ADCbits'])
-            ADCrange = float(self.raw_info['Discriminator']) * 1000  # Value in mV
-            channel_data = norm * ADCrange / ((2 ** ADCb) - 1)
+            ADCrange = self.discriminator  # Discriminator is already converted to mV in __init__
+            channel_data = norm * ADCrange / ((2 ** self.adcbits) - 1)
 
             # print ADCb, ADCRange,cdata,norm
         else:
@@ -267,30 +289,34 @@
             # channel_data = norm*SR
             # CHANGE:
             # For the SCC the data are needed in photons
-            channel_data = norm * number_of_shots
+            channel_data = norm * self.number_of_shots
             # print res,c,cdata,norm
 
         # Calculate Z
-        number_of_bins = int(self.raw_info['DataPoints'])
-        self.z = np.array([dz * bin_number + dz / 2.0 for bin_number in range(number_of_bins)])
+
+        self.z = np.array([dz * bin_number + dz / 2.0 for bin_number in range(self.data_points)])
         self.dz = dz
-        self.number_of_bins = number_of_bins
         self.data = channel_data
 
+    @property
+    def is_analog(self):
+        return self.analog_photon == '0'
+
 
 class LicelLidarMeasurement(BaseLidarMeasurement):
-    raw_info = {}  # Keep the raw info from the files
-    durations = {}  # Keep the duration of the files
-    laser_shots = []
 
     def __init__(self, file_list=None, use_id_as_name=False, licel_timezone='UTC'):
+        self.raw_info = {}  # Keep the raw info from the files
+        self.durations = {}  # Keep the duration of the files
+        self.laser_shots = []
+
         self.use_id_as_name = use_id_as_name
         self.licel_timezone = licel_timezone
         super(LicelLidarMeasurement, self).__init__(file_list)
 
     def _import_file(self, filename):
         if filename in self.files:
-            logging.warning("File has been imported already: %s" % filename)
+            logger.warning("File has been imported already: %s" % filename)
         else:
             current_file = LicelFile(filename, use_id_as_name=self.use_id_as_name, licel_timezone=self.licel_timezone)
             self.raw_info[current_file.filename] = current_file.raw_info
@@ -300,9 +326,8 @@
 
             for channel_name, channel in current_file.channels.items():
                 if channel_name not in self.channels:
-                    self.channels[channel_name] = LicelChannel(channel)
-                self.channels[channel_name].data[current_file.start_time] = channel.data
-                file_laser_shots.append(channel.raw_info['NShots'])
+                    self.channels[channel_name] = LicelChannel()
+                self.channels[channel_name].append_file(current_file.start_time, channel)
 
             self.laser_shots.append(file_laser_shots)
             self.files.append(current_file.filename)
@@ -329,28 +354,47 @@
 
         return duration_sec
 
-    def get_custom_channel_parameters(self):
+    def _get_custom_variables(self, channel_names):
+
+        daq_ranges = np.ma.masked_all(len(channel_names))
+        for n, channel_name in enumerate(channel_names):
+            channel = self.channels[channel_name]
+            if channel.is_analog:
+                unique_values = list(set(channel.discriminator))
+                if len(unique_values) > 1:
+                    logger.warning('More than one discriminator level for channel {0}: {1}'.format(channel_name, unique_values))
+                daq_ranges[n] = unique_values[0]
+
+        laser_shots = []
+        for channel_name in channel_names:
+            channel = self.channels[channel_name]
+            laser_shots.append(channel.laser_shots)
+
+        laser_shots = np.array(laser_shots).T
+
         params = [{
             "name": "DAQ_Range",
             "dimensions": ('channels',),
             "type": 'd',
-            "values": [self.channels[x].raw_info['Discriminator'] for x in self.channels.keys()]
-        }, {
-            "name": "LR_Input",
-            "dimensions": ('channels',),
-            "type": 'i',
-            "values": [self.channels[x].raw_info['LaserUsed'] for x in self.channels.keys()]
+            "values": daq_ranges,
         }, {
             "name": "Laser_Shots",
             "dimensions": ('time', 'channels',),
             "type": 'i',
-            "values": self.laser_shots
+            "values": laser_shots,
         },
         ]
 
         return params
 
-    def get_custom_general_parameters(self):
+    def _get_custom_global_attributes(self):
+        """
+        NetCDF global attributes that should be included
+        in the final NetCDF file.
+
+        Currently the method assumes that all files in the measurement object have the same altitude, lat and lon
+        properties.
+        """
         params = [{
             "name": "Altitude_meter_asl",
             "value": self.raw_info[self.files[0]]["Altitude"]
@@ -367,41 +411,67 @@
 
 
 class LicelChannel(LidarChannel):
-    def __init__(self, channel_file):
-        c = 299792458.0  # Speed of light
-        self.wavelength = channel_file.wavelength
-        self.name = channel_file.channel_name
-        self.binwidth = channel_file.dz * 2 / c  # in seconds 
+
+    def __init__(self):
+        self.name = None
+        self.resolution = None
+        self.points = None
+        self.wavelength = None
+        self.laser_used = None
+
+        self.rc = []
+        self.raw_info = []
+        self.laser_shots = []
+        self.duration = []
+        self.number_of_shots = []
+        self.discriminator = []
+        self.hv = []
         self.data = {}
-        self.resolution = channel_file.dz
-        self.z = np.arange(
-            channel_file.number_of_bins) * self.resolution + self.resolution / 2.0  # Change: add half bin in the z
-        self.points = channel_file.number_of_bins
-        self.rc = []
-        self.duration = channel_file.duration
-        self.raw_info = channel_file.raw_info
+
+    def append_file(self, file_start_time, file_channel):
+        """ Append file to the current object """
+
+        self._assign_unique_property('name', file_channel.channel_name)
+        self._assign_unique_property('resolution', file_channel.dz)
+        self._assign_unique_property('wavelength', file_channel.wavelength)
+        self._assign_unique_property('points', file_channel.data_points)
+        self._assign_unique_property('adcbits', file_channel.adcbits)
+        self._assign_unique_property('active', file_channel.active)
+        self._assign_unique_property('laser_used', file_channel.laser_used)
+        self._assign_unique_property('analog_photon_string', file_channel.analog_photon_string)
+
+        self.binwidth = self.resolution * 2. / c  # in seconds
+        self.z = file_channel.z
 
-    def append(self, other):
-        if self.info != other.info:
-            raise ValueError('Channel info are different. Data can not be combined.')
+        self.data[file_start_time] = file_channel.data
+        self.raw_info.append(file_channel.raw_info)
+        self.laser_shots.append(file_channel.number_of_shots)
+        self.duration.append(file_channel.duration)
+        self.number_of_shots.append(file_channel.number_of_shots)
+        self.discriminator.append(file_channel.discriminator)
+        self.hv.append(file_channel.hv)
+
+    def _assign_unique_property(self, property_name, value):
+
+        current_value = getattr(self, property_name, None)
 
-        self.data = np.vstack([self.data, other.data])
+        if current_value is None:
+            setattr(self, property_name, value)
+        else:
+            if current_value != value:
+                raise ValueError('Cannot combine channels with different values of {0}.'.format(property_name))
+
+    @property
+    def is_analog(self):
+        return self.analog_photon_string == 'an'
+
+    @property
+    def is_photon_counting(self):
+        return self.analog_photon_string == 'ph'
 
     def __unicode__(self):
         return "<Licel channel: %s>" % self.name
 
     def __str__(self):
         return unicode(self).encode('utf-8')
-
-
-def match_lines(f1, f2):
-    list1 = f1.split()
-    list2 = f2.split()
-
-    if len(list1) != len(list2):
-        logging.debug("Channel parameter list has different length from licel specifications.")
-        logging.debug("List 1: %s" % list1)
-        logging.debug("List 2: %s" % list2)
-    combined = zip(list2, list1)
-    combined = dict(combined)
-    return combined
--- a/atmospheric_lidar/licel_depol.py	Thu Jan 04 14:47:36 2018 +0200
+++ b/atmospheric_lidar/licel_depol.py	Sat Jan 06 10:08:46 2018 +0200
@@ -119,7 +119,7 @@
         new_measurement = self.subset_by_channels(ph_channels)
         return new_measurement
 
-    def _get_scc_mandatory_channel_variables(self):
+    def _get_scc_channel_variables(self):
         """
         Get a list of variables to put in the SCC.
 
--- a/atmospheric_lidar/systems/adam2017/adam2017.py	Thu Jan 04 14:47:36 2018 +0200
+++ b/atmospheric_lidar/systems/adam2017/adam2017.py	Sat Jan 06 10:08:46 2018 +0200
@@ -5,7 +5,7 @@
 class ADAM2017LidarMeasurement(LicelLidarMeasurement):
     extra_netcdf_parameters = adam2017_netcdf_parameters
 
-    def _get_scc_mandatory_channel_variables(self):
+    def _get_scc_channel_variables(self):
         channel_variables = \
             {'Background_Low': (('channels',), 'd'),
              'Background_High': (('channels',), 'd'),
--- a/atmospheric_lidar/systems/rali/rali.py	Thu Jan 04 14:47:36 2018 +0200
+++ b/atmospheric_lidar/systems/rali/rali.py	Sat Jan 06 10:08:46 2018 +0200
@@ -1,33 +1,9 @@
-import radiometer
-
 from ...licel import LicelLidarMeasurement
 
 import rali_netcdf_parameters
 
+
 class RaliLidarMeasurement(LicelLidarMeasurement):
     extra_netcdf_parameters = rali_netcdf_parameters
-    
-    def set_PT(self):
-        ''' Gets the pressure and temperature from Radiometer data.
-        If no data file is found, mean values from past measurements are 
-        used.
-        '''
-        
-        start_time = self.info['start_time']
-        stop_time = self.info['stop_time']
-        dt = stop_time - start_time
-        mean_time = start_time + dt/2
-        
-        meteo_triplet = radiometer.get_mean_PT(mean_time)
-        
-        if meteo_triplet:
-            pressure, temperature, humidity = meteo_triplet
-        else:
-            print "Radiometer meteo data not available. Using past values."
-            pressure = radiometer.P_mean[mean_time.month - 1, mean_time.hour]
-            temperature = radiometer.T_mean[mean_time.month - 1, mean_time.hour]
-            
-        self.info['Temperature'] = temperature - 273.15
-        self.info['Pressure'] = pressure
 
 
--- a/atmospheric_lidar/systems/rali/rali_diva_parameters.yaml	Thu Jan 04 14:47:36 2018 +0200
+++ b/atmospheric_lidar/systems/rali/rali_diva_parameters.yaml	Sat Jan 06 10:08:46 2018 +0200
@@ -18,14 +18,13 @@
     long_name: total elastic signal at 1064nm
     detector_type: APD                  # APD or PMT
     detection_mode: analog              # analog or photon-counting
-    detector_manufacturer: Hamamatsu    # Optional
+    detector_manufacturer: Hamamatsu    # Optional, use 'unknown' for blank
     detector_model: ABC                 # Optional
     daq_manufacturer: Licel             # Optional
-    daq_model: -                        # Optional
+    daq_model: unknown                  # Optional
     bin_length: 50                      # ns
-    pulse_delay: 5000                   # ns. Time difference between channel trigger and pulse emission
+    trigger_delay: 5000                 # ns. Time difference of channel trigger from pulse emission. Negative for pre-trigger
     laser_repetition_rate: 10           # Hz
-    signal_units: mV                    # counts or mV
     emission_wavelength: 1064.17        # nm
     emission_polarization: linear       # linear, circular, or none
     emission_energy: 100                # Nominal, in mJ
@@ -38,14 +37,13 @@
     long_name: parallel elastic signal at 532nm
     detector_type: PMT
     detection_mode: photon-counting     # analog or photon-counting
-    detector_manufacturer: Hamamatsu    # Optional, use - if not applicable
-    detector_model: ABC                 # Optional, use - if not applicable
-    daq_manufacturer: Licel             # Optional, use - if not applicable
-    daq_model: -                        # Optional, use - if not applicable
+    detector_manufacturer: Hamamatsu    # Optional, use 'unknown' for blank
+    detector_model: ABC                 # Optional
+    daq_manufacturer: Licel             # Optional
+    daq_model: unknown                  # Optional
     bin_length: 50                      # ns
-    pulse_delay: 5000                   # ns. Time difference between channel trigger and pulse emission
+    trigger_delay: 5000                 # ns. Time difference of channel trigger from pulse emission. Negative for pre-trigger
     laser_repetition_rate: 10           # Hz
-    signal_units: counts                # counts or mV
     emission_wavelength: 532.085        # nm    
     emission_polarization: linear       # linear, circular, or none
     emission_energy: 100                # Nominal, in mJ

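A short sketch of the per-channel information now exposed by LicelChannel, illustrative only; the input file names are assumptions.

from atmospheric_lidar.licel import LicelLidarMeasurement

measurement = LicelLidarMeasurement(['RM1810600.000001', 'RM1810600.000002'])  # Hypothetical Licel files
channel = list(measurement.channels.values())[0]  # Pick any channel

print(channel.hv)              # Detector high voltage, one value per file
print(channel.discriminator)   # Discriminator level; ADC range in mV for analog channels
print(channel.laser_shots)     # Laser shots accumulated in each file
print(channel.get_duration())  # Duration of each profile, in seconds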