Improved documentation and clean-up of BaseLidarMeasurement class.

Wed, 06 Dec 2017 13:36:42 +0200

author
Iannis <ulalume3@yahoo.com>
date
Wed, 06 Dec 2017 13:36:42 +0200
changeset 92
6d26002aaeed
parent 91
f9d9d3ea8edb
child 93
c352254b4650

Improved documentation and clean-up of BaseLidarMeasurement class.

atmospheric_lidar/generic.py file | annotate | diff | comparison | revisions
atmospheric_lidar/licel.py file | annotate | diff | comparison | revisions
atmospheric_lidar/licel_depol.py file | annotate | diff | comparison | revisions
atmospheric_lidar/scripts/licel2scc.py file | annotate | diff | comparison | revisions
atmospheric_lidar/scripts/licel2scc_depol.py file | annotate | diff | comparison | revisions
atmospheric_lidar/systems/eole/eole.py file | annotate | diff | comparison | revisions
atmospheric_lidar/systems/ipral/ipral.py file | annotate | diff | comparison | revisions
atmospheric_lidar/systems/lilas/lilas.py file | annotate | diff | comparison | revisions
atmospheric_lidar/systems/pearl/pearl.py file | annotate | diff | comparison | revisions
atmospheric_lidar/systems/rali/rali.py file | annotate | diff | comparison | revisions
example_scripts/convert_ipral.py file | annotate | diff | comparison | revisions
example_scripts/convert_lilas.py file | annotate | diff | comparison | revisions
readme.rst file | annotate | diff | comparison | revisions
--- a/atmospheric_lidar/generic.py	Wed Dec 06 11:50:41 2017 +0200
+++ b/atmospheric_lidar/generic.py	Wed Dec 06 13:36:42 2017 +0200
@@ -1,7 +1,6 @@
-# General imports
 import datetime
+import logging
 from operator import itemgetter
-import logging
 
 import matplotlib as mpl
 import netCDF4 as netcdf
@@ -9,24 +8,30 @@
 from matplotlib import pyplot as plt
 from matplotlib.ticker import ScalarFormatter
 
-netcdf_format = 'NETCDF3_CLASSIC'  # choose one of 'NETCDF3_CLASSIC', 'NETCDF3_64BIT', 'NETCDF4_CLASSIC' and 'NETCDF4'
+NETCDF_FORMAT = 'NETCDF4'  # choose one of 'NETCDF3_CLASSIC', 'NETCDF3_64BIT', 'NETCDF4_CLASSIC' and 'NETCDF4'
 
 
 class BaseLidarMeasurement(object):
-    """ This is the general measurement object.
-    It is meant to become a general measurement object 
-    independent of the input files.
+    """ 
+    This class represents a general measurement object, independent of the input files.
     
     Each subclass should implement the following:
-    * the import_file method.
-    * set the "extra_netcdf_parameters" variable to a dictionary that includes the appropriate parameters.
+    * the _import_file method;
+    * set the "extra_netcdf_parameters" variable to a dictionary that includes the appropriate parameters;
+    
+    You can override the set_PT method to define a custom procedure to get ground temperature and pressure.   
     
-    You can override the get_PT method to define a custom procedure to get ground temperature and pressure.
-    The one implemented by default is by using the MILOS meteorological station data. 
-    
+    The class assumes that the input files are consecutive, i.e. there are no measurement gaps.
     """
-
-    def __init__(self, filelist=None):
+    def __init__(self, file_list=None):
+        """
+        This is run when creating a new object.
+        
+        Parameters
+        ----------
+        file_list : list
+           A list of the full paths to the input files. 
+        """
         self.info = {}
         self.dimensions = {}
         self.variables = {}
@@ -34,27 +39,47 @@
         self.attributes = {}
         self.files = []
         self.dark_measurement = None
+        self.extra_netcdf_parameters = None
 
-        if filelist:
-            self.import_files(filelist)
+        if file_list:
+            self._import_files(file_list)
 
-    def import_files(self, filelist):
-        for f in filelist:
-            self.import_file(f)
+    def _import_files(self, file_list):
+        """
+        Imports a list of files, and updates the object parameters.
+        
+        Parameters
+        ----------
+        file_list : list
+           A list of the full paths to the input files. 
+        """
+        for f in file_list:
+            self._import_file(f)
         self.update()
 
-    def import_file(self, filename):
+    def _import_file(self, filename):
+        """
+        Reads a single lidar file.
+        
+        This method should be overwritten by all subclasses.
+        
+        Parameters
+        ----------
+        filename : str
+           Path to the lidar file.
+        """
         raise NotImplementedError('Importing files should be defined in the instrument-specific subclass.')
 
     def update(self):
-        '''
-        Update the the info, variables and dimensions of the lidar measurement based 
-        on the information found in the channels.
+        """
+        Update the info dictionary, variables, and dimensions of the measurement object
+        based on the information found in the channel objects.
         
+        Note
+        ----
         Reading of the scan_angles parameter is not implemented.
-        '''
+        """
         # Initialize
-
         start_time = []
         stop_time = []
         points = []
@@ -86,67 +111,90 @@
 
         # Update the variables dictionary
         # Write time scales in seconds
-        raw_Data_Start_Time = []
-        raw_Data_Stop_Time = []
+        raw_data_start_time = []
+        raw_data_stop_time = []
 
         for current_time_scale in list(time_scales):
             raw_start_time = np.array(current_time_scale) - min(start_time)  # Time since start_time
             raw_start_in_seconds = np.array([t.seconds for t in raw_start_time])  # Convert in seconds
-            raw_Data_Start_Time.append(raw_start_in_seconds)  # And add to the list
-            # Check if this time scale has measurements every 30 or 60 seconds.
+            raw_data_start_time.append(raw_start_in_seconds)  # And add to the list
 
             duration = self._get_duration(raw_start_in_seconds)
 
+            # TODO: Define stop time for each measurement based on real data
             raw_stop_in_seconds = raw_start_in_seconds + duration
-            raw_Data_Stop_Time.append(raw_stop_in_seconds)
+            raw_data_stop_time.append(raw_stop_in_seconds)
 
-        self.variables['Raw_Data_Start_Time'] = raw_Data_Start_Time
-        self.variables['Raw_Data_Stop_Time'] = raw_Data_Stop_Time
+        self.variables['Raw_Data_Start_Time'] = raw_data_start_time
+        self.variables['Raw_Data_Stop_Time'] = raw_data_stop_time
 
         # Make a dictionary to match time scales and channels
         channel_timescales = []
         for (channel_name, current_time_scale) in zip(channel_name_list, all_time_scales):
-            # The following lines are PEARL specific. The reason they are here is not clear.
-            # if channel_name =='1064BLR':
-            #     channel_name = '1064'
             for (ts, n) in zip(time_scales, range(len(time_scales))):
                 if current_time_scale == ts:
                     channel_timescales.append([channel_name, n])
         self.variables['id_timescale'] = dict(channel_timescales)
 
     def _get_duration(self, raw_start_in_seconds):
-        ''' Return the duration for a given time scale. In some files (e.g. Licel) this
-        can be specified from the files themselves. In others this must be guessed.
-         
-        '''
-        # The old method, kept here for reference
-        # dt = np.mean(np.diff(raw_start_in_seconds))
-        # for d in duration_list:
-        #    if abs(dt - d) <15: #If the difference of measurements is 10s near the(30 or 60) seconds
-        #        duration = d
-
+        """ 
+        Return the duration for a given time scale. 
+       
+        In some files (e.g. Licel) this can be specified from the files themselves. 
+        In others this must be guessed.
+        
+        Parameters
+        ----------
+        raw_start_in_seconds : array
+           An array of floats, representing the start time of each measurement profile.
+        
+        Returns
+        -------
+        duration : float
+           Guess of the duration of each bin in the timescale
+        """
         duration = np.diff(raw_start_in_seconds)[0]
 
         return duration
 
     def subset_by_channels(self, channel_subset):
-        ''' Get a measurement object containing only the channels with names
-        contained in the channel_sublet list '''
+        """ 
+    Create a measurement object containing only a subset of channels. 
+        
+        Parameters
+        ----------
+        channel_subset : list
+           A list of channel names (str) to be included in the new measurement object.
+        
+        Returns
+        -------
+        m : BaseLidarMeasurements object
+           A new measurements object
+        """
 
         m = self.__class__()  # Create an object of the same type as this one.
         m.channels = dict([(channel, self.channels[channel]) for channel
                            in channel_subset])
-                           
+
         m.files = self.files
-        
+
         m.update()
-        
+
         return m
 
     def subset_by_scc_channels(self):
         """
-        Subset the measurement based on the channels provided in the extra_netecdf_parameter file.
+        Subset the measurement based on the channels provided in the 
+    extra_netcdf_parameters file.
+        
+        Returns
+        -------
+        m : BaseLidarMeasurements object
+           A new measurements object
         """
+        if self.extra_netcdf_parameters is None:
+            raise RuntimeError("Extra netCDF parameters not defined, cannot subset measurement.")
+
         scc_channels = self.extra_netcdf_parameters.channel_parameters.keys()
         common_channels = list(set(scc_channels).intersection(self.channels.keys()))
 
@@ -158,6 +206,21 @@
         return self.subset_by_channels(common_channels)
 
     def subset_by_time(self, start_time, stop_time):
+        """
+        Subset the measurement for a specific time interval
+        
+        Parameters
+        ----------
+        start_time : datetime 
+           The starting datetime to subset.
+        stop_time : datetime
+           The stopping datetime to subset.
+
+        Returns
+        -------
+        m : BaseLidarMeasurements object
+           A new measurements object
+        """
 
         if start_time > stop_time:
             raise ValueError('Stop time should be after start time')
@@ -168,14 +231,29 @@
         m = self.__class__()  # Create an object of the same type as this one.
         for (channel_name, channel) in self.channels.items():
             m.channels[channel_name] = channel.subset_by_time(start_time, stop_time)
+
         m.update()
         return m
 
     def subset_by_bins(self, b_min=0, b_max=None):
-        """Remove some height bins from the file. This could be needed to 
-        remove aquisition artifacts at the start or the end of the files.
         """
-
+        Remove some height bins from the file. 
+        
+        This could be needed to remove acquisition artifacts at 
+        the first or last bins of the profiles.
+        
+        Parameters
+        ----------
+        b_min : int
+          The first valid data bin
+        b_max : int or None
+          The last valid data bin. If equal to None, all bins are used.
+          
+        Returns
+        -------
+        m : BaseLidarMeasurements object
+           A new measurements object
+        """
         m = self.__class__()  # Create an object of the same type as this one.
 
         for (channel_name, channel) in self.channels.items():
@@ -185,12 +263,17 @@
 
         return m
 
-    def rename_channel(self, prefix="", suffix=""):
-        """ Add a prefix and a suffix in a channel name.
-
-        :param prefix: A string for the prefix
-        :param suffix: A string for the suffix
-        :return: Nothing
+    def rename_channels(self, prefix="", suffix=""):
+    """ Add a prefix and a suffix to all channel names.
+        
+    This is used when processing Delta90 depolarization calibration measurements.
+        
+        Parameters
+        ----------
+        prefix : str
+          The prefix to add to channel names. 
+        suffix : str
+          The suffix to add to channel names.
         """
         channel_names = self.channels.keys()
 
@@ -198,16 +281,27 @@
             new_name = prefix + channel_name + suffix
             self.channels[new_name] = self.channels.pop(channel_name)
 
-    def get_PT(self):
-        ''' Sets the pressure and temperature at station level .
-        The results are stored in the info dictionary.        
-        '''
-
-        self.info['Temperature'] = 10.0
-        self.info['Pressure'] = 930.0
+    def set_PT(self):
+        """ 
+        Sets the pressure and temperature at station level in the info dictionary.
+        
+        In this method, default values are used. It can be overwritten by subclasses
+        to define more appropriate values for each system.
+        """
+        self.info['Temperature'] = 15.0  # Temperature in degC
+        self.info['Pressure'] = 1013.15  # Pressure in hPa
 
     def subtract_dark(self):
-
+        """
+        Subtract dark measurements from the raw lidar signals. 
+         
+        This method is here just for testing.
+        
+        Note
+        ----       
+        This method should not be called if processing the data with the SCC. The SCC
+        performs this operation anyway. 
+        """
         if not self.dark_measurement:
             raise IOError('No dark measurements have been imported yet.')
 
@@ -228,10 +322,10 @@
         ----------
         measurement_id: str
            A measurement id with the format YYYYMMDDccNN, where YYYYMMDD the date,
-           cc the earlinet call sign and NN a number between 00 and 99.
+           cc the EARLINET call sign and NN a number between 00 and 99.
         measurement_number: str
            If measurement id is not provided the method will try to create one
-           based on the input dete. The measurement number can specify the value
+           based on the input date. The measurement number can specify the value
            of NN in the created ID.
         """
         if measurement_id is None:
@@ -244,10 +338,15 @@
 
         self.info['Measurement_ID'] = measurement_id
 
-    def save_as_netcdf(self, filename=None):
-        """Saves the measurement in the netcdf format as required by the SCC.
-        Input: filename. If no filename is provided <measurement_id>.nc will
-               be used. 
+    def save_as_SCC_netcdf(self, filename=None):
+        """Saves the measurement in the netCDF format as required by the SCC.
+        
+        If no filename is provided <measurement_id>.nc will be used. 
+        
+        Parameters
+        ----------
+        filename : str
+           Output file name. If None, <measurement_id>.nc will be used. 
         """
         params = self.extra_netcdf_parameters
 
@@ -260,7 +359,7 @@
             stored_value = self.info.get(parameter, None)
             if stored_value is None:
                 try:
-                    self.get_PT()
+                    self.set_PT()
                 except:
                     raise ValueError('A value needs to be specified for %s' % parameter)
 
@@ -308,128 +407,127 @@
         input_values['Latitude_degrees_north'] = params.general_parameters['Latitude_degrees_north']
         input_values['Longitude_degrees_east'] = params.general_parameters['Longitude_degrees_east']
         input_values['Altitude_meter_asl'] = params.general_parameters['Altitude_meter_asl']
-        
-        # Override general paremeters with those provided by any subclass
+
+        # Override general parameters with those provided by any subclass
         # in a custom fashion
-        for param in self.getCustomGeneralParameters():
-            input_values[ param["name"] ] = param["value"]
+        for param in self.get_custom_general_parameters():
+            input_values[param["name"]] = param["value"]
 
-        # Open a netCDF4 file
-        f = netcdf.Dataset(filename, 'w', format=netcdf_format)  # the format is specified in the begining of the file
+        # Open a netCDF file. The format is specified in the beginning of this module.
+        with netcdf.Dataset(filename, 'w', format=NETCDF_FORMAT) as f:
+
+            # Create the dimensions in the file
+            for (d, v) in dimensions.iteritems():
+                v = input_values.pop(d, v)
+                f.createDimension(d, v)
 
-        # Create the dimensions in the file
-        for (d, v) in dimensions.iteritems():
-            v = input_values.pop(d, v)
-            f.createDimension(d, v)
+            # Create global attributes
+            for (attrib, value) in global_att.iteritems():
+                val = input_values.pop(attrib, value)
+                if val:
+                    setattr(f, attrib, val)
+
+            # Variables
 
-        # Create global attributes
-        for (attrib, value) in global_att.iteritems():
-            val = input_values.pop(attrib, value)
-            if val:
-                setattr(f, attrib, val)
+            # Write either channel_id or string_channel_id in the file
+            first_channel_keys = params.channel_parameters.items()[0][1].keys()
+            if "channel_ID" in first_channel_keys:
+                channel_var = 'channel_ID'
+                variable_type = 'i'
+            elif "channel string ID" in first_channel_keys:
+                channel_var = 'channel string ID'
+                variable_type = str
+            else:
+                raise ValueError('Channel parameters should define either "chanel_id" or "channel_string_ID".')
 
-        """ Variables """
-        # Write either channel_id or string_channel_id in the file
-        first_channel_keys = params.channel_parameters.items()[0][1].keys()
-        if "channel_ID" in first_channel_keys:
-            channel_var = 'channel_ID'
-            variable_type = 'i'
-        elif "channel string ID" in first_channel_keys:
-            channel_var = 'channel string ID'
-            variable_type = str
-        else:
-            raise ValueError('Channel parameters should define either "chanel_id" or "channel_string_ID".')
+            temp_v = f.createVariable(channel_var, variable_type, ('channels',))
+            for n, channel in enumerate(channels):
+                temp_v[n] = params.channel_parameters[channel][channel_var]
+
+            # Write the custom subclass parameters:
+            for param in self.get_custom_channel_parameters():
+                temp_v = f.createVariable(param["name"], param["type"], param["dimensions"])
+
+                for (value, n) in zip(param["values"], range(len(param["values"]))):
+                    temp_v[n] = value
 
-        temp_v = f.createVariable(channel_var, variable_type, ('channels',))
-        for n, channel in enumerate(channels):
-            temp_v[n] = params.channel_parameters[channel][channel_var]
-            
-        # Write the custom subclass parameters:
-        for param in self.getCustomChannelParameters():
-            temp_v = f.createVariable(param["name"], param["type"], param["dimensions"])
-            
-            for (value, n) in zip(param["values"], range(len(param["values"]))):
-                temp_v[n] = value
-        
-        # Write the values of fixed channel parameters:
-        fill_value = -9999
-        for param in self._get_provided_extra_parameters():
-            if param in channel_variables.keys():
-                try:
-                    temp_v = f.createVariable(param, channel_variables[param][1], channel_variables[param][0])
-                except RuntimeError:
-                    logging.warning("NetCDF variable \"%s\" ignored because it was read from the input files!" % param)
-                    continue
-            else:
-                try:
-                    temp_v = f.createVariable(param, 'd', ('channels',), fill_value = fill_value)
-                except RuntimeError:
-                    logging.warning("NetCDF variable \"%s\" ignored because it was read from the input files!" % param)
-                    continue
-                
+            # Write the values of fixed channel parameters:
+            fill_value = -9999
+            for param in self._get_provided_extra_parameters():
+                if param in channel_variables.keys():
+                    try:
+                        temp_v = f.createVariable(param, channel_variables[param][1], channel_variables[param][0])
+                    except RuntimeError:
+                        logging.warning("NetCDF variable \"%s\" ignored because it was read from the input files!" % param)
+                        continue
+                else:
+                    try:
+                        temp_v = f.createVariable(param, 'd', ('channels',), fill_value=fill_value)
+                    except RuntimeError:
+                        logging.warning("NetCDF variable \"%s\" ignored because it was read from the input files!" % param)
+                        continue
+
+                for (channel, n) in zip(channels, range(len(channels))):
+                    try:
+                        temp_v[n] = params.channel_parameters[channel][param]
+                    except KeyError:  # The parameter was not provided for this channel so we mask the value.
+                        temp_v[n] = fill_value
+
+            # Write the id_timescale values
+            temp_id_timescale = f.createVariable('id_timescale', 'i', ('channels',))
             for (channel, n) in zip(channels, range(len(channels))):
-                try:
-                    temp_v[n] = params.channel_parameters[channel][param]
-                except KeyError: # The parameter was not provided for this channel so we mask the value.
-                    temp_v[n] = fill_value
+                temp_id_timescale[n] = self.variables['id_timescale'][channel]
 
-        # Write the id_timescale values
-        temp_id_timescale = f.createVariable('id_timescale', 'i', ('channels',))
-        for (channel, n) in zip(channels, range(len(channels))):
-            temp_id_timescale[n] = self.variables['id_timescale'][channel]
+            # Laser pointing angle
+            temp_v = f.createVariable('Laser_Pointing_Angle', 'd', ('scan_angles',))
+            temp_v[:] = params.general_parameters['Laser_Pointing_Angle']
+
+            # Molecular calculation
+            temp_v = f.createVariable('Molecular_Calc', 'i')
+            temp_v[:] = params.general_parameters['Molecular_Calc']
 
-        # Laser pointing angle
-        temp_v = f.createVariable('Laser_Pointing_Angle', 'd', ('scan_angles',))
-        temp_v[:] = params.general_parameters['Laser_Pointing_Angle']
+            # Laser pointing angles of profiles
+            temp_v = f.createVariable('Laser_Pointing_Angle_of_Profiles', 'i', ('time', 'nb_of_time_scales'))
+            for (time_scale, n) in zip(self.variables['Raw_Data_Start_Time'],
+                                       range(len(self.variables['Raw_Data_Start_Time']))):
+                temp_v[:len(time_scale), n] = 0  # The lidar has only one laser pointing angle
 
-        # Molecular calculation
-        temp_v = f.createVariable('Molecular_Calc', 'i')
-        temp_v[:] = params.general_parameters['Molecular_Calc']
-
-        # Laser pointing angles of profiles
-        temp_v = f.createVariable('Laser_Pointing_Angle_of_Profiles', 'i', ('time', 'nb_of_time_scales'))
-        for (time_scale, n) in zip(self.variables['Raw_Data_Start_Time'],
-                                   range(len(self.variables['Raw_Data_Start_Time']))):
-            temp_v[:len(time_scale), n] = 0  # The lidar has only one laser pointing angle
+            # Raw data start/stop time
+            temp_raw_start = f.createVariable('Raw_Data_Start_Time', 'i', ('time', 'nb_of_time_scales'))
+            temp_raw_stop = f.createVariable('Raw_Data_Stop_Time', 'i', ('time', 'nb_of_time_scales'))
+            for (start_time, stop_time, n) in zip(self.variables['Raw_Data_Start_Time'],
+                                                  self.variables['Raw_Data_Stop_Time'],
+                                                  range(len(self.variables['Raw_Data_Start_Time']))):
+                temp_raw_start[:len(start_time), n] = start_time
+                temp_raw_stop[:len(stop_time), n] = stop_time
 
-        # Raw data start/stop time
-        temp_raw_start = f.createVariable('Raw_Data_Start_Time', 'i', ('time', 'nb_of_time_scales'))
-        temp_raw_stop = f.createVariable('Raw_Data_Stop_Time', 'i', ('time', 'nb_of_time_scales'))
-        for (start_time, stop_time, n) in zip(self.variables['Raw_Data_Start_Time'],
-                                              self.variables['Raw_Data_Stop_Time'],
-                                              range(len(self.variables['Raw_Data_Start_Time']))):
-            temp_raw_start[:len(start_time), n] = start_time
-            temp_raw_stop[:len(stop_time), n] = stop_time
-
-        # Laser shots
-        try:
-            temp_v = f.createVariable('Laser_Shots', 'i', ('time', 'channels'))
-            for (channel, n) in zip(channels, range(len(channels))):
-                time_length = len(self.variables['Raw_Data_Start_Time'][self.variables['id_timescale'][channel]])
-                # Array slicing stoped working as usual ex. temp_v[:10] = 100 does not work. ??? np.ones was added.
-                temp_v[:time_length, n] = np.ones(time_length) * params.channel_parameters[channel]['Laser_Shots']
-        except RuntimeError:
-            logging.warning("NetCDF variable \"%s\" ignored because it was read from the input files!" % "LaserShots")
+            # Laser shots
+            try:
+                temp_v = f.createVariable('Laser_Shots', 'i', ('time', 'channels'))
+                for (channel, n) in zip(channels, range(len(channels))):
+                    time_length = len(self.variables['Raw_Data_Start_Time'][self.variables['id_timescale'][channel]])
+                    # Array slicing stopped working as usual ex. temp_v[:10] = 100 does not work. ??? np.ones was added.
+                    temp_v[:time_length, n] = np.ones(time_length) * params.channel_parameters[channel]['Laser_Shots']
+            except RuntimeError:
+                logging.warning("NetCDF variable \"%s\" ignored because it was read from the input files!" % "LaserShots")
 
+            # Raw lidar data
+            temp_v = f.createVariable('Raw_Lidar_Data', 'd', ('time', 'channels', 'points'))
+            for (channel, n) in zip(channels, range(len(channels))):
+                c = self.channels[channel]
+                temp_v[:len(c.time), n, :c.points] = c.matrix
 
-        # Raw lidar data
-        temp_v = f.createVariable('Raw_Lidar_Data', 'd', ('time', 'channels', 'points'))
-        for (channel, n) in zip(channels, range(len(channels))):
-            c = self.channels[channel]
-            temp_v[:len(c.time), n, :c.points] = c.matrix
-
-        self.add_dark_measurements_to_netcdf(f, channels)
+            self.add_dark_measurements_to_netcdf(f, channels)
 
-        # Pressure at lidar station
-        temp_v = f.createVariable('Pressure_at_Lidar_Station', 'd')
-        temp_v[:] = self.info['Pressure']
+            # Pressure at lidar station
+            temp_v = f.createVariable('Pressure_at_Lidar_Station', 'd')
+            temp_v[:] = self.info['Pressure']
 
-        # Temperature at lidar station
-        temp_v = f.createVariable('Temperature_at_Lidar_Station', 'd')
-        temp_v[:] = self.info['Temperature']
+            # Temperature at lidar station
+            temp_v = f.createVariable('Temperature_at_Lidar_Station', 'd')
+            temp_v[:] = self.info['Temperature']
 
-        self.save_netcdf_extra(f)
-        f.close()
+            self.save_netcdf_extra(f)
 
     def _get_scc_mandatory_channel_variables(self):
         channel_variables = \
@@ -439,37 +537,46 @@
              'DAQ_Range': (('channels',), 'd'),
              }
         return channel_variables
-        
+
     def _get_provided_extra_parameters(self):
         # When looking for non-mandatory channel parameters, ignore the following parameter names:
         ignore = ['channel_ID', 'channel string ID', 'Depolarization_Factor', 'Laser_Shots']
-        
+
         channels = self.channels.keys()
         params = self.extra_netcdf_parameters.channel_parameters
         mandatory = self._get_scc_mandatory_channel_variables()
-        
+
         # Get all the provided extra parameters (both mandatory and optional):
         provided_extra_parameters = []
         for (channel, n) in zip(channels, range(len(channels))):
             # Check all the mandatory parameters are provided for each of the channels:
             for var in mandatory.keys():
                 if var not in params[channel].keys():
-                    raise ValueError ("Mandatory parameter '{0}' not provided for channel {1}!".format(
+                    raise ValueError("Mandatory parameter '{0}' not provided for channel {1}!".format(
                         var,
                         channel
                     ))
-                    
+
             provided_extra_parameters.extend(params[channel].keys())
-            
+
         provided_extra_parameters = set(provided_extra_parameters)
         # Discard certain parameter names:
         for param in ignore:
             provided_extra_parameters.discard(param)
-            
+
         return provided_extra_parameters
 
     def add_dark_measurements_to_netcdf(self, f, channels):
-
+        """
+        Adds dark measurement variables and properties to an open netCDF file.
+        
+        Parameters
+        ----------
+        f : netcdf Dataset
+           A netCDF Dataset, open for writing.
+        channels : list
+           A list of channels names to consider when adding dark measurements.
+        """
         # Get dark measurements. If it is not given in self.dark_measurement
         # try to get it using the get_dark_measurements method. If none is found
         # return without adding something.
@@ -509,12 +616,12 @@
         f.RawBck_Stop_Time_UT = dark_measurement.info['stop_time'].strftime('%H%M%S')
 
     def save_netcdf_extra(self, f):
+        """ Save extra netCDF parameters to an open netCDF file. 
+        
+        If required, this method should be overwritten by subclasses of BaseLidarMeasurement.
+        """
         pass
 
-    def _gettime(self, date_str, time_str):
-        t = datetime.datetime.strptime(date_str + time_str, '%d/%m/%Y%H.%M.%S')
-        return t
-
     def plot(self):
         for channel in self.channels:
             self.channels[channel].plot(show_plot=False)
@@ -522,32 +629,24 @@
 
     def get_dark_measurements(self):
         return None
-        
-    def getCustomGeneralParameters(self):
-        """
-        Abstract method to provide custom NetCDF parameters
-        that should be included in the final NetCDF file.
-        This method should be implemented by subclasses of
-        BaseLidarMeasurement.
+
+    def get_custom_general_parameters(self):
         """
-        return []
+        Abstract method to provide custom NetCDF parameters that should be included 
+        in the final NetCDF file.
         
-    def getCustomChannelParameters(self):
-        """
-        Abstract method to provide custom NetCDF parameters
-        for the channels in this measurement that should be
-        included in the final NetCDF file. This method should
-        be implemented by subclasses of BaseLidarMeasurement.
+        If required, this method should be implemented by subclasses of BaseLidarMeasurement.
         """
         return []
 
-    @property
-    def mean_time(self):
-        start_time = self.info['start_time']
-        stop_time = self.info['stop_time']
-        dt = stop_time - start_time
-        t_mean = start_time + dt / 2
-        return t_mean
+    def get_custom_channel_parameters(self):
+        """
+        Abstract method to provide custom NetCDF parameters for the channels in this 
+        measurement that should be included in the final NetCDF file. 
+        
+        If required, this method should be implemented by subclasses of BaseLidarMeasurement.
+        """
+        return []
 
 
 class LidarChannel:
@@ -616,7 +715,7 @@
 
     def subset_by_bins(self, b_min=0, b_max=None):
         """Remove some height bins from the file. This could be needed to 
-        remove aquisition artifacts at the start or the end of the files.
+        remove acquisition artifacts at the start or the end of the files.
         """
 
         subset_data = {}
@@ -704,7 +803,7 @@
         else:
             data = self.matrix
 
-        hmax_idx = self.index_at_height(zoom[1])
+        hmax_idx = self._index_at_height(zoom[1])
 
         # If z0 is given, then the plot is a.s.l.
         if z0:
@@ -770,8 +869,8 @@
         else:
             data = self.matrix
 
-        hmax_idx = self.index_at_height(zoom[1])
-        hmin_idx = self.index_at_height(zoom[0])
+        hmax_idx = self._index_at_height(zoom[1])
+        hmin_idx = self._index_at_height(zoom[0])
 
         # If z0 is given, then the plot is a.s.l.
         if z0:
@@ -785,7 +884,6 @@
         # dateFormatter = mpl.dates.DateFormatter('%H.%M')
         # hourlocator = mpl.dates.HourLocator()
 
-
         if date_labels:
             dayFormatter = mpl.dates.DateFormatter('%H:%M\n%d/%m/%Y')
             daylocator = mpl.dates.AutoDateLocator(minticks=3, maxticks=8, interval_multiples=True)
@@ -835,17 +933,8 @@
                 ticklabels.set_fontsize(cb_font_size)
             cb1.ax.yaxis.get_offset_text().set_fontsize(cb_font_size)
 
-    def index_at_height(self, height):
+    def _index_at_height(self, height):
         idx = np.array(np.abs(self.z - height).argmin())
         if idx.size > 1:
             idx = idx[0]
         return idx
-
-
-def netcdf_from_files(LidarClass, filename, files, channels, measurement_ID):
-    # Read the lidar files and select channels
-    temp_m = LidarClass(files)
-    m = temp_m.subset_by_channels(channels)
-    m.get_PT()
-    m.info['Measurement_ID'] = measurement_ID
-    m.save_as_netcdf(filename)
--- a/atmospheric_lidar/licel.py	Wed Dec 06 11:50:41 2017 +0200
+++ b/atmospheric_lidar/licel.py	Wed Dec 06 13:36:42 2017 +0200
@@ -197,11 +197,11 @@
     durations = {}  # Keep the duration of the files
     laser_shots = []
 
-    def __init__(self, filelist=None, use_id_as_name=False):
+    def __init__(self, file_list=None, use_id_as_name=False):
         self.use_id_as_name = use_id_as_name
-        super(LicelLidarMeasurement, self).__init__(filelist)
+        super(LicelLidarMeasurement, self).__init__(file_list)
 
-    def import_file(self, filename):
+    def _import_file(self, filename):
         if filename in self.files:
             logging.warning("File has been imported already: %s" % filename)
         else:
@@ -229,10 +229,10 @@
             channel.append(other.channels[channel_name])
 
     def _get_duration(self, raw_start_in_seconds):
-        ''' Return the duration for a given time scale. If only a single
+        """ Return the duration for a given time scale. If only a single
         file is imported, then this cannot be guessed from the time difference
         and the raw_info of the file are checked.
-        '''
+        """
 
         if len(raw_start_in_seconds) == 1:  # If only one file imported
             duration = self.durations.itervalues().next()  # Get the first (and only) raw_info
@@ -242,7 +242,7 @@
 
         return duration_sec
         
-    def getCustomChannelParameters(self):
+    def get_custom_channel_parameters(self):
         params = [{
                 "name": "DAQ_Range",
                 "dimensions": ('channels',),
@@ -263,7 +263,7 @@
         
         return params
         
-    def getCustomGeneralParameters(self):
+    def get_custom_general_parameters(self):
         params = [{
                 "name": "Altitude_meter_asl",
                 "value": self.raw_info[ self.files[0] ]["Altitude"]
--- a/atmospheric_lidar/licel_depol.py	Wed Dec 06 11:50:41 2017 +0200
+++ b/atmospheric_lidar/licel_depol.py	Wed Dec 06 13:36:42 2017 +0200
@@ -40,10 +40,10 @@
 
         # Read plus and minus 45 measurements
         self.plus45_measurement = LicelLidarMeasurement(self.plus45_files, self.use_id_as_name)
-        self.plus45_measurement.rename_channel(suffix='_p45')
+        self.plus45_measurement.rename_channels(suffix='_p45')
 
         self.minus45_measurement = LicelLidarMeasurement(self.minus45_files, self.use_id_as_name)
-        self.minus45_measurement.rename_channel(suffix='_m45')
+        self.minus45_measurement.rename_channels(suffix='_m45')
 
         # Combine them in this object
         self.channels = {}
--- a/atmospheric_lidar/scripts/licel2scc.py	Wed Dec 06 11:50:41 2017 +0200
+++ b/atmospheric_lidar/scripts/licel2scc.py	Wed Dec 06 13:36:42 2017 +0200
@@ -38,10 +38,10 @@
     class CustomLidarMeasurement(LicelLidarMeasurement):
         extra_netcdf_parameters = custom_netcdf_parameters
 
-        def __init__(self, filelist=None):
-            super(CustomLidarMeasurement, self).__init__(filelist, use_id_as_name)
+        def __init__(self, file_list=None):
+            super(CustomLidarMeasurement, self).__init__(file_list, use_id_as_name)
 
-        def get_PT(self):
+        def set_PT(self):
             ''' Sets the pressure and temperature at station level. This is used if molecular_calc parameter is
             set to 0 (i.e. use US Standard atmosphere).
 
@@ -147,6 +147,6 @@
     # Save the netcdf
     logger.info("Saving netcdf")
     measurement.set_measurement_id(args.measurement_id, args.measurement_number)
-    measurement.save_as_netcdf()
+    measurement.save_as_SCC_netcdf()
     logger.info("Created file %s" % measurement.scc_filename)
 
--- a/atmospheric_lidar/scripts/licel2scc_depol.py	Wed Dec 06 11:50:41 2017 +0200
+++ b/atmospheric_lidar/scripts/licel2scc_depol.py	Wed Dec 06 13:36:42 2017 +0200
@@ -41,7 +41,7 @@
         def __init__(self, plus45_files=None, minus45_files=None):
             super(CustomLidarMeasurement, self).__init__(plus45_files, minus45_files, use_id_as_name)
 
-        def get_PT(self):
+        def set_PT(self):
             ''' Sets the pressure and temperature at station level. This is used if molecular_calc parameter is
             set to 0 (i.e. use US Standard atmosphere).
 
@@ -136,5 +136,5 @@
     # Save the netcdf
     logger.info("Saving netcdf")
     measurement.set_measurement_id(args.measurement_id, args.measurement_number)
-    measurement.save_as_netcdf()
+    measurement.save_as_SCC_netcdf()
     logger.info("Created file %s" % measurement.scc_filename)
--- a/atmospheric_lidar/systems/eole/eole.py	Wed Dec 06 11:50:41 2017 +0200
+++ b/atmospheric_lidar/systems/eole/eole.py	Wed Dec 06 13:36:42 2017 +0200
@@ -5,7 +5,7 @@
 class EoleLidarMeasurement(LicelLidarMeasurement):
     extra_netcdf_parameters = eole_netcdf_parameters
 
-    def get_PT(self):
+    def set_PT(self):
         ''' Sets the pressure and temperature at station level .
         The results are stored in the info dictionary.        
         '''
--- a/atmospheric_lidar/systems/ipral/ipral.py	Wed Dec 06 11:50:41 2017 +0200
+++ b/atmospheric_lidar/systems/ipral/ipral.py	Wed Dec 06 13:36:42 2017 +0200
@@ -5,10 +5,10 @@
 class IpralLidarMeasurement(LicelLidarMeasurement):
     extra_netcdf_parameters = ipral_netcdf_parameters
 
-    def __init__(self, filelist=None, use_id_as_name=True):
-        super(IpralLidarMeasurement, self).__init__(filelist, use_id_as_name)
+    def __init__(self, file_list=None, use_id_as_name=True):
+        super(IpralLidarMeasurement, self).__init__(file_list, use_id_as_name)
 
-    def get_PT(self):
+    def set_PT(self):
         ''' Sets the pressure and temperature at station level .
         The results are stored in the info dictionary.        
         '''
--- a/atmospheric_lidar/systems/lilas/lilas.py	Wed Dec 06 11:50:41 2017 +0200
+++ b/atmospheric_lidar/systems/lilas/lilas.py	Wed Dec 06 13:36:42 2017 +0200
@@ -5,7 +5,7 @@
 class LilasLidarMeasurement(LicelLidarMeasurement):
     extra_netcdf_parameters = lilas_netcdf_parameters
 
-    def get_PT(self):
+    def set_PT(self):
         ''' Sets the pressure and temperature at station level .
         The results are stored in the info dictionary.        
         '''
--- a/atmospheric_lidar/systems/pearl/pearl.py	Wed Dec 06 11:50:41 2017 +0200
+++ b/atmospheric_lidar/systems/pearl/pearl.py	Wed Dec 06 13:36:42 2017 +0200
@@ -18,14 +18,14 @@
     
     extra_netcdf_parameters = pearl_netcdf_parameters
     
-    def import_file(self,filename):
+    def _import_file(self, filename):
         ''' Import a pearl file. '''
         
         if filename in self.files:
             print "File has been imported already:" + filename
         else:
             parameters, channels_dict = self.read_pearl_data(filename)
-            start_time = self._gettime(parameters['Acq_date'],parameters['Acq_start_time'])
+            start_time = self._get_time(parameters['Acq_date'], parameters['Acq_start_time'])
             
             for channel_info in channels_dict.itervalues():
                 
@@ -81,6 +81,9 @@
         f.close()
         return parameters,channels
                 
+    def _get_time(self, date_str, time_str):
+        t = datetime.datetime.strptime(date_str + time_str, '%d/%m/%Y%H.%M.%S')
+        return t
 
 def get_measurement_for_interval(start_time, stop_time):
     ''' Searches for a pearl measurement based on a time interval     
--- a/atmospheric_lidar/systems/rali/rali.py	Wed Dec 06 11:50:41 2017 +0200
+++ b/atmospheric_lidar/systems/rali/rali.py	Wed Dec 06 13:36:42 2017 +0200
@@ -7,7 +7,7 @@
 class RaliLidarMeasurement(LicelLidarMeasurement):
     extra_netcdf_parameters = rali_netcdf_parameters
     
-    def get_PT(self):
+    def set_PT(self):
         ''' Gets the pressure and temperature from Radiometer data.
         If no data file is found, mean values from past measurements are 
         used.
--- a/example_scripts/convert_ipral.py	Wed Dec 06 11:50:41 2017 +0200
+++ b/example_scripts/convert_ipral.py	Wed Dec 06 13:36:42 2017 +0200
@@ -68,7 +68,7 @@
         #Save the netcdf
         print "Saving netcdf."
         measurement.set_measurement_id(args.measurement_id, args.measurement_number)
-        measurement.save_as_netcdf()
+        measurement.save_as_SCC_netcdf()
         print "Created file ", measurement.scc_filename
     else:
         print "No files found when searching for ", search_str
\ No newline at end of file
--- a/example_scripts/convert_lilas.py	Wed Dec 06 11:50:41 2017 +0200
+++ b/example_scripts/convert_lilas.py	Wed Dec 06 13:36:42 2017 +0200
@@ -68,7 +68,7 @@
         #Save the netcdf
         print "Saving netcdf."
         measurement.set_measurement_id(args.measurement_id, args.measurement_number)
-        measurement.save_as_netcdf()
+        measurement.save_as_SCC_netcdf()
         print "Created file ", measurement.scc_filename
     else:
         print "No files found when searching for ", search_str
\ No newline at end of file
--- a/readme.rst	Wed Dec 06 11:50:41 2017 +0200
+++ b/readme.rst	Wed Dec 06 13:36:42 2017 +0200
@@ -176,6 +176,6 @@
 
 .. code-block:: python
 
-   my_measurement.save_as_netcdf("filename")
+   my_measurement.save_as_SCC_netcdf("filename")
 
 where you change the output filename to the filename you want to use.
\ No newline at end of file

mercurial