Merge from 51:29cfdff59777

Thu, 16 Feb 2017 10:27:30 +0200

author
Iannis <ulalume3@yahoo.com>
date
Thu, 16 Feb 2017 10:27:30 +0200
changeset 52
fdf4373db30e
parent 51
d91b87f26354 (current diff)
parent 50
510d7ac14280 (diff)
child 53
cd7cffb43bfd

Merge from 51:29cfdff59777

example_scripts/convert_ipral.py file | annotate | diff | comparison | revisions
example_scripts/convert_lilas.py file | annotate | diff | comparison | revisions
--- a/.hgignore	Thu Feb 16 10:27:03 2017 +0200
+++ b/.hgignore	Thu Feb 16 10:27:30 2017 +0200
@@ -6,3 +6,5 @@
 *.orig
 .idea/*
 re:^atmospheric_lidar\.egg-info/
+*.nc
+re:^atmospheric_lidar_reader\.egg-info/
--- a/LICENCE.txt	Thu Feb 16 10:27:03 2017 +0200
+++ b/LICENCE.txt	Thu Feb 16 10:27:30 2017 +0200
@@ -1,6 +1,6 @@
 The MIT License (MIT)
 
-Copyright (c) 2015, Ioannis Binietoglou
+Copyright (c) 2017, Ioannis Binietoglou
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
--- a/atmospheric_lidar/generic.py	Thu Feb 16 10:27:03 2017 +0200
+++ b/atmospheric_lidar/generic.py	Thu Feb 16 10:27:30 2017 +0200
@@ -9,7 +9,7 @@
 from matplotlib import pyplot as plt
 import netCDF4 as netcdf
 
-netcdf_format = 'NETCDF3_CLASSIC' # choose one of 'NETCDF3_CLASSIC', 'NETCDF3_64BIT', 'NETCDF4_CLASSIC' and 'NETCDF4'
+netcdf_format = 'NETCDF3_CLASSIC'  # choose one of 'NETCDF3_CLASSIC', 'NETCDF3_64BIT', 'NETCDF4_CLASSIC' and 'NETCDF4'
 
 
 class BaseLidarMeasurement(object):
@@ -25,25 +25,25 @@
     The one implemented by default is by using the MILOS meteorological station data. 
     
     """
-    
-    def __init__(self, filelist = None):
+
+    def __init__(self, filelist=None):
         self.info = {}
         self.dimensions = {}
-        self.variables =  {}
+        self.variables = {}
         self.channels = {}
         self.attributes = {}
         self.files = []
         self.dark_measurement = None
-        
+
         if filelist:
             self.import_files(filelist)
-        
+
     def import_files(self, filelist):
         for f in filelist:
             self.import_file(f)
         self.update()
 
-    def import_file(self,filename):
+    def import_file(self, filename):
         raise NotImplementedError('Importing files should be defined in the instrument-specific subclass.')
 
     def update(self):
@@ -53,14 +53,14 @@
         
         Reading of the scan_angles parameter is not implemented.
         '''
-        
+
         # Initialize
-        start_time =[]
+        start_time = []
         stop_time = []
         points = []
         all_time_scales = []
         channel_name_list = []
-        
+
         # Get the information from all the channels
         for channel_name, channel in self.channels.items():
             channel.update()
@@ -69,102 +69,102 @@
             points.append(channel.points)
             all_time_scales.append(channel.time)
             channel_name_list.append(channel_name)
-            
+
         # Find the unique time scales used in the channels
         time_scales = set(all_time_scales)
-        
+
         # Update the info dictionary
         self.info['start_time'] = min(start_time)
         self.info['stop_time'] = max(stop_time)
         self.info['duration'] = self.info['stop_time'] - self.info['start_time']
-        
+
         # Update the dimensions dictionary
         self.dimensions['points'] = max(points)
         self.dimensions['channels'] = len(self.channels)
         # self.dimensions['scan angles'] = 1
         self.dimensions['nb_of_time_scales'] = len(time_scales)
-        
+
         # Update the variables dictionary
         # Write time scales in seconds
         raw_Data_Start_Time = []
         raw_Data_Stop_Time = []
-        
-        for current_time_scale in list(time_scales): 
-            raw_start_time = np.array(current_time_scale) - min(start_time) # Time since start_time
-            raw_start_in_seconds = np.array([t.seconds for t in raw_start_time]) # Convert in seconds
-            raw_Data_Start_Time.append(raw_start_in_seconds) # And add to the list
+
+        for current_time_scale in list(time_scales):
+            raw_start_time = np.array(current_time_scale) - min(start_time)  # Time since start_time
+            raw_start_in_seconds = np.array([t.seconds for t in raw_start_time])  # Convert in seconds
+            raw_Data_Start_Time.append(raw_start_in_seconds)  # And add to the list
             # Check if this time scale has measurements every 30 or 60 seconds.
-            
+
             duration = self._get_duration(raw_start_in_seconds)
-            
+
             raw_stop_in_seconds = raw_start_in_seconds + duration
             raw_Data_Stop_Time.append(raw_stop_in_seconds)
-            
+
         self.variables['Raw_Data_Start_Time'] = raw_Data_Start_Time
         self.variables['Raw_Data_Stop_Time'] = raw_Data_Stop_Time
-    
+
         # Make a dictionary to match time scales and channels
         channel_timescales = []
         for (channel_name, current_time_scale) in zip(channel_name_list, all_time_scales):
             # The following lines are PEARL specific. The reason they are here is not clear.
             # if channel_name =='1064BLR':
             #     channel_name = '1064'
-            for (ts,n) in zip(time_scales, range(len(time_scales))):
+            for (ts, n) in zip(time_scales, range(len(time_scales))):
                 if current_time_scale == ts:
-                    channel_timescales.append([channel_name,n])
+                    channel_timescales.append([channel_name, n])
         self.variables['id_timescale'] = dict(channel_timescales)
-    
+
     def _get_duration(self, raw_start_in_seconds):
         ''' Return the duration for a given time scale. In some files (e.g. Licel) this
         can be specified from the files themselves. In others this must be guessed.
          
         '''
         # The old method, kept here for reference
-        #dt = np.mean(np.diff(raw_start_in_seconds))
-        #for d in duration_list:
+        # dt = np.mean(np.diff(raw_start_in_seconds))
+        # for d in duration_list:
         #    if abs(dt - d) <15: #If the difference of measurements is 10s near the(30 or 60) seconds
         #        duration = d
-        
+
         duration = np.diff(raw_start_in_seconds)[0]
 
         return duration
-    
+
     def subset_by_channels(self, channel_subset):
         ''' Get a measurement object containing only the channels with names
         contained in the channel_sublet list '''
-        
+
         m = self.__class__()  # Create an object of the same type as this one.
-        m.channels = dict([(channel, self.channels[channel]) for channel 
-                            in channel_subset])
+        m.channels = dict([(channel, self.channels[channel]) for channel
+                           in channel_subset])
         m.update()
         return m
-        
+
     def subset_by_time(self, start_time, stop_time):
-    
+
         if start_time > stop_time:
             raise ValueError('Stop time should be after start time')
-            
+
         if (start_time < self.info['start_time']) or (stop_time > self.info['stop_time']):
             raise ValueError('The time interval specified is not part of the measurement')
 
-        m = self.__class__() # Create an object of the same type as this one.
-        for (channel_name, channel)  in self.channels.items():
+        m = self.__class__()  # Create an object of the same type as this one.
+        for (channel_name, channel) in self.channels.items():
             m.channels[channel_name] = channel.subset_by_time(start_time, stop_time)
         m.update()
         return m
-    
-    def subset_by_bins(self, b_min = 0, b_max = None):
+
+    def subset_by_bins(self, b_min=0, b_max=None):
         """Remove some height bins from the file. This could be needed to 
         remove aquisition artifacts at the start or the end of the files.
         """
-        
-        m = self.__class__() # Create an object of the same type as this one.
-        
-        for (channel_name, channel)  in self.channels.items():
+
+        m = self.__class__()  # Create an object of the same type as this one.
+
+        for (channel_name, channel) in self.channels.items():
             m.channels[channel_name] = channel.subset_by_bins(b_min, b_max)
-        
+
         m.update()
-        
+
         return m
 
     def rename_channel(self, prefix="", suffix=""):
@@ -184,22 +184,22 @@
         ''' Sets the pressure and temperature at station level .
         The results are stored in the info dictionary.        
         '''
-    
+
         self.info['Temperature'] = 10.0
         self.info['Pressure'] = 930.0
-        
+
     def subtract_dark(self):
-        
+
         if not self.dark_measurement:
             raise IOError('No dark measurements have been imported yet.')
-        
+
         for (channel_name, dark_channel) in self.dark_measurement.channels.iteritems():
             dark_profile = dark_channel.average_profile()
             channel = self.channels[channel_name]
-            
+
             for measurement_time, data in channel.data.iteritems():
                 channel.data[measurement_time] = data - dark_profile
-                
+
             channel.update()
 
     def set_measurement_id(self, measurement_id=None, measurement_number="00"):
@@ -221,19 +221,18 @@
             try:
                 earlinet_station_id = self.extra_netcdf_parameters.general_parameters['Call sign']
             except:
-                raise ValueError("No valid SCC netcdf parameters found. Did you define create the proper subclass?")
+                raise ValueError("No valid SCC netcdf parameters found. Did you define the proper subclass?")
             measurement_id = "{0}{1}{2}".format(date_str, earlinet_station_id, measurement_number)
 
         self.info['Measurement_ID'] = measurement_id
 
-    def save_as_netcdf(self, filename = None):
+    def save_as_netcdf(self, filename=None):
         """Saves the measurement in the netcdf format as required by the SCC.
         Input: filename. If no filename is provided <measurement_id>.nc will
                be used. 
         """
         params = self.extra_netcdf_parameters
 
-
         # Guess measurement ID if none is provided
         if 'Measurement_ID' not in self.info:
             self.set_measurement_id()
@@ -246,124 +245,124 @@
                     self.get_PT()
                 except:
                     raise ValueError('A value needs to be specified for %s' % parameter)
-        
+
         if not filename:
             filename = "%s.nc" % self.info['Measurement_ID']
 
         self.scc_filename = filename
 
         dimensions = {'points': 1,
-                'channels': 1,
-                'time': None,
-                'nb_of_time_scales': 1,
-                'scan_angles': 1,}  # Mandatory dimensions. Time bck not implemented
+                      'channels': 1,
+                      'time': None,
+                      'nb_of_time_scales': 1,
+                      'scan_angles': 1, }  # Mandatory dimensions. Time bck not implemented
 
         global_att = {'Measurement_ID': None,
-                    'RawData_Start_Date': None,
-                    'RawData_Start_Time_UT': None,
-                    'RawData_Stop_Time_UT': None,
-                    'RawBck_Start_Date': None,
-                    'RawBck_Start_Time_UT': None,
-                    'RawBck_Stop_Time_UT': None,
-                    'Sounding_File_Name': None,
-                    'LR_File_Name': None,
-                    'Overlap_File_Name': None,
-                    'Location': None,
-                    'System': None,
-                    'Latitude_degrees_north': None,
-                    'Longitude_degrees_east': None,
-                    'Altitude_meter_asl': None}
+                      'RawData_Start_Date': None,
+                      'RawData_Start_Time_UT': None,
+                      'RawData_Stop_Time_UT': None,
+                      'RawBck_Start_Date': None,
+                      'RawBck_Start_Time_UT': None,
+                      'RawBck_Stop_Time_UT': None,
+                      'Sounding_File_Name': None,
+                      'LR_File_Name': None,
+                      'Overlap_File_Name': None,
+                      'Location': None,
+                      'System': None,
+                      'Latitude_degrees_north': None,
+                      'Longitude_degrees_east': None,
+                      'Altitude_meter_asl': None}
 
         channel_variables = self._get_scc_channel_variables()
 
         channels = self.channels.keys()
 
         input_values = dict(self.dimensions, **self.variables)
-        
+
         # Add some mandatory global attributes
         input_values['Measurement_ID'] = self.info['Measurement_ID']
         input_values['RawData_Start_Date'] = self.info['start_time'].strftime('%Y%m%d')
         input_values['RawData_Start_Time_UT'] = self.info['start_time'].strftime('%H%M%S')
         input_values['RawData_Stop_Time_UT'] = self.info['stop_time'].strftime('%H%M%S')
-        
+
         # Add some optional global attributes
         input_values['System'] = params.general_parameters['System']
         input_values['Latitude_degrees_north'] = params.general_parameters['Latitude_degrees_north']
         input_values['Longitude_degrees_east'] = params.general_parameters['Longitude_degrees_east']
         input_values['Altitude_meter_asl'] = params.general_parameters['Altitude_meter_asl']
-        
+
         # Open a netCDF4 file
-        f = netcdf.Dataset(filename,'w', format = netcdf_format) # the format is specified in the begining of the file
-        
+        f = netcdf.Dataset(filename, 'w', format=netcdf_format)  # the format is specified in the begining of the file
+
         # Create the dimensions in the file
-        for (d,v) in dimensions.iteritems():
+        for (d, v) in dimensions.iteritems():
             v = input_values.pop(d, v)
-            f.createDimension(d,v)
-        
+            f.createDimension(d, v)
+
         # Create global attributes
-        for (attrib,value) in global_att.iteritems():
-            val = input_values.pop(attrib,value)
-            if val: 
+        for (attrib, value) in global_att.iteritems():
+            val = input_values.pop(attrib, value)
+            if val:
                 setattr(f, attrib, val)
-        
+
         """ Variables """
         # Write the values of fixes channel parameters
-        for (var,t) in channel_variables.iteritems():
-            temp_v = f.createVariable(var,t[1],t[0])
+        for (var, t) in channel_variables.iteritems():
+            temp_v = f.createVariable(var, t[1], t[0])
             for (channel, n) in zip(channels, range(len(channels))):
                 temp_v[n] = params.channel_parameters[channel][var]
-        
+
         # Write the id_timescale values
-        temp_id_timescale = f.createVariable('id_timescale','i',('channels',))
+        temp_id_timescale = f.createVariable('id_timescale', 'i', ('channels',))
         for (channel, n) in zip(channels, range(len(channels))):
             temp_id_timescale[n] = self.variables['id_timescale'][channel]
-        
+
         # Laser pointing angle
-        temp_v = f.createVariable('Laser_Pointing_Angle','d',('scan_angles',))
+        temp_v = f.createVariable('Laser_Pointing_Angle', 'd', ('scan_angles',))
         temp_v[:] = params.general_parameters['Laser_Pointing_Angle']
-        
+
         # Molecular calculation
-        temp_v = f.createVariable('Molecular_Calc','i')
-        temp_v[:] = params.general_parameters['Molecular_Calc']    
+        temp_v = f.createVariable('Molecular_Calc', 'i')
+        temp_v[:] = params.general_parameters['Molecular_Calc']
 
         # Laser pointing angles of profiles
-        temp_v = f.createVariable('Laser_Pointing_Angle_of_Profiles','i',('time','nb_of_time_scales'))
-        for (time_scale,n) in zip(self.variables['Raw_Data_Start_Time'],
-                                  range(len(self.variables['Raw_Data_Start_Time']))):
+        temp_v = f.createVariable('Laser_Pointing_Angle_of_Profiles', 'i', ('time', 'nb_of_time_scales'))
+        for (time_scale, n) in zip(self.variables['Raw_Data_Start_Time'],
+                                   range(len(self.variables['Raw_Data_Start_Time']))):
             temp_v[:len(time_scale), n] = 0  # The lidar has only one laser pointing angle
-        
+
         # Raw data start/stop time
-        temp_raw_start = f.createVariable('Raw_Data_Start_Time','i',('time','nb_of_time_scales'))
-        temp_raw_stop = f.createVariable('Raw_Data_Stop_Time','i',('time','nb_of_time_scales'))
-        for (start_time, stop_time,n) in zip(self.variables['Raw_Data_Start_Time'],
-                                             self.variables['Raw_Data_Stop_Time'],
-                                             range(len(self.variables['Raw_Data_Start_Time']))):
-            temp_raw_start[:len(start_time),n] = start_time
-            temp_raw_stop[:len(stop_time),n] = stop_time
+        temp_raw_start = f.createVariable('Raw_Data_Start_Time', 'i', ('time', 'nb_of_time_scales'))
+        temp_raw_stop = f.createVariable('Raw_Data_Stop_Time', 'i', ('time', 'nb_of_time_scales'))
+        for (start_time, stop_time, n) in zip(self.variables['Raw_Data_Start_Time'],
+                                              self.variables['Raw_Data_Stop_Time'],
+                                              range(len(self.variables['Raw_Data_Start_Time']))):
+            temp_raw_start[:len(start_time), n] = start_time
+            temp_raw_stop[:len(stop_time), n] = stop_time
 
-        #Laser shots
-        temp_v =  f.createVariable('Laser_Shots','i',('time','channels'))
-        for (channel,n) in zip(channels, range(len(channels))):
+        # Laser shots
+        temp_v = f.createVariable('Laser_Shots', 'i', ('time', 'channels'))
+        for (channel, n) in zip(channels, range(len(channels))):
             time_length = len(self.variables['Raw_Data_Start_Time'][self.variables['id_timescale'][channel]])
             # Array slicing stoped working as usual ex. temp_v[:10] = 100 does not work. ??? np.ones was added.
             temp_v[:time_length, n] = np.ones(time_length) * params.channel_parameters[channel]['Laser_Shots']
 
-        #Raw lidar data
-        temp_v = f.createVariable('Raw_Lidar_Data','d',('time', 'channels','points'))
-        for (channel,n) in zip(channels, range(len(channels))):
+        # Raw lidar data
+        temp_v = f.createVariable('Raw_Lidar_Data', 'd', ('time', 'channels', 'points'))
+        for (channel, n) in zip(channels, range(len(channels))):
             c = self.channels[channel]
-            temp_v[:len(c.time),n, :c.points] = c.matrix
-        
+            temp_v[:len(c.time), n, :c.points] = c.matrix
+
         self.add_dark_measurements_to_netcdf(f, channels)
-            
-        #Pressure at lidar station
-        temp_v = f.createVariable('Pressure_at_Lidar_Station','d')
+
+        # Pressure at lidar station
+        temp_v = f.createVariable('Pressure_at_Lidar_Station', 'd')
         temp_v[:] = self.info['Pressure']
 
-        #Temperature at lidar station
-        temp_v = f.createVariable('Temperature_at_Lidar_Station','d')
-        temp_v[:] = self.info['Temperature']   
-        
+        # Temperature at lidar station
+        temp_v = f.createVariable('Temperature_at_Lidar_Station', 'd')
+        temp_v[:] = self.info['Temperature']
+
         self.save_netcdf_extra(f)
         f.close()
 
@@ -384,34 +383,34 @@
         # return without adding something.
         if self.dark_measurement is None:
             self.dark_measurement = self.get_dark_measurements()
-        
+
         if self.dark_measurement is None:
             return
-        
+
         dark_measurement = self.dark_measurement
-        
+
         # Calculate the length of the time_bck dimensions
         number_of_profiles = [len(c.time) for c in dark_measurement.channels.values()]
         max_number_of_profiles = np.max(number_of_profiles)
-        
+
         # Create the dimension
         f.createDimension('time_bck', max_number_of_profiles)
-         
+
         # Save the dark measurement data
-        temp_v = f.createVariable('Background_Profile','d',('time_bck', 'channels', 'points'))
-        for (channel,n) in zip(channels, range(len(channels))):
+        temp_v = f.createVariable('Background_Profile', 'd', ('time_bck', 'channels', 'points'))
+        for (channel, n) in zip(channels, range(len(channels))):
             c = dark_measurement.channels[channel]
-            temp_v[:len(c.time),n, :c.points] = c.matrix
-        
+            temp_v[:len(c.time), n, :c.points] = c.matrix
+
         # Dark profile start/stop time
-        temp_raw_start = f.createVariable('Raw_Bck_Start_Time','i',('time_bck','nb_of_time_scales'))
-        temp_raw_stop = f.createVariable('Raw_Bck_Stop_Time','i',('time_bck','nb_of_time_scales'))
-        for (start_time, stop_time,n) in zip(dark_measurement.variables['Raw_Data_Start_Time'],
-                                             dark_measurement.variables['Raw_Data_Stop_Time'],
-                                             range(len(dark_measurement.variables['Raw_Data_Start_Time']))):
-            temp_raw_start[:len(start_time),n] = start_time
-            temp_raw_stop[:len(stop_time),n] = stop_time
-        
+        temp_raw_start = f.createVariable('Raw_Bck_Start_Time', 'i', ('time_bck', 'nb_of_time_scales'))
+        temp_raw_stop = f.createVariable('Raw_Bck_Stop_Time', 'i', ('time_bck', 'nb_of_time_scales'))
+        for (start_time, stop_time, n) in zip(dark_measurement.variables['Raw_Data_Start_Time'],
+                                              dark_measurement.variables['Raw_Data_Stop_Time'],
+                                              range(len(dark_measurement.variables['Raw_Data_Start_Time']))):
+            temp_raw_start[:len(start_time), n] = start_time
+            temp_raw_stop[:len(stop_time), n] = stop_time
+
         # Dark measurement start/stop time
         f.RawBck_Start_Date = dark_measurement.info['start_time'].strftime('%Y%m%d')
         f.RawBck_Start_Time_UT = dark_measurement.info['start_time'].strftime('%H%M%S')
@@ -419,19 +418,19 @@
 
     def save_netcdf_extra(self, f):
         pass
-        
-    def _gettime(self, date_str, time_str):        
-        t = datetime.datetime.strptime(date_str+time_str,'%d/%m/%Y%H.%M.%S')
+
+    def _gettime(self, date_str, time_str):
+        t = datetime.datetime.strptime(date_str + time_str, '%d/%m/%Y%H.%M.%S')
         return t
-    
+
     def plot(self):
         for channel in self.channels:
-            self.channels[channel].plot(show_plot = False)
+            self.channels[channel].plot(show_plot=False)
         plt.show()
-    
+
     def get_dark_measurements(self):
         return None
-    
+
     @property
     def mean_time(self):
         start_time = self.info['start_time']
@@ -442,7 +441,6 @@
 
 
 class LidarChannel:
-
     def __init__(self, channel_parameters):
         c = 299792458  # Speed of light
         self.wavelength = channel_parameters['name']
@@ -450,52 +448,53 @@
         self.binwidth = float(channel_parameters['binwidth'])  # in microseconds
         self.data = {}
         self.resolution = self.binwidth * c / 2
-        self.z = np.arange(len(channel_parameters['data'])) * self.resolution + self.resolution / 2.0 # Change: add half bin in the z 
+        self.z = np.arange(
+            len(channel_parameters['data'])) * self.resolution + self.resolution / 2.0  # Change: add half bin in the z
         self.points = len(channel_parameters['data'])
         self.rc = []
-        self.duration  = 60
-        
-    def calculate_rc(self, idx_min = 4000, idx_max = 5000):
-        background = np.mean(self.matrix[:,idx_min:idx_max], axis = 1)  # Calculate the background from 30000m and above
-        self.rc = (self.matrix.transpose()- background).transpose() * (self.z **2)
-    
+        self.duration = 60
+
+    def calculate_rc(self, idx_min=4000, idx_max=5000):
+        background = np.mean(self.matrix[:, idx_min:idx_max], axis=1)  # Calculate the background from 30000m and above
+        self.rc = (self.matrix.transpose() - background).transpose() * (self.z ** 2)
+
     def update(self):
         self.start_time = min(self.data.keys())
-        self.stop_time = max(self.data.keys()) + datetime.timedelta(seconds = self.duration)
+        self.stop_time = max(self.data.keys()) + datetime.timedelta(seconds=self.duration)
         self.time = tuple(sorted(self.data.keys()))
         sorted_data = sorted(self.data.iteritems(), key=itemgetter(0))
-        self.matrix =  np.array(map(itemgetter(1),sorted_data))
-    
-    def _nearest_dt(self,dtime):
-        margin = datetime.timedelta(seconds = 300)        
-        if ((dtime + margin) < self.start_time)| ((dtime - margin)   > self.stop_time):
+        self.matrix = np.array(map(itemgetter(1), sorted_data))
+
+    def _nearest_dt(self, dtime):
+        margin = datetime.timedelta(seconds=300)
+        if ((dtime + margin) < self.start_time) | ((dtime - margin) > self.stop_time):
             print "Requested date not covered in this file"
             raise
         dt = abs(self.time - np.array(dtime))
         dtmin = min(dt)
 
-        if dtmin > datetime.timedelta(seconds = 60):
+        if dtmin > datetime.timedelta(seconds=60):
             print "Nearest profile more than 60 seconds away. dt = %s." % dtmin
         ind_t = np.where(dt == dtmin)
-        ind_a= ind_t[0]
+        ind_a = ind_t[0]
         if len(ind_a) > 1:
             ind_a = ind_a[0]
         chosen_time = self.time[ind_a]
         return chosen_time, ind_a
-    
+
     def subset_by_time(self, start_time, stop_time):
-        
+
         time_array = np.array(self.time)
         condition = (time_array >= start_time) & (time_array <= stop_time)
-        
+
         subset_time = time_array[condition]
         subset_data = dict([(c_time, self.data[c_time]) for c_time in subset_time])
-        
-        #Create a list with the values needed by channel's __init__()
-        parameter_values = {'name': self.wavelength, 
-                             'binwidth': self.binwidth, 
-                             'data': subset_data[subset_time[0]],}
-        
+
+        # Create a list with the values needed by channel's __init__()
+        parameter_values = {'name': self.wavelength,
+                            'binwidth': self.binwidth,
+                            'data': subset_data[subset_time[0]], }
+
         # We should use __class__ instead of class name, so that this works properly
         # with subclasses
         # Ex: c = self.__class__(parameters_values)  
@@ -504,38 +503,39 @@
         c.data = subset_data
         c.update()
         return c
-    
-    def subset_by_bins(self, b_min = 0, b_max = None):
+
+    def subset_by_bins(self, b_min=0, b_max=None):
         """Remove some height bins from the file. This could be needed to 
         remove aquisition artifacts at the start or the end of the files.
         """
-        
+
         subset_data = {}
-        
+
         for ctime, cdata in self.data.items():
             subset_data[ctime] = cdata[b_min:b_max]
-            
-        #Create a list with the values needed by channel's __init__()
-        parameters_values = {'name': self.wavelength, 
-                             'binwidth': self.binwidth, 
-                             'data': subset_data[subset_data.keys()[0]],}  # We just need any array with the correct length
+
+        # Create a list with the values needed by channel's __init__()
+        parameters_values = {'name': self.wavelength,
+                             'binwidth': self.binwidth,
+                             'data': subset_data[
+                                 subset_data.keys()[0]], }  # We just need any array with the correct length
 
         c = LidarChannel(parameters_values)
         c.data = subset_data
         c.update()
         return c
 
-    def profile(self,dtime, signal_type = 'rc'):
+    def profile(self, dtime, signal_type='rc'):
         t, idx = self._nearest_dt(dtime)
         if signal_type == 'rc':
             data = self.rc
         else:
             data = self.matrix
-        
-        prof = data[idx,:][0]
+
+        prof = data[idx, :][0]
         return prof, t
 
-    def get_slice(self, starttime, endtime, signal_type = 'rc'):
+    def get_slice(self, starttime, endtime, signal_type='rc'):
         if signal_type == 'rc':
             data = self.rc
         else:
@@ -544,205 +544,202 @@
         starttime = self._nearest_dt(starttime)[0]
         endtime = self._nearest_dt(endtime)[0]
         condition = (tim >= starttime) & (tim <= endtime)
-        sl  = data[condition, :]
+        sl = data[condition, :]
         t = tim[condition]
-        return sl,t
-    
-    def profile_for_duration(self, tim, duration = datetime.timedelta(seconds = 0), signal_type = 'rc'):
+        return sl, t
+
+    def profile_for_duration(self, tim, duration=datetime.timedelta(seconds=0), signal_type='rc'):
         """ Calculates the profile around a specific time (tim). """
-        starttime = tim - duration/2
-        endtime = tim + duration/2
-        d,t = self.get_slice(starttime, endtime, signal_type = signal_type)
-        prof = np.mean(d, axis = 0)
+        starttime = tim - duration / 2
+        endtime = tim + duration / 2
+        d, t = self.get_slice(starttime, endtime, signal_type=signal_type)
+        prof = np.mean(d, axis=0)
         tmin = min(t)
         tmax = max(t)
-        tav = tmin + (tmax-tmin)/2
-        return prof,(tav, tmin,tmax)
-    
+        tav = tmin + (tmax - tmin) / 2
+        return prof, (tav, tmin, tmax)
+
     def average_profile(self):
         """ Return the average profile (NOT range corrected) for all the duration of the measurement. """
-        prof = np.mean(self.matrix, axis = 0)
+        prof = np.mean(self.matrix, axis=0)
         return prof
-    
-    def plot(self, signal_type = 'rc', filename = None, zoom = [0,12000,0,-1], show_plot = True, cmap = plt.cm.jet, z0 = None, title = None, vmin = 0, vmax = 1.3 * 10 ** 7):
-        #if filename is not None:
+
+    def plot(self, signal_type='rc', filename=None, zoom=[0, 12000, 0, -1], show_plot=True, cmap=plt.cm.jet, z0=None,
+             title=None, vmin=0, vmax=1.3 * 10 ** 7):
+        # if filename is not None:
         #    matplotlib.use('Agg')
-        
+
         fig = plt.figure()
         ax1 = fig.add_subplot(111)
-        self.draw_plot(ax1, cmap = cmap, signal_type = signal_type, zoom = zoom, z0 = z0, vmin = vmin, vmax = vmax)
-        
+        self.draw_plot(ax1, cmap=cmap, signal_type=signal_type, zoom=zoom, z0=z0, vmin=vmin, vmax=vmax)
+
         if title:
             ax1.set_title(title)
         else:
-            ax1.set_title("%s signal - %s" % (signal_type.upper(), self.name)) 
-        
+            ax1.set_title("%s signal - %s" % (signal_type.upper(), self.name))
+
         if filename is not None:
             pass
-            #plt.savefig(filename)
+            # plt.savefig(filename)
         else:
             if show_plot:
                 plt.show()
-        #plt.close() ???
-    
-    def draw_plot(self,ax1, cmap = plt.cm.jet, signal_type = 'rc', 
-                            zoom = [0,12000,0,-1], z0 = None, 
-                            add_colorbar = True, cmap_label = 'a.u.',  cb_format = None, 
-                            vmin = 0, vmax = 1.3 * 10 ** 7):
-        
+                # plt.close() ???
+
+    def draw_plot(self, ax1, cmap=plt.cm.jet, signal_type='rc',
+                  zoom=[0, 12000, 0, -1], z0=None,
+                  add_colorbar=True, cmap_label='a.u.', cb_format=None,
+                  vmin=0, vmax=1.3 * 10 ** 7):
+
         if signal_type == 'rc':
             if len(self.rc) == 0:
                 self.calculate_rc()
             data = self.rc
         else:
             data = self.matrix
-        
+
         hmax_idx = self.index_at_height(zoom[1])
-        
+
         # If z0 is given, then the plot is a.s.l.
         if z0:
             ax1.set_ylabel('Altitude a.s.l. [km]')
         else:
             ax1.set_ylabel('Altitude a.g.l. [km]')
             z0 = 0
-            
+
         ax1.set_xlabel('Time UTC [hh:mm]')
-        #y axis in km, xaxis /2 to make 30s measurements in minutes. Only for 1064
-        #dateFormatter = mpl.dates.DateFormatter('%H.%M')
-        #hourlocator = mpl.dates.HourLocator()
-        
-        #dayFormatter = mpl.dates.DateFormatter('\n\n%d/%m')
-        #daylocator = mpl.dates.DayLocator()       
+        # y axis in km, xaxis /2 to make 30s measurements in minutes. Only for 1064
+        # dateFormatter = mpl.dates.DateFormatter('%H.%M')
+        # hourlocator = mpl.dates.HourLocator()
+
+        # dayFormatter = mpl.dates.DateFormatter('\n\n%d/%m')
+        # daylocator = mpl.dates.DayLocator()
         hourFormatter = mpl.dates.DateFormatter('%H:%M')
-        hourlocator = mpl.dates.AutoDateLocator(minticks = 3, maxticks = 8, interval_multiples=True)
+        hourlocator = mpl.dates.AutoDateLocator(minticks=3, maxticks=8, interval_multiples=True)
 
-        
-        #ax1.axes.xaxis.set_major_formatter(dayFormatter)
-        #ax1.axes.xaxis.set_major_locator(daylocator)
+        # ax1.axes.xaxis.set_major_formatter(dayFormatter)
+        # ax1.axes.xaxis.set_major_locator(daylocator)
         ax1.axes.xaxis.set_major_formatter(hourFormatter)
         ax1.axes.xaxis.set_major_locator(hourlocator)
 
-        
         ts1 = mpl.dates.date2num(self.start_time)
         ts2 = mpl.dates.date2num(self.stop_time)
-        
-        
-        im1 = ax1.imshow(data.transpose()[zoom[0]:hmax_idx,zoom[2]:zoom[3]],
-            aspect = 'auto',
-            origin = 'lower',
-            cmap = cmap,
-            vmin = vmin,
-            #vmin = data[:,10:400].max() * 0.1,
-            vmax = vmax,
-            #vmax = data[:,10:400].max() * 0.9,
-            extent = [ts1,ts2,self.z[zoom[0]]/1000.0 + z0/1000., self.z[hmax_idx]/1000.0 + z0/1000.],
-            )
-            
+
+        im1 = ax1.imshow(data.transpose()[zoom[0]:hmax_idx, zoom[2]:zoom[3]],
+                         aspect='auto',
+                         origin='lower',
+                         cmap=cmap,
+                         vmin=vmin,
+                         # vmin = data[:,10:400].max() * 0.1,
+                         vmax=vmax,
+                         # vmax = data[:,10:400].max() * 0.9,
+                         extent=[ts1, ts2, self.z[zoom[0]] / 1000.0 + z0 / 1000.,
+                                 self.z[hmax_idx] / 1000.0 + z0 / 1000.],
+                         )
+
         if add_colorbar:
             if cb_format:
-                cb1 = plt.colorbar(im1, format = cb_format)
+                cb1 = plt.colorbar(im1, format=cb_format)
             else:
                 cb1 = plt.colorbar(im1)
             cb1.ax.set_ylabel(cmap_label)
-            
-            # Make the ticks of the colorbar smaller, two points smaller than the default font size
-            cb_font_size = mpl.rcParams['font.size'] - 2
-            for ticklabels in cb1.ax.get_yticklabels():
-                ticklabels.set_fontsize(cb_font_size)
-            cb1.ax.yaxis.get_offset_text().set_fontsize(cb_font_size)
-
-    
-    def draw_plot_new(self, ax1, cmap = plt.cm.jet, signal_type = 'rc', 
-                            zoom = [0, 12000, 0, None], z0 = None, 
-                            add_colorbar = True, cmap_label = 'a.u.', 
-                            cb_format = None, power_limits = (-2, 2),
-                            date_labels = False, 
-                            vmin = 0, vmax = 1.3 * 10 ** 7):
-        
-        if signal_type == 'rc':
-            if len(self.rc) == 0:
-                self.calculate_rc()
-            data = self.rc
-        else:
-            data = self.matrix
-        
-        hmax_idx = self.index_at_height(zoom[1])
-        hmin_idx = self.index_at_height(zoom[0])
-        
-        # If z0 is given, then the plot is a.s.l.
-        if z0:
-            ax1.set_ylabel('Altitude a.s.l. [km]')
-        else:
-            ax1.set_ylabel('Altitude a.g.l. [km]')
-            z0 = 0
-            
-        ax1.set_xlabel('Time UTC [hh:mm]')
-        #y axis in km, xaxis /2 to make 30s measurements in minutes. Only for 1064
-        #dateFormatter = mpl.dates.DateFormatter('%H.%M')
-        #hourlocator = mpl.dates.HourLocator()
-        
-        
-        if date_labels:
-            dayFormatter = mpl.dates.DateFormatter('%H:%M\n%d/%m/%Y')
-            daylocator = mpl.dates.AutoDateLocator(minticks = 3, maxticks = 8, interval_multiples=True)
-            ax1.axes.xaxis.set_major_formatter(dayFormatter)
-            ax1.axes.xaxis.set_major_locator(daylocator)
-        else:
-            hourFormatter = mpl.dates.DateFormatter('%H:%M')
-            hourlocator = mpl.dates.AutoDateLocator(minticks = 3, maxticks = 8, interval_multiples=True)
-            ax1.axes.xaxis.set_major_formatter(hourFormatter)
-            ax1.axes.xaxis.set_major_locator(hourlocator)
-
-        
-        # Get the values of the time axis
-        dt = datetime.timedelta(seconds = self.duration)
-        
-        time_cut = self.time[zoom[2]:zoom[3]]
-        time_last = time_cut[-1] + dt  # The last element needed for pcolormesh
-        
-        time_all = time_cut + (time_last,)
-        
-        t_axis = mpl.dates.date2num(time_all)
-        
-        # Get the values of the z axis
-        z_cut = self.z[hmin_idx:hmax_idx] - self.resolution / 2.
-        z_last = z_cut[-1] + self.resolution
-        
-        z_axis = np.append(z_cut, z_last) / 1000. + z0 / 1000. # Convert to km
-        
-        # Plot
-        im1 = ax1.pcolormesh(t_axis, z_axis, data.T[hmin_idx:hmax_idx, zoom[2]:zoom[3]],
-                                cmap = cmap,
-                                vmin = vmin,
-                                vmax = vmax,
-                                )
-        
-        if add_colorbar:
-            if cb_format:
-                cb1 = plt.colorbar(im1, format = cb_format)
-            else:
-                cb_formatter = ScalarFormatter()
-                cb_formatter.set_powerlimits(power_limits)                
-                cb1 = plt.colorbar(im1, format = cb_formatter)
-            cb1.ax.set_ylabel(cmap_label)
 
             # Make the ticks of the colorbar smaller, two points smaller than the default font size
             cb_font_size = mpl.rcParams['font.size'] - 2
             for ticklabels in cb1.ax.get_yticklabels():
                 ticklabels.set_fontsize(cb_font_size)
             cb1.ax.yaxis.get_offset_text().set_fontsize(cb_font_size)
-    
+
+    def draw_plot_new(self, ax1, cmap=plt.cm.jet, signal_type='rc',
+                      zoom=[0, 12000, 0, None], z0=None,
+                      add_colorbar=True, cmap_label='a.u.',
+                      cb_format=None, power_limits=(-2, 2),
+                      date_labels=False,
+                      vmin=0, vmax=1.3 * 10 ** 7):
+
+        if signal_type == 'rc':
+            if len(self.rc) == 0:
+                self.calculate_rc()
+            data = self.rc
+        else:
+            data = self.matrix
+
+        hmax_idx = self.index_at_height(zoom[1])
+        hmin_idx = self.index_at_height(zoom[0])
+
+        # If z0 is given, then the plot is a.s.l.
+        if z0:
+            ax1.set_ylabel('Altitude a.s.l. [km]')
+        else:
+            ax1.set_ylabel('Altitude a.g.l. [km]')
+            z0 = 0
+
+        ax1.set_xlabel('Time UTC [hh:mm]')
+        # y axis in km, xaxis /2 to make 30s measurements in minutes. Only for 1064
+        # dateFormatter = mpl.dates.DateFormatter('%H.%M')
+        # hourlocator = mpl.dates.HourLocator()
+
+
+        if date_labels:
+            dayFormatter = mpl.dates.DateFormatter('%H:%M\n%d/%m/%Y')
+            daylocator = mpl.dates.AutoDateLocator(minticks=3, maxticks=8, interval_multiples=True)
+            ax1.axes.xaxis.set_major_formatter(dayFormatter)
+            ax1.axes.xaxis.set_major_locator(daylocator)
+        else:
+            hourFormatter = mpl.dates.DateFormatter('%H:%M')
+            hourlocator = mpl.dates.AutoDateLocator(minticks=3, maxticks=8, interval_multiples=True)
+            ax1.axes.xaxis.set_major_formatter(hourFormatter)
+            ax1.axes.xaxis.set_major_locator(hourlocator)
+
+        # Get the values of the time axis
+        dt = datetime.timedelta(seconds=self.duration)
+
+        time_cut = self.time[zoom[2]:zoom[3]]
+        time_last = time_cut[-1] + dt  # The last element needed for pcolormesh
+
+        time_all = time_cut + (time_last,)
+
+        t_axis = mpl.dates.date2num(time_all)
+
+        # Get the values of the z axis
+        z_cut = self.z[hmin_idx:hmax_idx] - self.resolution / 2.
+        z_last = z_cut[-1] + self.resolution
+
+        z_axis = np.append(z_cut, z_last) / 1000. + z0 / 1000.  # Convert to km
+
+        # Plot
+        im1 = ax1.pcolormesh(t_axis, z_axis, data.T[hmin_idx:hmax_idx, zoom[2]:zoom[3]],
+                             cmap=cmap,
+                             vmin=vmin,
+                             vmax=vmax,
+                             )
+
+        if add_colorbar:
+            if cb_format:
+                cb1 = plt.colorbar(im1, format=cb_format)
+            else:
+                cb_formatter = ScalarFormatter()
+                cb_formatter.set_powerlimits(power_limits)
+                cb1 = plt.colorbar(im1, format=cb_formatter)
+            cb1.ax.set_ylabel(cmap_label)
+
+            # Make the ticks of the colorbar smaller, two points smaller than the default font size
+            cb_font_size = mpl.rcParams['font.size'] - 2
+            for ticklabels in cb1.ax.get_yticklabels():
+                ticklabels.set_fontsize(cb_font_size)
+            cb1.ax.yaxis.get_offset_text().set_fontsize(cb_font_size)
+
     def index_at_height(self, height):
         idx = np.array(np.abs(self.z - height).argmin())
         if idx.size > 1:
-            idx =idx[0]
+            idx = idx[0]
         return idx
-    
+
+
 def netcdf_from_files(LidarClass, filename, files, channels, measurement_ID):
-    #Read the lidar files and select channels
+    # Read the lidar files and select channels
     temp_m = LidarClass(files)
     m = temp_m.subset_by_channels(channels)
     m.get_PT()
     m.info['Measurement_ID'] = measurement_ID
     m.save_as_netcdf(filename)
-
--- a/atmospheric_lidar/requirements.txt	Thu Feb 16 10:27:03 2017 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/atmospheric_lidar/scripts/licel2scc.py	Thu Feb 16 10:27:30 2017 +0200
@@ -0,0 +1,108 @@
+""" Command line tool to convert Licel binary files to SCC NetCDF format.
+"""
+import os
+import sys
+import glob
+import argparse
+import importlib
+
+from ..licel import LicelLidarMeasurement
+
+
+def create_custom_class(custom_netcdf_parameter_path, use_id_as_name=False, temperature=25., pressure=1020.):
+    """ This function creates a custom LicelLidarMeasurement subclass,
+    based on the input provided by the users.
+
+    Parameters
+    ----------
+    custom_netcdf_parameter_path: str
+       The path to the custom channels parameters.
+    use_id_as_name: bool
+       Defines if channels names are descriptive or transient digitizer IDs.
+    temperature: float
+       The ground temperature in degrees C (default 25.0).
+    pressure: float
+       The ground pressure in hPa (default: 1020.0).
+
+    Returns
+    -------
+    CustomLidarMeasurement:
+       A custom sub-class of LicelLidarMeasurement
+    """
+
+    custom_netcdf_parameters = read_settings_file(custom_netcdf_parameter_path)
+
+    class CustomLidarMeasurement(LicelLidarMeasurement):
+        extra_netcdf_parameters = custom_netcdf_parameters
+
+        def __init__(self, filelist=None):
+            super(CustomLidarMeasurement, self).__init__(filelist, use_id_as_name)
+
+        def get_PT(self):
+            ''' Sets the pressure and temperature at station level. This is used if molecular_calc parameter is
+            set to 0 (i.e. use US Standard atmosphere).
+
+            The results are stored in the info dictionary.
+            '''
+
+            self.info['Temperature'] = temperature
+            self.info['Pressure'] = pressure
+
+    return CustomLidarMeasurement
+
+
+def read_settings_file(settings_path):
+    """ Read the settings file.
+
+    The file should contain python code."""
+    if not os.path.isfile(settings_path):
+        raise IOError("The provided settings path does not correspond to a file.")
+
+    dirname, basename = os.path.split(settings_path)
+    sys.path.append(dirname)
+
+    module_name, _ = os.path.splitext(basename)
+    settings = importlib.import_module(module_name)
+    return settings
+
+
+def main():
+    # Define the command line argument
+    parser = argparse.ArgumentParser(description="A program to convert LICEL binary files to the SCC NetCDF format.")
+    parser.add_argument("parameter_file", help="The path to a parameter file linking licel and SCC channels.")
+    parser.add_argument("directory", nargs='?', help="Directory containing licel files (default '.')", default='.')
+    parser.add_argument("search_string", nargs='?', help="Search string for files in directory (default '*.*')", default="*.*")
+    parser.add_argument("-i", '--id_as_name',
+                        help="Use transient digitizer ids as channel names, instead of descriptive names",
+                        action="store_true")
+    parser.add_argument("-m", "--measurement_id", help="The new measurement id", default=None)
+    parser.add_argument("-n", "--measurement_number",
+                        help="The measurement number for the date from 00 to 99. Used if no id is provided",
+                        default="00")
+    parser.add_argument("-t", "--temperature", type=float,
+                        help="The temperature (in C) at lidar level, required if using US Standard atmosphere",
+                        default="25")
+    parser.add_argument("-p", "--pressure", type=float,
+                        help="The pressure (in hPa) at lidar level, required if using US Standard atmosphere",
+                        default="1020")
+    args = parser.parse_args()
+
+    # Get a list of files to convert
+    search_str = os.path.join(args.directory, args.search_string)
+    files = glob.glob(search_str)
+
+    print(args.parameter_file)
+    if files:
+        # Read the files
+        print "Reading {0} files from {1}".format(len(files), args.directory)
+        CustomLidarMeasurement = create_custom_class(args.parameter_file, args.id_as_name, args.temperature,
+                                                     args.pressure)
+        measurement = CustomLidarMeasurement(files)
+
+        # Save the netcdf
+        print "Saving netcdf."
+        measurement.set_measurement_id(args.measurement_id, args.measurement_number)
+        measurement.save_as_netcdf()
+        print "Created file ", measurement.scc_filename
+    else:
+        print "No files found when searching for ", search_str
--- a/example_script/convert_ipral.py	Thu Feb 16 10:27:03 2017 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,74 +0,0 @@
-""" Sample script to convert licel files to SCC netcdf format.
-
-The script assumes the following things:
-
-1. You have already installed the atmospheric_lidar module (e.g. using pip).
-2. You have create a class in a file "ipral" describing your system (described the readme file).
-   If you want to use it for a different system you need to change the script to use your own class
-
-Run the script using: python convert_ipral.py <your options>
-
-Examples
---------
-# This will read all files starting with "l" from "my_dir".
-# It will guess the measurement ID based on the date of the files, and will assume measurement number 00.
-# For example, the new measurment id could be 20120101mb00
-python convert_ipral.py my_dir l*.
-
-# This will use the measurement id you defined.
-python convert_ipral.py my_dir l*. -m 20120204mb32  # It will create the file 20120204mb32.nc
-
-Help string
------------
-# To get this, run: python convert_ipral.py -h
-usage: convert_ipral.py [-h] [-m MEASUREMENT_ID] [-n MEASUREMENT_NUMBER]
-                        [directory] [searchstring]
-
-positional arguments:
-  directory             Directory with licel files.
-  searchstring          Processing system id.
-
-optional arguments:
-  -h, --help            show this help message and exit
-  -m MEASUREMENT_ID, --measurement_id MEASUREMENT_ID
-                        The new measurement id
-  -n MEASUREMENT_NUMBER, --measurement_number MEASUREMENT_NUMBER
-                        The measurement number for the date, if no id is
-                        provided
-
-"""
-
-import glob
-import os
-import argparse
-
-from atmospheric_lidar import ipral
-
-
-if __name__ == "__main__":
-
-    # Define the command line arguments.
-    parser = argparse.ArgumentParser()
-    parser.add_argument("directory", nargs='?', help="Directory with licel files.", default='.')
-    parser.add_argument("searchstring", nargs='?', help="Processing system id.", default="*.*")
-    parser.add_argument("-m", "--measurement_id", help="The new measurement id", default=None)
-    parser.add_argument("-n", "--measurement_number", help="The measurement number for the date, if no id is provided", default="00")
-    args = parser.parse_args()
-
-
-    # Get a list of files to convert
-    search_str = os.path.join(args.directory, args.searchstring)
-    files = glob.glob(search_str)
-
-    if files:
-        # Read the files
-        print "Reading {0} files from {1}".format(len(files), args.directory)
-        measurement = ipral.IpralLidarMeasurement(files)
-
-        #Save the netcdf
-        print "Saving netcdf."
-        measurement.set_measurement_id(args.measurement_id, args.measurement_number)
-        measurement.save_as_netcdf()
-        print "Created file ", measurement.scc_filename
-    else:
-        print "No files found when searching for ", search_str
\ No newline at end of file
--- a/example_script/convert_lilas.py	Thu Feb 16 10:27:03 2017 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,74 +0,0 @@
-""" Sample script to convert licel files to SCC netcdf format.
-
-The script assumes the following things:
-
-1. You have already installed the atmospheric_lidar module (e.g. using pip).
-2. You have create a class in a file "lilas" describing your system (described the readme file).
-   If you want to use it for a different system you need to change the script to use your own class
-
-Run the script using: python convert_lilas.py <your options>
-
-Examples
---------
-# This will read all files starting with "l" from "my_dir".
-# It will guess the measurement ID based on the date of the files, and will assume measurement number 00.
-# For example, the new measurment id could be 20120101mb00
-python convert_lilas.py my_dir l*.
-
-# This will use the measurement id you defined.
-python convert_lilas.py my_dir l*. -m 20120204mb32  # It will create the file 20120204mb32.nc
-
-Help string
------------
-# To get this, run: python convert_lilas.py -h
-usage: convert_lilas.py [-h] [-m MEASUREMENT_ID] [-n MEASUREMENT_NUMBER]
-                        [directory] [searchstring]
-
-positional arguments:
-  directory             Directory with licel files.
-  searchstring          Processing system id.
-
-optional arguments:
-  -h, --help            show this help message and exit
-  -m MEASUREMENT_ID, --measurement_id MEASUREMENT_ID
-                        The new measurement id
-  -n MEASUREMENT_NUMBER, --measurement_number MEASUREMENT_NUMBER
-                        The measurement number for the date, if no id is
-                        provided
-
-"""
-
-import glob
-import os
-import argparse
-
-from atmospheric_lidar import lilas
-
-
-if __name__ == "__main__":
-
-    # Define the command line arguments.
-    parser = argparse.ArgumentParser()
-    parser.add_argument("directory", nargs='?', help="Directory with licel files.", default='.')
-    parser.add_argument("searchstring", nargs='?', help="Processing system id.", default="*.*")
-    parser.add_argument("-m", "--measurement_id", help="The new measurement id", default=None)
-    parser.add_argument("-n", "--measurement_number", help="The measurement number for the date, if no id is provided", default="00")
-    args = parser.parse_args()
-
-
-    # Get a list of files to convert
-    search_str = os.path.join(args.directory, args.searchstring)
-    files = glob.glob(search_str)
-
-    if files:
-        # Read the files
-        print "Reading {0} files from {1}".format(len(files), args.directory)
-        measurement = lilas.LilasLidarMeasurement(files)
-
-        #Save the netcdf
-        print "Saving netcdf."
-        measurement.set_measurement_id(args.measurement_id, args.measurement_number)
-        measurement.save_as_netcdf()
-        print "Created file ", measurement.scc_filename
-    else:
-        print "No files found when searching for ", search_str
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/example_scripts/convert_ipral.py	Thu Feb 16 10:27:30 2017 +0200
@@ -0,0 +1,74 @@
+""" Sample script to convert licel files to SCC netcdf format.
+
+The script assumes the following things:
+
+1. You have already installed the atmospheric_lidar module (e.g. using pip).
+2. You have created a class in a file "ipral" describing your system (described in the readme file).
+   If you want to use it for a different system you need to change the script to use your own class
+
+Run the script using: python convert_ipral.py <your options>
+
+Examples
+--------
+# This will read all files starting with "l" from "my_dir".
+# It will guess the measurement ID based on the date of the files, and will assume measurement number 00.
+# For example, the new measurement id could be 20120101mb00
+python convert_ipral.py my_dir l*.
+
+# This will use the measurement id you defined.
+python convert_ipral.py my_dir l*. -m 20120204mb32  # It will create the file 20120204mb32.nc
+
+Help string
+-----------
+# To get this, run: python convert_ipral.py -h
+usage: convert_ipral.py [-h] [-m MEASUREMENT_ID] [-n MEASUREMENT_NUMBER]
+                        [directory] [searchstring]
+
+positional arguments:
+  directory             Directory with licel files.
+  searchstring          Processing system id.
+
+optional arguments:
+  -h, --help            show this help message and exit
+  -m MEASUREMENT_ID, --measurement_id MEASUREMENT_ID
+                        The new measurement id
+  -n MEASUREMENT_NUMBER, --measurement_number MEASUREMENT_NUMBER
+                        The measurement number for the date, if no id is
+                        provided
+
+"""
+
+import glob
+import os
+import argparse
+
+from atmospheric_lidar import ipral
+
+
+if __name__ == "__main__":
+
+    # Define the command line arguments.
+    parser = argparse.ArgumentParser()
+    parser.add_argument("directory", nargs='?', help="Directory with licel files.", default='.')
+    parser.add_argument("searchstring", nargs='?', help="Processing system id.", default="*.*")
+    parser.add_argument("-m", "--measurement_id", help="The new measurement id", default=None)
+    parser.add_argument("-n", "--measurement_number", help="The measurement number for the date, if no id is provided", default="00")
+    args = parser.parse_args()
+
+
+    # Get a list of files to convert
+    search_str = os.path.join(args.directory, args.searchstring)
+    files = glob.glob(search_str)
+
+    if files:
+        # Read the files
+        print "Reading {0} files from {1}".format(len(files), args.directory)
+        measurement = ipral.IpralLidarMeasurement(files)
+
+        #Save the netcdf
+        print "Saving netcdf."
+        measurement.set_measurement_id(args.measurement_id, args.measurement_number)
+        measurement.save_as_netcdf()
+        print "Created file ", measurement.scc_filename
+    else:
+        print "No files found when searching for ", search_str
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/example_scripts/convert_lilas.py	Thu Feb 16 10:27:30 2017 +0200
@@ -0,0 +1,74 @@
+""" Sample script to convert licel files to SCC netcdf format.
+
+The script assumes the following things:
+
+1. You have already installed the atmospheric_lidar module (e.g. using pip).
+2. You have created a class in a file "lilas" describing your system (described in the readme file).
+   If you want to use it for a different system you need to change the script to use your own class
+
+Run the script using: python convert_lilas.py <your options>
+
+Examples
+--------
+# This will read all files starting with "l" from "my_dir".
+# It will guess the measurement ID based on the date of the files, and will assume measurement number 00.
+# For example, the new measurement id could be 20120101mb00
+python convert_lilas.py my_dir l*.
+
+# This will use the measurement id you defined.
+python convert_lilas.py my_dir l*. -m 20120204mb32  # It will create the file 20120204mb32.nc
+
+Help string
+-----------
+# To get this, run: python convert_lilas.py -h
+usage: convert_lilas.py [-h] [-m MEASUREMENT_ID] [-n MEASUREMENT_NUMBER]
+                        [directory] [searchstring]
+
+positional arguments:
+  directory             Directory with licel files.
+  searchstring          Processing system id.
+
+optional arguments:
+  -h, --help            show this help message and exit
+  -m MEASUREMENT_ID, --measurement_id MEASUREMENT_ID
+                        The new measurement id
+  -n MEASUREMENT_NUMBER, --measurement_number MEASUREMENT_NUMBER
+                        The measurement number for the date, if no id is
+                        provided
+
+"""
+
+import glob
+import os
+import argparse
+
+from atmospheric_lidar import lilas
+
+
+if __name__ == "__main__":
+
+    # Define the command line arguments.
+    parser = argparse.ArgumentParser()
+    parser.add_argument("directory", nargs='?', help="Directory with licel files.", default='.')
+    parser.add_argument("searchstring", nargs='?', help="Processing system id.", default="*.*")
+    parser.add_argument("-m", "--measurement_id", help="The new measurement id", default=None)
+    parser.add_argument("-n", "--measurement_number", help="The measurement number for the date, if no id is provided", default="00")
+    args = parser.parse_args()
+
+
+    # Get a list of files to convert
+    search_str = os.path.join(args.directory, args.searchstring)
+    files = glob.glob(search_str)
+
+    if files:
+        # Read the files
+        print "Reading {0} files from {1}".format(len(files), args.directory)
+        measurement = lilas.LilasLidarMeasurement(files)
+
+        #Save the netcdf
+        print "Saving netcdf."
+        measurement.set_measurement_id(args.measurement_id, args.measurement_number)
+        measurement.save_as_netcdf()
+        print "Created file ", measurement.scc_filename
+    else:
+        print "No files found when searching for ", search_str
\ No newline at end of file
--- a/setup.py	Thu Feb 16 10:27:03 2017 +0200
+++ b/setup.py	Thu Feb 16 10:27:30 2017 +0200
@@ -2,23 +2,26 @@
 
 from setuptools import setup
 
-# Read the long description from the readmefile
+# Read the long description from the readme file
 with open("readme.rst", "rb") as f:
     long_descr = f.read().decode("utf-8")
 
 # Run setup
-setup(name='atmospheric_lidar',
-      packages=['atmospheric_lidar', ],
-      version='0.1.0',
+setup(name='atmospheric_lidar_reader',
+      packages=['atmospheric_lidar', 'atmospheric_lidar.scripts'],
+      version='0.2.0',
       description='Classes for reading raw atmospheric lidar data.',
-      long_description = long_descr,
+      long_description=long_descr,
       author='Ioannis Binietoglou',
-      author_email='binietoglou@noa.gr',
+      author_email='ioannis@inoe.ro',
       install_requires=[
-        "numpy",
-        "matplotlib",
-        "sphinx",
-        "pytest"
-    ],
-     )
-
+          "numpy",
+          "matplotlib",
+          "sphinx",
+          "pytest",
+          "netcdf4",
+      ],
+      entry_points={
+          'console_scripts': ['licel2scc = atmospheric_lidar.scripts.licel2scc:main'],
+      },
+      )

mercurial