generic.py

changeset 36
a281a26f4626
parent 35
b1146c96f727
child 37
7c76fdbdf1a3
equal deleted inserted replaced
35:b1146c96f727 36:a281a26f4626
1 # General imports
2 import datetime
3 from operator import itemgetter
4
5 # Science imports
6 import numpy as np
7 import matplotlib as mpl
8 from matplotlib.ticker import ScalarFormatter
9 from matplotlib import pyplot as plt
10 import netCDF4 as netcdf
11
12 netcdf_format = 'NETCDF3_CLASSIC' # choose one of 'NETCDF3_CLASSIC', 'NETCDF3_64BIT', 'NETCDF4_CLASSIC' and 'NETCDF4'
13
14
class BaseLidarMeasurement():
    """ General lidar measurement object.

    It is meant to act as a base class, independent of the input file format.

    Each subclass should implement the following:
    * the import_file method.
    * set the "extra_netcdf_parameters" variable to a dictionary that includes
      the appropriate parameters.

    You can override the get_PT method to define a custom procedure to get
    ground temperature and pressure. The one implemented by default uses the
    MILOS meteorological station data.
    """

    def __init__(self, filelist=None):
        # Start from a completely empty measurement; filling it in is
        # delegated to import_files() when a file list is provided.
        self.info = {}
        self.dimensions = {}
        self.variables = {}
        self.channels = {}
        self.attributes = {}
        self.files = []
        self.dark_measurement = None

        if filelist:
            self.import_files(filelist)
40
def import_files(self, filelist):
    """Import every file in *filelist* and refresh the derived metadata."""
    for input_file in filelist:
        self.import_file(input_file)
    self.update()
45
def import_file(self, filename):
    """Read a single raw data file; must be provided by each subclass."""
    raise NotImplementedError('Importing files should be defined in the instrument-specific subclass.')
48
def update(self):
    """Refresh info, variables and dimensions from the channel contents.

    Reading of the scan_angles parameter is not implemented.
    """
    channel_names = list(self.channels.keys())
    channel_objects = [self.channels[name] for name in channel_names]

    # Let every channel recompute its own derived attributes first.
    for channel in channel_objects:
        channel.update()

    start_times = [c.start_time for c in channel_objects]
    stop_times = [c.stop_time for c in channel_objects]
    points = [c.points for c in channel_objects]
    all_time_scales = [c.time for c in channel_objects]

    # Channels may share a time scale; keep only the distinct ones.
    time_scales = set(all_time_scales)

    first_start = min(start_times)
    self.info['start_time'] = first_start
    self.info['stop_time'] = max(stop_times)
    self.info['duration'] = self.info['stop_time'] - self.info['start_time']

    self.dimensions['points'] = max(points)
    self.dimensions['channels'] = len(self.channels)
    # self.dimensions['scan angles'] = 1
    self.dimensions['nb_of_time_scales'] = len(time_scales)

    # Express every time scale in seconds since the measurement start.
    raw_Data_Start_Time = []
    raw_Data_Stop_Time = []

    for current_time_scale in list(time_scales):
        offsets = np.array(current_time_scale) - first_start
        # NOTE(review): .seconds drops the .days component of the offset;
        # confirm measurements never span more than 24 hours.
        start_seconds = np.array([t.seconds for t in offsets])
        raw_Data_Start_Time.append(start_seconds)
        # Guess the profile duration (e.g. measurements every 30 or 60 s).
        duration = self._get_duration(start_seconds)
        raw_Data_Stop_Time.append(start_seconds + duration)

    self.variables['Raw_Data_Start_Time'] = raw_Data_Start_Time
    self.variables['Raw_Data_Stop_Time'] = raw_Data_Stop_Time

    # Map each channel name to the index of its time scale.
    # The following was PEARL specific. The reason it was here is not clear.
    # if channel_name == '1064BLR':
    #     channel_name = '1064'
    id_timescale = {}
    for name, scale in zip(channel_names, all_time_scales):
        for n, candidate in enumerate(time_scales):
            if scale == candidate:
                id_timescale[name] = n
    self.variables['id_timescale'] = id_timescale
116
def _get_duration(self, raw_start_in_seconds):
    """Return the duration (in seconds) of a single profile of a time scale.

    In some files (e.g. Licel) this can be specified from the files
    themselves. In others this must be guessed from the spacing between
    consecutive profile start times.

    Parameters:
    raw_start_in_seconds -- array of profile start times, in seconds.

    Raises ValueError when fewer than two profiles are available, since the
    spacing cannot be guessed from a single profile (previously this failed
    with an obscure IndexError on the empty np.diff result).
    """
    if len(raw_start_in_seconds) < 2:
        raise ValueError('Cannot guess the profile duration from a single profile.')

    # Assume a regular time scale and use the first interval as the duration.
    return np.diff(raw_start_in_seconds)[0]
131
def subset_by_channels(self, channel_subset):
    """Return a new measurement of the same type holding only the channels
    whose names appear in the *channel_subset* list."""
    subset = self.__class__()  # Same concrete type as this measurement.
    subset.channels = {name: self.channels[name] for name in channel_subset}
    subset.update()
    return subset
141
def subset_by_time(self, start_time, stop_time):
    """Return a new measurement restricted to [start_time, stop_time].

    Raises ValueError when the interval is inverted or falls outside the
    period covered by this measurement.
    """
    if start_time > stop_time:
        raise ValueError('Stop time should be after start time')

    outside = (start_time < self.info['start_time']) or (stop_time > self.info['stop_time'])
    if outside:
        raise ValueError('The time interval specified is not part of the measurement')

    subset = self.__class__()  # Same concrete type as this measurement.
    for channel_name, channel in self.channels.items():
        subset.channels[channel_name] = channel.subset_by_time(start_time, stop_time)
    subset.update()
    return subset
155
def subset_by_bins(self, b_min=0, b_max=None):
    """Return a copy of the measurement keeping only height bins
    b_min:b_max of every channel. This could be needed to remove
    acquisition artifacts at the start or the end of the files.
    """
    trimmed = self.__class__()  # Same concrete type as this measurement.
    for channel_name, channel in self.channels.items():
        trimmed.channels[channel_name] = channel.subset_by_bins(b_min, b_max)
    trimmed.update()
    return trimmed
169
def r_plot(self):
    """Placeholder: make a basic plot of the data (not implemented).

    Should include some dictionary with params to make the plot stable.
    """
    pass
174
def r_pdf(self):
    """Placeholder: create a pdf report using a basic plot and meta-data (not implemented)."""
    pass
178
def save(self):
    """Placeholder: save the current state of the object to continue the analysis later (not implemented)."""
    pass
182
def get_PT(self):
    """Set the pressure and temperature at station level.

    The results are stored in the info dictionary. This default
    implementation stores fixed values; subclasses may override it to use
    real meteorological data.
    """
    self.info['Temperature'] = 10.0
    self.info['Pressure'] = 930.0
190
def subtract_dark(self):
    """Subtract the average dark profile from every profile of the
    corresponding channel.

    Raises IOError when no dark measurement has been imported yet.
    """
    if not self.dark_measurement:
        raise IOError('No dark measurements have been imported yet.')

    # .items() instead of the py2-only .iteritems(), for consistency with
    # the rest of this class (e.g. update / subset_by_time).
    for channel_name, dark_channel in self.dark_measurement.channels.items():
        dark_profile = dark_channel.average_profile()
        channel = self.channels[channel_name]  # KeyError if the dark channel has no counterpart.

        for measurement_time, data in channel.data.items():
            channel.data[measurement_time] = data - dark_profile

        channel.update()
204
def save_as_netcdf(self, filename=None):
    """Saves the measurement in the netcdf format as required by the SCC.

    Input: filename. If no filename is provided <measurement_id>.nc will
    be used.

    Raises ValueError when one of Measurement_ID, Temperature or Pressure
    is missing from self.info.
    """
    params = self.extra_netcdf_parameters
    needed_parameters = ['Measurement_ID', 'Temperature', 'Pressure']

    for parameter in needed_parameters:
        if self.info.get(parameter, None) is None:
            raise ValueError('A value needs to be specified for %s' % parameter)

    if not filename:
        filename = "%s.nc" % self.info['Measurement_ID']

    # Mandatory dimensions. Time bck not implemented.
    dimensions = {'points': 1,
                  'channels': 1,
                  'time': None,
                  'nb_of_time_scales': 1,
                  'scan_angles': 1, }

    global_att = {'Measurement_ID': None,
                  'RawData_Start_Date': None,
                  'RawData_Start_Time_UT': None,
                  'RawData_Stop_Time_UT': None,
                  'RawBck_Start_Date': None,
                  'RawBck_Start_Time_UT': None,
                  'RawBck_Stop_Time_UT': None,
                  'Sounding_File_Name': None,
                  'LR_File_Name': None,
                  'Overlap_File_Name': None,
                  'Location': None,
                  'System': None,
                  'Latitude_degrees_north': None,
                  'Longitude_degrees_east': None,
                  'Altitude_meter_asl': None}

    # Per-channel variables: name -> (dimensions, netcdf type code).
    channel_variables = \
        {'channel_ID': (('channels', ), 'i'),
         'Background_Low': (('channels', ), 'd'),
         'Background_High': (('channels', ), 'd'),
         'LR_Input': (('channels', ), 'i'),
         'DAQ_Range': (('channels', ), 'd'),
         'Depolarization_Factor': (('channels', ), 'd'), }

    # Materialize the key list so the same channel order is used in every
    # loop below (was a bare .keys() call).
    channels = list(self.channels.keys())

    input_values = dict(self.dimensions, **self.variables)

    # Add some mandatory global attributes.
    input_values['Measurement_ID'] = self.info['Measurement_ID']
    input_values['RawData_Start_Date'] = self.info['start_time'].strftime('%Y%m%d')
    input_values['RawData_Start_Time_UT'] = self.info['start_time'].strftime('%H%M%S')
    input_values['RawData_Stop_Time_UT'] = self.info['stop_time'].strftime('%H%M%S')

    # Add some optional global attributes.
    input_values['System'] = params.general_parameters['System']
    input_values['Latitude_degrees_north'] = params.general_parameters['Latitude_degrees_north']
    input_values['Longitude_degrees_east'] = params.general_parameters['Longitude_degrees_east']
    input_values['Altitude_meter_asl'] = params.general_parameters['Altitude_meter_asl']

    # Open a netCDF4 file; the format is specified at the top of this module.
    f = netcdf.Dataset(filename, 'w', format=netcdf_format)

    # Create the dimensions in the file.
    # (.items() instead of the py2-only .iteritems(), consistent with the
    # rest of the file.)
    for d, v in dimensions.items():
        v = input_values.pop(d, v)
        f.createDimension(d, v)

    # Create global attributes.
    for attrib, value in global_att.items():
        val = input_values.pop(attrib, value)
        # NOTE(review): falsy-but-valid values (0, empty string) are
        # skipped here — confirm this is intended.
        if val:
            setattr(f, attrib, val)

    # --- Variables ---
    # Write the values of fixed channel parameters.
    for var, t in channel_variables.items():
        temp_v = f.createVariable(var, t[1], t[0])
        for n, channel in enumerate(channels):
            temp_v[n] = params.channel_parameters[channel][var]

    # Write the id_timescale values.
    temp_id_timescale = f.createVariable('id_timescale', 'i', ('channels',))
    for n, channel in enumerate(channels):
        temp_id_timescale[n] = self.variables['id_timescale'][channel]

    # Laser pointing angle.
    temp_v = f.createVariable('Laser_Pointing_Angle', 'd', ('scan_angles',))
    temp_v[:] = params.general_parameters['Laser_Pointing_Angle']

    # Molecular calculation.
    temp_v = f.createVariable('Molecular_Calc', 'i')
    temp_v[:] = params.general_parameters['Molecular_Calc']

    # Laser pointing angles of profiles.
    temp_v = f.createVariable('Laser_Pointing_Angle_of_Profiles', 'i', ('time', 'nb_of_time_scales'))
    for n, time_scale in enumerate(self.variables['Raw_Data_Start_Time']):
        temp_v[:len(time_scale), n] = 0  # The lidar has only one laser pointing angle.

    # Raw data start/stop time.
    temp_raw_start = f.createVariable('Raw_Data_Start_Time', 'i', ('time', 'nb_of_time_scales'))
    temp_raw_stop = f.createVariable('Raw_Data_Stop_Time', 'i', ('time', 'nb_of_time_scales'))
    for n, (start_time, stop_time) in enumerate(zip(self.variables['Raw_Data_Start_Time'],
                                                    self.variables['Raw_Data_Stop_Time'])):
        temp_raw_start[:len(start_time), n] = start_time
        temp_raw_stop[:len(stop_time), n] = stop_time

    # Laser shots.
    temp_v = f.createVariable('Laser_Shots', 'i', ('time', 'channels'))
    for n, channel in enumerate(channels):
        time_length = len(self.variables['Raw_Data_Start_Time'][self.variables['id_timescale'][channel]])
        # Array slicing stopped working as usual, e.g. temp_v[:10] = 100
        # does not work. np.ones was added as a workaround.
        temp_v[:time_length, n] = np.ones(time_length) * params.channel_parameters[channel]['Laser_Shots']

    # Raw lidar data.
    temp_v = f.createVariable('Raw_Lidar_Data', 'd', ('time', 'channels', 'points'))
    for n, channel in enumerate(channels):
        c = self.channels[channel]
        temp_v[:len(c.time), n, :c.points] = c.matrix

    self.add_dark_measurements_to_netcdf(f, channels)

    # Pressure at lidar station.
    temp_v = f.createVariable('Pressure_at_Lidar_Station', 'd')
    temp_v[:] = self.info['Pressure']

    # Temperature at lidar station.
    temp_v = f.createVariable('Temperature_at_Lidar_Station', 'd')
    temp_v[:] = self.info['Temperature']

    self.save_netcdf_extra(f)
    f.close()
342
343 def add_dark_measurements_to_netcdf(self, f, channels):
344
345 # Get dark measurements. If it is not given in self.dark_measurement
346 # try to get it using the get_dark_measurements method. If none is found
347 # return without adding something.
348 if self.dark_measurement is None:
349 self.dark_measurement = self.get_dark_measurements()
350
351 if self.dark_measurement is None:
352 return
353
354 dark_measurement = self.dark_measurement
355
356 # Calculate the length of the time_bck dimensions
357 number_of_profiles = [len(c.time) for c in dark_measurement.channels.values()]
358 max_number_of_profiles = np.max(number_of_profiles)
359
360 # Create the dimension
361 f.createDimension('time_bck', max_number_of_profiles)
362
363 # Save the dark measurement data
364 temp_v = f.createVariable('Background_Profile','d',('time_bck', 'channels', 'points'))
365 for (channel,n) in zip(channels, range(len(channels))):
366 c = dark_measurement.channels[channel]
367 temp_v[:len(c.time),n, :c.points] = c.matrix
368
369 # Dark profile start/stop time
370 temp_raw_start = f.createVariable('Raw_Bck_Start_Time','i',('time_bck','nb_of_time_scales'))
371 temp_raw_stop = f.createVariable('Raw_Bck_Stop_Time','i',('time_bck','nb_of_time_scales'))
372 for (start_time, stop_time,n) in zip(dark_measurement.variables['Raw_Data_Start_Time'],
373 dark_measurement.variables['Raw_Data_Stop_Time'],
374 range(len(dark_measurement.variables['Raw_Data_Start_Time']))):
375 temp_raw_start[:len(start_time),n] = start_time
376 temp_raw_stop[:len(stop_time),n] = stop_time
377
378 # Dark measurement start/stop time
379 f.RawBck_Start_Date = dark_measurement.info['start_time'].strftime('%Y%m%d')
380 f.RawBck_Start_Time_UT = dark_measurement.info['start_time'].strftime('%H%M%S')
381 f.RawBck_Stop_Time_UT = dark_measurement.info['stop_time'].strftime('%H%M%S')
382
383
384
def save_netcdf_extra(self, f):
    """Hook for subclasses: write extra, system-specific netcdf content.

    The base implementation does nothing.
    """
    pass
387
def _gettime(self, date_str, time_str):
    """Parse a 'DD/MM/YYYY' date string and an 'HH.MM.SS' time string
    into a single datetime object."""
    return datetime.datetime.strptime(date_str + time_str, '%d/%m/%Y%H.%M.%S')
391
def plot(self):
    """Plot every channel in its own figure, then show all of them."""
    for channel_name in self.channels:
        self.channels[channel_name].plot(show_plot=False)
    plt.show()
396
def get_dark_measurements(self):
    """Hook for subclasses: return a dark measurement object, or None when
    no dark measurement is available (the default)."""
    return None
399
@property
def mean_time(self):
    """Midpoint between the measurement start and stop times."""
    begin = self.info['start_time']
    end = self.info['stop_time']
    return begin + (end - begin) / 2
407
408
class LidarChannel:
    """A single lidar acquisition channel.

    Holds the raw profiles (one per timestamp, in self.data) together with
    the derived range axis self.z and, after calculate_rc(), the
    range-corrected signal self.rc.
    """

    def __init__(self, channel_parameters):
        c = 299792458  # Speed of light [m/s]
        number_of_bins = len(channel_parameters['data'])
        self.wavelength = channel_parameters['name']
        self.name = str(self.wavelength)
        self.binwidth = float(channel_parameters['binwidth'])  # in microseconds
        self.data = {}
        # NOTE(review): resolution = binwidth * c / 2 is only meters if the
        # binwidth is in seconds, while the comment above says microseconds
        # — confirm the units used by the callers.
        self.resolution = self.binwidth * c / 2
        # Range axis, shifted by half a bin so values refer to bin centres.
        self.z = np.arange(number_of_bins) * self.resolution + self.resolution / 2.0
        self.points = number_of_bins
        self.rc = []
        self.duration = 60
422
def calculate_rc(self, idx_min=4000, idx_max=5000):
    """Compute the range-corrected signal and store it in self.rc.

    The background is the per-profile mean of bins idx_min:idx_max (by
    default a region high enough to contain no aerosol signal).
    """
    background = self.matrix[:, idx_min:idx_max].mean(axis=1)
    self.rc = (self.matrix.transpose() - background).transpose() * (self.z ** 2)
426
427
def update(self):
    """Recompute the derived attributes (start/stop time, the sorted time
    tuple and the 2D data matrix) from the self.data dictionary.

    The stop time is the last profile timestamp plus the profile duration.
    """
    measurement_times = sorted(self.data.keys())
    self.start_time = measurement_times[0]
    self.stop_time = measurement_times[-1] + datetime.timedelta(seconds=self.duration)
    self.time = tuple(measurement_times)
    # Profiles stacked in chronological order. A list comprehension replaces
    # the py2-only .iteritems()/map combination, consistent with the
    # .items() usage in the rest of the file.
    self.matrix = np.array([self.data[t] for t in measurement_times])
434
def _nearest_dt(self, dtime):
    """Return (timestamp, index) of the profile nearest to *dtime*.

    Raises ValueError when dtime is more than 5 minutes outside the
    measurement period. Prints a warning when the nearest profile is more
    than 60 seconds away.
    """
    margin = datetime.timedelta(seconds=300)

    if ((dtime + margin) < self.start_time) or ((dtime - margin) > self.stop_time):
        # Bug fix: the original printed a message and then used a bare
        # `raise` with no active exception, which itself fails with a
        # TypeError. Raise an explicit, descriptive exception instead.
        raise ValueError("Requested date not covered in this file")

    dt = abs(self.time - np.array(dtime))
    dtmin = min(dt)

    if dtmin > datetime.timedelta(seconds=60):
        print("Nearest profile more than 60 seconds away. dt = %s." % dtmin)

    ind_t = np.where(dt == dtmin)
    ind_a = ind_t[0]
    if len(ind_a) > 1:
        ind_a = ind_a[0]

    chosen_time = self.time[ind_a]
    return chosen_time, ind_a
451
def subset_by_time(self, start_time, stop_time):
    """Return a new LidarChannel holding only the profiles with timestamps
    inside [start_time, stop_time]."""
    time_array = np.array(self.time)
    inside = (time_array >= start_time) & (time_array <= stop_time)

    selected_times = time_array[inside]
    selected_data = {c_time: self.data[c_time] for c_time in selected_times}

    # Values needed by the channel's __init__(); any profile with the
    # correct length will do for 'data'.
    parameter_values = {'name': self.wavelength,
                        'binwidth': self.binwidth,
                        'data': selected_data[selected_times[0]], }

    # We should use __class__ instead of the class name, so that this works
    # properly with subclasses.
    # Ex: c = self.__class__(parameters_values)
    # This does not currently work with Licel files though.
    new_channel = LidarChannel(parameter_values)
    new_channel.data = selected_data
    new_channel.update()
    return new_channel
473
def subset_by_bins(self, b_min=0, b_max=None):
    """Return a new LidarChannel keeping only height bins b_min:b_max of
    every profile. This could be needed to remove acquisition artifacts
    at the start or the end of the files.
    """
    subset_data = {}
    for ctime, cdata in self.data.items():
        subset_data[ctime] = cdata[b_min:b_max]

    # We just need any array with the correct length for __init__;
    # next(iter(...)) replaces the py2-only subset_data.keys()[0] (dict
    # keys are not indexable in py3) while keeping the same semantics.
    sample_profile = subset_data[next(iter(subset_data))]

    parameters_values = {'name': self.wavelength,
                         'binwidth': self.binwidth,
                         'data': sample_profile, }

    c = LidarChannel(parameters_values)
    c.data = subset_data
    c.update()
    return c
493
def profile(self, dtime, signal_type='rc'):
    """Return (profile, timestamp) for the profile nearest to *dtime*.

    signal_type selects the range-corrected signal ('rc', default) or the
    raw matrix (any other value).
    """
    chosen_time, idx = self._nearest_dt(dtime)
    source = self.rc if signal_type == 'rc' else self.matrix
    prof = source[idx, :][0]
    return prof, chosen_time
503
def get_slice(self, starttime, endtime, signal_type='rc'):
    """Return (data, times) for the profiles between the measurement
    timestamps nearest to starttime and endtime (inclusive)."""
    source = self.rc if signal_type == 'rc' else self.matrix
    tim = np.array(self.time)
    # Snap both ends of the window to actual profile timestamps.
    starttime = self._nearest_dt(starttime)[0]
    endtime = self._nearest_dt(endtime)[0]
    selected = (tim >= starttime) & (tim <= endtime)
    return source[selected, :], tim[selected]
516
def profile_for_duration(self, tim, duration=datetime.timedelta(seconds=0), signal_type='rc'):
    """Average the profiles in a window of *duration* centred on *tim*.

    Returns (profile, (t_average, t_min, t_max)) where the times describe
    the window actually used.
    """
    window_start = tim - duration / 2
    window_end = tim + duration / 2
    data, times = self.get_slice(window_start, window_end, signal_type=signal_type)
    prof = np.mean(data, axis=0)
    tmin = min(times)
    tmax = max(times)
    tav = tmin + (tmax - tmin) / 2
    return prof, (tav, tmin, tmax)
527
def average_profile(self):
    """Return the average profile (NOT range corrected) over the whole
    duration of the measurement."""
    return np.mean(self.matrix, axis=0)
532
def plot(self, signal_type='rc', filename=None, zoom=[0, 12000, 0, -1],
         show_plot=True, cmap=plt.cm.jet, z0=None, title=None,
         vmin=0, vmax=1.3 * 10 ** 7):
    """Draw this channel in a new figure and optionally show it.

    Saving to *filename* is currently disabled (the savefig call is
    commented out), so providing one only suppresses plt.show().
    NOTE(review): the mutable default for zoom is shared between calls;
    kept for interface compatibility.
    """
    fig = plt.figure()
    axes = fig.add_subplot(111)
    self.draw_plot(axes, cmap=cmap, signal_type=signal_type, zoom=zoom,
                   z0=z0, vmin=vmin, vmax=vmax)

    if title:
        axes.set_title(title)
    else:
        axes.set_title("%s signal - %s" % (signal_type.upper(), self.name))

    if filename is not None:
        pass
        # plt.savefig(filename)
    else:
        if show_plot:
            plt.show()
    # plt.close() ???
553
def draw_plot(self, ax1, cmap=plt.cm.jet, signal_type='rc',
              zoom=[0, 12000, 0, -1], z0=None,
              add_colorbar=True, cmap_label='a.u.', cb_format=None,
              vmin=0, vmax=1.3 * 10 ** 7):
    """Draw the channel as a time-height image on the given axes.

    zoom is [bin_min, height_max, profile_min, profile_max]. If z0 (the
    station altitude) is given the y axis is plotted a.s.l., otherwise
    a.g.l.
    """
    if signal_type == 'rc':
        if len(self.rc) == 0:
            self.calculate_rc()
        data = self.rc
    else:
        data = self.matrix

    hmax_idx = self.index_at_height(zoom[1])

    if z0:
        ax1.set_ylabel('Altitude a.s.l. [km]')
    else:
        ax1.set_ylabel('Altitude a.g.l. [km]')
        z0 = 0

    ax1.set_xlabel('Time UTC [hh:mm]')

    # Time axis: auto-located ticks labelled HH:MM.
    hour_formatter = mpl.dates.DateFormatter('%H:%M')
    hour_locator = mpl.dates.AutoDateLocator(minticks=3, maxticks=8, interval_multiples=True)
    ax1.axes.xaxis.set_major_formatter(hour_formatter)
    ax1.axes.xaxis.set_major_locator(hour_locator)

    ts1 = mpl.dates.date2num(self.start_time)
    ts2 = mpl.dates.date2num(self.stop_time)

    image = ax1.imshow(data.transpose()[zoom[0]:hmax_idx, zoom[2]:zoom[3]],
                       aspect='auto',
                       origin='lower',
                       cmap=cmap,
                       vmin=vmin,
                       vmax=vmax,
                       extent=[ts1, ts2,
                               self.z[zoom[0]] / 1000.0 + z0 / 1000.,
                               self.z[hmax_idx] / 1000.0 + z0 / 1000.])

    if add_colorbar:
        if cb_format:
            cb1 = plt.colorbar(image, format=cb_format)
        else:
            cb1 = plt.colorbar(image)
        cb1.ax.set_ylabel(cmap_label)

        # Colorbar ticks two points smaller than the default font size.
        cb_font_size = mpl.rcParams['font.size'] - 2
        for ticklabel in cb1.ax.get_yticklabels():
            ticklabel.set_fontsize(cb_font_size)
        cb1.ax.yaxis.get_offset_text().set_fontsize(cb_font_size)
619
620
def draw_plot_new(self, ax1, cmap=plt.cm.jet, signal_type='rc',
                  zoom=[0, 12000, 0, None], z0=None,
                  add_colorbar=True, cmap_label='a.u.',
                  cb_format=None, power_limits=(-2, 2),
                  date_labels=False,
                  vmin=0, vmax=1.3 * 10 ** 7):
    """Draw the channel on the given axes using pcolormesh, so that every
    profile is placed at its true timestamp.

    zoom is [height_min, height_max, profile_min, profile_max]. If z0
    (the station altitude) is given the y axis is plotted a.s.l.,
    otherwise a.g.l.
    """
    if signal_type == 'rc':
        if len(self.rc) == 0:
            self.calculate_rc()
        data = self.rc
    else:
        data = self.matrix

    hmax_idx = self.index_at_height(zoom[1])
    hmin_idx = self.index_at_height(zoom[0])

    if z0:
        ax1.set_ylabel('Altitude a.s.l. [km]')
    else:
        ax1.set_ylabel('Altitude a.g.l. [km]')
        z0 = 0

    ax1.set_xlabel('Time UTC [hh:mm]')

    # Time axis ticks: HH:MM, optionally with the date on a second line.
    if date_labels:
        tick_formatter = mpl.dates.DateFormatter('%H:%M\n%d/%m/%Y')
    else:
        tick_formatter = mpl.dates.DateFormatter('%H:%M')
    tick_locator = mpl.dates.AutoDateLocator(minticks=3, maxticks=8, interval_multiples=True)
    ax1.axes.xaxis.set_major_formatter(tick_formatter)
    ax1.axes.xaxis.set_major_locator(tick_locator)

    # Time edges: pcolormesh needs one more edge than there are profiles.
    dt = datetime.timedelta(seconds=self.duration)
    time_cut = self.time[zoom[2]:zoom[3]]
    time_all = time_cut + (time_cut[-1] + dt,)
    t_axis = mpl.dates.date2num(time_all)

    # Height edges in km, shifted back by half a bin (self.z holds centres).
    z_cut = self.z[hmin_idx:hmax_idx] - self.resolution / 2.
    z_axis = np.append(z_cut, z_cut[-1] + self.resolution) / 1000. + z0 / 1000.

    mesh = ax1.pcolormesh(t_axis, z_axis, data.T[hmin_idx:hmax_idx, zoom[2]:zoom[3]],
                          cmap=cmap,
                          vmin=vmin,
                          vmax=vmax)

    if add_colorbar:
        if cb_format:
            cb1 = plt.colorbar(mesh, format=cb_format)
        else:
            cb_formatter = ScalarFormatter()
            cb_formatter.set_powerlimits(power_limits)
            cb1 = plt.colorbar(mesh, format=cb_formatter)
        cb1.ax.set_ylabel(cmap_label)

        # Colorbar ticks two points smaller than the default font size.
        cb_font_size = mpl.rcParams['font.size'] - 2
        for ticklabel in cb1.ax.get_yticklabels():
            ticklabel.set_fontsize(cb_font_size)
        cb1.ax.yaxis.get_offset_text().set_fontsize(cb_font_size)
700
def index_at_height(self, height):
    """Return the index of the range bin closest to *height* (same units
    as self.z)."""
    idx = np.array(np.abs(self.z - height).argmin())
    # argmin returns a scalar, so this guard is effectively dead code;
    # kept for behavioural equivalence.
    if idx.size > 1:
        idx = idx[0]
    return idx
706
def netcdf_from_files(LidarClass, filename, files, channels, measurement_ID):
    """Read raw lidar *files* with *LidarClass*, keep only *channels*, and
    save the result as an SCC netcdf file named *filename*."""
    full_measurement = LidarClass(files)
    measurement = full_measurement.subset_by_channels(channels)
    measurement.get_PT()
    measurement.info['Measurement_ID'] = measurement_ID
    measurement.save_as_netcdf(filename)
714

mercurial