OB.DAAC Logo
NASA Logo
Ocean Color Science Software

ocssw V2022
multilevel_processor.py
Go to the documentation of this file.
1 #!/usr/bin/env python3
2 
3 """
4 Program to perform multilevel processing (previously known as the
5 seadas_processor and sometimes referred to as the 'uber' processor).
6 """
7 
8 try:
9  import configparser
10 except ImportError:
11  import ConfigParser as configparser
12 
13 import datetime
14 import logging
15 import optparse
16 import os
17 import re
18 import subprocess
19 import sys
20 import tarfile
21 import time
22 import traceback
23 
26 import mlp.mlp_utils as mlp_utils
27 import mlp.benchmark_timer as benchmark_timer
28 import seadasutils.MetaUtils as MetaUtils
29 import mlp.name_finder_utils as name_finder_utils
30 import mlp.obpg_data_file as obpg_data_file
31 import seadasutils.ProcUtils as ProcUtils
32 import mlp.processor as processor
33 import mlp.processing_rules as processing_rules
34 import mlp.uber_par_file_reader as uber_par_file_reader
35 #import product
36 
37 __version__ = '1.0.6'
38 
39 __author__ = 'melliott'
40 
class ProcessorConfig(object):
    """
    Configuration data for the program which needs to be widely available.

    Holds run-wide flags (verbosity, overwrite, etc.), the working/output
    directories, and the maximum par-file age read from (or written to)
    the seadas_ocssw.cfg configuration file in hidden_dir.
    """
    SECS_PER_DAY = 86400

    def __init__(self, hidden_dir, ori_dir, verbose, overwrite, use_existing,
                 deletefiles, out_dir=None):
        self.prog_name = os.path.basename(sys.argv[0])

        if not os.path.exists(hidden_dir):
            try:
                os.mkdir(hidden_dir)
            except OSError as exc:
                # Bug fix: the original called .find() on the exception
                # object returned by sys.exc_info()[1]; exceptions have no
                # .find() so this raised AttributeError instead of reporting
                # the problem.  Inspect the exception's text instead.
                # Other OSError causes are deliberately ignored (best
                # effort), matching the original behavior.
                if 'Permission denied:' in str(exc):
                    log_and_exit('Error! Unable to create directory {0}'.\
                                 format(hidden_dir))
        self.hidden_dir = hidden_dir
        self.original_dir = ori_dir
        self.verbose = verbose
        self.deletefiles = deletefiles
        self.overwrite = overwrite
        self.use_existing = use_existing
        self.get_anc = True
        if out_dir:
            # An explicit output directory pins the value for the whole run.
            self.output_dir = out_dir
            self.output_dir_is_settable = False
        else:
            self.output_dir = '.'  # default to current dir, change later if
                                   # specified in par file or command line
            self.output_dir_is_settable = True
        cfg_file_path = os.path.join(self.hidden_dir, 'seadas_ocssw.cfg')
        if os.path.exists(cfg_file_path):
            self._read_saved_options(cfg_file_path)
        else:
            self.max_file_age = 2592000  # number of seconds in 30 days
            self._write_default_cfg_file(cfg_file_path)
        ProcessorConfig._instance = self

    def _read_saved_options(self, cfg_path):
        """
        Gets options stored in the program's configuration file.

        Reads 'par_file_age' (in days) from the [main] section and converts
        it to seconds in self.max_file_age.  Exits via log_and_exit on a
        missing section/option or an unparsable file.
        """
        try:
            cfg_parser = configparser.ConfigParser()
            cfg_parser.read(cfg_path)
            try:
                # The stored value looks like "30 # units are days"; the
                # split() strips the trailing inline comment.
                self.max_file_age = ProcessorConfig.SECS_PER_DAY * \
                                    int(cfg_parser.get('main',
                                                       'par_file_age').\
                                        split(' ', 2)[0])
            except configparser.NoSectionError as nse:
                print ('nse: ' + str(nse))
                print ('sys.exc_info(): ')
                for msg in sys.exc_info():
                    print ('  ' + str(msg))
                log_and_exit('Error!  Configuration file has no "main" ' +
                             'section.')
        except configparser.NoOptionError:
            log_and_exit('Error! The "main" section of the configuration ' +
                         'file does not specify a "par_file_age".')
        except configparser.MissingSectionHeaderError:
            log_and_exit('Error! Bad configuration file, no section headers ' +
                         'found.')

    def _set_temp_dir(self):
        """
        Returns a writable temporary directory, preferring /tmp and falling
        back to the current working directory.

        NOTE(review): despite the name, this method returns the path rather
        than setting an attribute; callers must use the return value.
        """
        if os.path.exists('/tmp') and os.path.isdir('/tmp') and \
           os.access('/tmp', os.W_OK):
            return '/tmp'
        else:
            cwd = os.getcwd()
            if os.path.exists(cwd) and os.path.isdir(cwd) and \
               os.access(cwd, os.W_OK):
                return cwd
            else:
                log_and_exit('Error! Unable to establish a temporary ' +
                             'directory.')

    def _write_default_cfg_file(self, cfg_path):
        """
        Writes out a configuration file using default values.
        """
        with open(cfg_path, 'wt') as cfg_file:
            cfg_file.write('[main]\n')
            cfg_file.write('par_file_age=30  # units are days\n')
class Sensor(object):
    """
    Sensor contains the recipe and processing method for general sensors.

    Subclasses override the rule dictionary / ordering and individual
    run_* methods for sensor-specific behavior.  Each run_* method builds
    a command line for one OCSSW program and returns its exit status.
    """
    def __init__(self):
        # Fix: the original assigned self.name = 'general' twice; the
        # redundant second assignment was removed.  Dead, commented-out
        # l1mapgen/l2mapgen/smigen entries were also removed.
        self.name = 'general'
        self.rules_dict = {
            'level 1a': processing_rules.build_rule('level 1a', ['level 0'],
                                                    self.run_bottom_error, False),
            'l1brsgen': processing_rules.build_rule('l1brsgen', ['l1'],
                                                    self.run_l1brsgen, False),
            'l2brsgen': processing_rules.build_rule('l2brsgen', ['l2gen'],
                                                    self.run_l2brsgen, False),
            'level 1b': processing_rules.build_rule('level 1b', ['level 1a'],
                                                    self.run_l1b, False),
            'l2gen': processing_rules.build_rule('l2gen', ['l1'], self.run_l2gen,
                                                 False),
            'l2extract': processing_rules.build_rule('l2extract', ['l2gen'],
                                                     self.run_l2extract, False),
            'l2bin': processing_rules.build_rule('l2bin', ['l2gen'], self.run_l2bin,
                                                 True),
            'l3bin': processing_rules.build_rule('l3bin', ['l2bin'], self.run_l3bin,
                                                 True),
            'l3mapgen': processing_rules.build_rule('l3mapgen', ['l2bin'],
                                                    self.run_l3mapgen, False)
        }
        self.rules_order = ['level 1a', 'l1brsgen', 'level 1b', 'l2gen',
                            'l2extract', 'l2brsgen', 'l2bin', 'l3bin',
                            'l3mapgen']
        self.require_geo = False
        self.recipe = processing_rules.RuleSet('General rules', self.rules_dict,
                                               self.rules_order)

    def run_bottom_error(self, proc):
        """
        Exits with an error message when there is an attempt to process a source
        file at the lowest level of a rule chain.
        """
        err_msg = 'Error!  Attempting to create {0} product, but no creation program is known.'.format(proc.target_type)
        log_and_exit(err_msg)

    def run_l1b(self, proc):
        """
        Sets up and runs the generic Level 1B executable (l1bgen_generic).

        Returns the exit status of the external program.
        """
        prog = os.path.join(proc.ocssw_bin, 'l1bgen_generic')
        args = ['ifile={}'.format(proc.input_file),
                'ofile={}'.format(proc.output_file)]
        if not proc.geo_file is None:
            args.append('geofile={}'.format(proc.geo_file))
        args.extend(get_options(proc.par_data))
        cmd = [prog]
        cmd.extend(args)
        return execute_command(cmd)

    def run_l1brsgen(self, proc):
        """
        Runs the l1brsgen executable; returns its exit status.
        """
        prog = os.path.join(proc.ocssw_bin, 'l1brsgen')
        opts = get_options(proc.par_data)
        cmd = [prog]
        cmd.extend(opts)
        cmd.extend(['ifile={}'.format(proc.input_file),
                    'ofile={}'.format(proc.output_file)])
        if proc.geo_file:
            cmd.append('geofile={}'.format(proc.geo_file))
        logging.debug('Executing: "%s"', " ".join(str(x) for x in cmd))
        status = execute_command(cmd)
        return status

    def run_l2bin(self, proc):
        """
        Set up for and perform L2 binning.

        A nonzero exit status from l2bin is tolerated when an output file
        was still produced (status reset to 0); if no output file exists,
        processing is stopped via sys.exit.
        """
        prog = os.path.join(proc.ocssw_bin, 'l2bin')
        if not os.path.exists(prog):
            print ("Error!  Cannot find executable needed for {0}".\
                   format(proc.rule_set.rules[proc.target_type].action))
        args = ['infile={}'.format(proc.input_file),
                'ofile={}'.format(proc.output_file)]
        args.extend(get_options(proc.par_data))
        cmd = [prog]
        cmd.extend(args)
        if cfg_data.verbose:
            print ('l2bin cmd: {}'.format(" ".join(str(x) for x in cmd)))
        ret_val = execute_command(cmd)
        if ret_val != 0:
            if os.path.exists(proc.output_file):
                msg = '-I- The l2bin program returned a status value of {0}. Proceeding with processing, using the output l2 bin file {1}'.format(ret_val, proc.output_file)
                logging.info(msg)
                ret_val = 0
            else:
                msg = '-I- The l2bin program produced a bin file with no data. No further processing will be done.'
                sys.exit(msg)
        return ret_val

    def run_l2brsgen(self, proc):
        """
        Runs the l2brsgen executable; returns its exit status.
        """
        logging.debug("In run_l2brsgen")
        prog = os.path.join(proc.ocssw_bin, 'l2brsgen')
        opts = get_options(proc.par_data)
        cmd = [prog]
        cmd.extend(opts)
        cmd.extend(['ifile={}'.format(proc.input_file),
                    'ofile={}'.format(proc.output_file)])
        logging.debug('Executing: "%s"', " ".join(str(x) for x in cmd))
        status = execute_command(cmd)
        return status

    def run_l2extract(self, proc):
        """
        Set up and run l2extract.

        Requires SW/NE corner coordinates in the par data; exits via
        log_and_exit when they are absent or cannot be converted to
        line/pixel values.  Returns l2extract's exit status.
        """
        if 'SWlon' in proc.par_data and 'SWlat' in proc.par_data and \
           'NElon' in proc.par_data and 'NElat' in proc.par_data:
            start_line, end_line, start_pixel, end_pixel = get_extract_params(proc)
            if (start_line is None) or (end_line is None) or (start_pixel is None) \
               or (end_pixel is None):
                err_msg = 'Error! Could not compute coordinates for l2extract.'
                log_and_exit(err_msg)
            l2extract_prog = os.path.join(proc.ocssw_bin, 'l2extract')
            l2extract_cmd = [l2extract_prog, proc.input_file,
                             str(start_pixel), str(end_pixel),
                             str(start_line), str(end_line), '1', '1',
                             proc.output_file]
            logging.debug('Executing l2extract command: {}'.format(
                " ".join(str(x) for x in l2extract_cmd)))
            status = execute_command(l2extract_cmd)
            return status
        else:
            err_msg = 'Error! Geographical coordinates not specified for l2extract.'
            log_and_exit(err_msg)

    def run_l2gen(self, proc):
        """
        Set up for and perform L2 processing.

        Optionally fetches ancillary data first (cfg_data.get_anc), writes
        an l2gen par file, then runs l2gen with it.  Returns l2gen's exit
        status.
        """
        if cfg_data.get_anc:
            getanc_prog = build_executable_path('getanc')
            getanc_cmd = [getanc_prog, proc.input_file]
            logging.debug('running getanc command: {}'.format(
                " ".join(str(x) for x in getanc_cmd)))
            execute_command(getanc_cmd)
        l2gen_prog = os.path.join(proc.ocssw_bin, 'l2gen')
        if not os.path.exists(l2gen_prog):
            print ("Error!  Cannot find executable needed for {0}".\
                   format(proc.rule_set.rules[proc.target_type].action))
        par_name = build_l2gen_par_file(proc.par_data, proc.input_file,
                                        proc.geo_file, proc.output_file)
        logging.debug('L2GEN_FILE=' + proc.output_file)
        args = 'par={}/{}'.format(cfg_data.original_dir, par_name)
        l2gen_cmd = [l2gen_prog, args]
        if cfg_data.verbose or DEBUG:
            logging.debug('l2gen cmd: {}'.format(" ".join(str(x) for x in l2gen_cmd)))
        return execute_command(l2gen_cmd)

    def run_l3bin(self, proc):
        """
        Set up and run the l3bin program.

        As with run_l2bin, a nonzero exit status is tolerated when an
        output file exists; otherwise processing stops.
        """
        prog = os.path.join(proc.ocssw_bin, 'l3bin')
        if not os.path.exists(prog):
            print ("Error!  Cannot find executable needed for {0}".\
                   format(proc.rule_set.rules[proc.target_type].action))
        # NOTE(review): the input is passed both as 'ifile=' and 'in=';
        # preserved from the original command construction.
        args = ['ifile={}'.format(proc.input_file)]
        for key in proc.par_data:
            if (key != 'odir') and (key != 'ofile') and not key.lower() in FILE_USE_OPTS:
                args.append("{}={}".format(key, proc.par_data[key]))
        args.append("in={}".format(proc.input_file))
        args.append("out={}".format(proc.output_file))
        cmd = [prog]
        cmd.extend(args)
        logging.debug('Executing l3bin command: {}'.format(
            " ".join(str(x) for x in cmd)))
        ret_val = execute_command(cmd)
        if ret_val != 0:
            if os.path.exists(proc.output_file):
                msg = '-I- The l3bin program returned a status value of {0}. Proceeding with processing, using the output l2 bin file {1}'.format(
                    ret_val, proc.output_file)
                logging.info(msg)
                ret_val = 0
            else:
                msg = "-I- The l3bin program produced a bin file with no data. No further processing will be done."
                sys.exit(msg)
        return ret_val

    def run_l3mapgen(self, proc):
        """
        Set up and run the l3mapgen program; returns its exit status.
        """
        prog = os.path.join(proc.ocssw_bin, 'l3mapgen')
        if not os.path.exists(prog):
            print ("Error!  Cannot find executable needed for {0}".\
                   format(proc.rule_set.rules[proc.target_type].action))
        args = ['ifile={}'.format(proc.input_file)]
        for key in proc.par_data:
            if (key != 'odir') and (key != 'ofile') and not key.lower() in FILE_USE_OPTS:
                args.append('{}={}'.format(key, proc.par_data[key]))
        args.append('ofile={}'.format(proc.output_file))
        cmd = [prog]
        cmd.extend(args)
        logging.debug('Executing l3mapgen command: "%s"', " ".join(str(x) for x in cmd))
        return execute_command(cmd)
379  """
380  Sensor GOCI contains GOCI specific recipe and procssing methods.
381  """
382  def __init__(self):
383  self.name = 'goci'
384  self.rules_dict = {
385  'level 1a': processing_rules.build_rule('level 1a', ['level 0'],
386  self.run_bottom_error, False),
387  'l1brsgen': processing_rules.build_rule('l1brsgen', ['l1'],
388  self.run_l1brsgen, False),
389  'l2brsgen': processing_rules.build_rule('l2brsgen', ['l2gen'],
390  self.run_l2brsgen, False),
391  # 'l1mapgen': processing_rules.build_rule('l1mapgen', ['l1'],
392  # self.run_l1mapgen, False),
393  # 'l2mapgen': processing_rules.build_rule('l2mapgen', ['l2gen'],
394  # self.run_l2mapgen, False),
395  'level 1b': processing_rules.build_rule('level 1b', ['level 1a'],
396  self.run_l1b, False),
397  'l2gen': processing_rules.build_rule('l2gen', ['level 1b'], self.run_l2gen,
398  False),
399  'l2extract': processing_rules.build_rule('l2extract', ['l2gen'],
400  self.run_l2extract, False),
401  'l2bin': processing_rules.build_rule('l2bin', ['l2gen'], self.run_l2bin,
402  True),
403  'l3bin': processing_rules.build_rule('l3bin', ['l2bin'], self.run_l3bin,
404  True),
405  'l3mapgen': processing_rules.build_rule('l3mapgen', ['l2bin'],
406  self.run_l3mapgen, False)
407  # 'smigen': processing_rules.build_rule('smigen', ['l3bin'], self.run_smigen,
408  # False)
409  }
410  # self.rules_order = ['level 1a', 'l1brsgen', 'l1mapgen', 'level 1b', 'l2gen',
411  # 'l2extract', 'l2brsgen', 'l2mapgen', 'l2bin', 'l3bin',
412  # 'l3mapgen', 'smigen']
413  self.rules_order = ['level 1a', 'l1brsgen', 'level 1b', 'l2gen',
414  'l2extract', 'l2brsgen', 'l2bin', 'l3bin',
415  'l3mapgen']
416  self.require_geo = False
418  self.recipe = processing_rules.RuleSet('GOCI rules', self.rules_dict, self.rules_order)
419 
421  """
422  Sensor HAWKEYE contains HAWKEYE specific recipe and procssing methods.
423  """
424  def __init__(self):
425  self.name = 'hawkeye'
426  self.rules_dict = {
427  'level 1a': processing_rules.build_rule('level 1a', ['nothing lower'],
428  self.run_bottom_error, False),
429  'l1brsgen': processing_rules.build_rule('l1brsgen', ['level 1a', 'geo'],
430  self.run_l1brsgen, False),
431  # 'l1mapgen': processing_rules.build_rule('l1mapgen', ['level 1a', 'geo'],
432  # self.run_l1mapgen, False),
433  'geo': processing_rules.build_rule('geo', ['level 1a'],
434  self.run_geo, False),
435  'l2gen': processing_rules.build_rule('l2gen', ['level 1a', 'geo'],
436  self.run_l2gen, False),
437  'l2extract': processing_rules.build_rule('l2extract', ['l2gen'],
438  self.run_l2extract, False),
439  'l2brsgen': processing_rules.build_rule('l2brsgen', ['l2gen'],
440  self.run_l2brsgen, False),
441  # 'l2mapgen': processing_rules.build_rule('l2mapgen', ['l2gen'],
442  # self.run_l2mapgen, False),
443  'l2bin': processing_rules.build_rule('l2bin', ['l2gen'], self.run_l2bin,
444  True),
445  'l3bin': processing_rules.build_rule('l3bin', ['l2bin'], self.run_l3bin,
446  True),
447  'l3mapgen': processing_rules.build_rule('l3mapgen', ['l2bin'],
448  self.run_l3mapgen, False, False),
449  # 'smigen': processing_rules.build_rule('smigen', ['l3bin'], self.run_smigen,
450  # False)
451  }
452  # self.rules_order = ['level 1a', 'geo', 'l1brsgen',
453  # 'l1mapgen','l2gen', 'l2extract', 'l2bin',
454  # 'l2brsgen', 'l2mapgen', 'l3bin', 'l3mapgen', 'smigen']
455  self.rules_order = ['level 1a', 'geo', 'l1brsgen',
456  'l2gen', 'l2extract', 'l2bin',
457  'l2brsgen', 'l3bin', 'l3mapgen']
458  self.require_geo = True
460  self.recipe = processing_rules.RuleSet('HAWKEYE Rules', self.rules_dict, self.rules_order)
461 
462  def run_geo(self, proc):
463  """
464  Set up and run the geolocate_hawkeye program, returning the exit status of the run.
465  """
466  logging.debug('In run_geolocate_hawkeye')
467  prog = build_executable_path('geolocate_hawkeye')
468 
471  if not prog:
472  err_msg = 'Error! Cannot find program geolocate_hawkeye.'
473  logging.info(err_msg)
474  sys.exit(err_msg)
475  args = [proc.input_file, proc.output_file]
476  args.extend(get_options(proc.par_data))
477  cmd = [prog]
478  cmd.extend(args)
479  # cmd = [prog, args]
480  logging.debug('\nRunning: {}'.format(" ".join(str(x) for x in cmd)))
481  return execute_command(cmd)
482 
484  """
485  Sensor MERIS contains MERIS specific recipe and processing methods.
486 
487  Rule format:
488  target type (string), source types (list of strings), batch processing
489  flag (Boolean), action to take (function name)
490  """
491  def __init__(self):
492  self.name = 'meris'
493  self.rules_dict = {
494  'level 1a': processing_rules.build_rule('level 1a', ['level 0'],
495  self.run_bottom_error, False),
496  'l1brsgen': processing_rules.build_rule('l1brsgen', ['l1'],
497  self.run_l1brsgen, False),
498  'l2brsgen': processing_rules.build_rule('l2brsgen', ['l2gen'],
499  self.run_l2brsgen, False),
500  # 'l1mapgen': processing_rules.build_rule('l1mapgen', ['l1'],
501  # self.run_l1mapgen, False),
502  # 'l2mapgen': processing_rules.build_rule('l2mapgen', ['l2gen'],
503  # self.run_l2mapgen, False),
504  'level 1b': processing_rules.build_rule('level 1b', ['level 1a'],
505  self.run_l1b, False),
506  'l2gen': processing_rules.build_rule('l2gen', ['level 1b'], self.run_l2gen,
507  False),
508  'l2extract': processing_rules.build_rule('l2extract', ['l2gen'],
509  self.run_l2extract, False),
510  'l2bin': processing_rules.build_rule('l2bin', ['l2gen'], self.run_l2bin,
511  True),
512  'l3bin': processing_rules.build_rule('l3bin', ['l2bin'], self.run_l3bin,
513  True),
514  'l3mapgen': processing_rules.build_rule('l3mapgen', ['l2bin'],
515  self.run_l3mapgen, False),
516  # 'smigen': processing_rules.build_rule('smigen', ['l3bin'], self.run_smigen,
517  # False)
518  }
519  # self.rules_order = ['level 1a', 'l1brsgen', 'l1mapgen', 'level 1b', 'l2gen',
520  # 'l2extract', 'l2brsgen', 'l2mapgen', 'l2bin', 'l3bin',
521  # 'l3mapgen', 'smigen']
522  self.rules_order = ['level 1a', 'l1brsgen', 'level 1b', 'l2gen',
523  'l2extract', 'l2brsgen', 'l2bin', 'l3bin',
524  'l3mapgen']
525  self.require_geo = False
527  self.recipe = processing_rules.RuleSet('MERIS rules', self.rules_dict, self.rules_order)
528 
530  """
531  Sensor MODIS contains MODIS specific recipe and processing methods.
532  """
533  def __init__(self):
534  self.name = 'modis'
535  self.rules_dict = {
536  'level 0': processing_rules.build_rule('level 0', ['nothing lower'],
537  self.run_bottom_error, False),
538  'level 1a': processing_rules.build_rule('level 1a', ['level 0'],
539  self.run_l1a, False),
540  'l1brsgen': processing_rules.build_rule('l1brsgen', ['level 1b', 'geo'],
541  self.run_l1brsgen, False),
542  # 'l1mapgen': processing_rules.build_rule('l1mapgen', ['level 1b', 'geo'],
543  # self.run_l1mapgen, False),
544  'geo': processing_rules.build_rule('geo', ['level 1a'], self.run_geo,
545  False),
546  'l1aextract': processing_rules.build_rule('l1aextract',
547  ['level 1a', 'geo'],
548  self.run_l1aextract,
549  False),
550  'level 1b': processing_rules.build_rule('level 1b',
551  ['level 1a', 'geo'],
552  self.run_l1b, False),
553  'l2gen': processing_rules.build_rule('l2gen', ['level 1b', 'geo'],
554  self.run_l2gen, False),
555  'l2extract': processing_rules.build_rule('l2extract', ['l2gen'],
556  self.run_l2extract, False),
557  'l2brsgen': processing_rules.build_rule('l2brsgen', ['l2gen'],
558  self.run_l2brsgen, False),
559  # 'l2mapgen': processing_rules.build_rule('l2mapgen', ['l2gen'],
560  # self.run_l2mapgen, False),
561  'l2bin': processing_rules.build_rule('l2bin', ['l2gen'], self.run_l2bin,
562  True),
563  'l3bin': processing_rules.build_rule('l3bin', ['l2bin'], self.run_l3bin,
564  True),
565  'l3mapgen': processing_rules.build_rule('l3mapgen', ['l2bin'],
566  self.run_l3mapgen, False, False),
567  # 'smigen': processing_rules.build_rule('smigen', ['l3bin'], self.run_smigen,
568  # False)
569  }
570  # self.rules_order = ['level 0', 'level 1a', 'geo', 'l1aextract',
571  # 'level 1b', 'l1brsgen', 'l1mapgen', 'l2gen', 'l2extract',
572  # 'l2bin', 'l2brsgen', 'l2mapgen', 'l3bin', 'l3mapgen',
573  # 'smigen']
574  self.rules_order = ['level 0', 'level 1a', 'geo', 'l1aextract',
575  'level 1b', 'l1brsgen', 'l2gen', 'l2extract',
576  'l2bin', 'l2brsgen', 'l3bin', 'l3mapgen']
577  self.require_geo = True
579  self.recipe = processing_rules.RuleSet('MODIS Rules', self.rules_dict, self.rules_order)
580 
581  def run_l1aextract(self, proc):
582  """
583  Set up and run l1aextract_modis.
584  """
585  if 'SWlon' in proc.par_data and 'SWlat' in proc.par_data and\
586  'NElon' in proc.par_data and 'NElat' in proc.par_data:
587  start_line, end_line, start_pixel, end_pixel = get_extract_params(proc)
588  if (start_line is None) or (end_line is None) or (start_pixel is None)\
589  or (end_pixel is None):
590  err_msg = 'Error! Cannot find l1aextract_modis coordinates.'
591  log_and_exit(err_msg)
592  l1aextract_prog = os.path.join(proc.ocssw_bin, 'l1aextract_modis')
593  l1aextract_cmd = [l1aextract_prog, proc.input_file,
594  str(start_pixel), str(end_pixel),
595  str(start_line), str(end_line),
596  proc.output_file]
597  logging.debug('Executing l1aextract_modis command: "%s"',
598  " ".join(str(x) for x in l1aextract_cmd))
599  status = execute_command(l1aextract_cmd)
600  return status
601 
602  def run_geo(self, proc):
603  """
604  Sets up and runs the MODIS GEO script.
605  """
606  prog = build_executable_path('modis_GEO')
607  # os.path.join(proc.ocssw_root, 'run', 'scripts', 'modis_GEO')
608  args = [proc.input_file]
609  args.append('--output={}'.format(proc.output_file))
610  args.extend(get_options(proc.par_data))
611  # cmd = [prog, args]
612  cmd = [prog]
613  cmd.extend(args)
614  logging.debug('\nRunning: {}'.format(" ".join(str(x) for x in cmd)))
615  return execute_command(cmd)
616 
617  def run_l1a(self, proc):
618  """
619  Sets up and runs the MODIS L1A script.
620  """
621  prog = build_executable_path('modis_L1A')
622  args = [proc.input_file]
623  args.append('--output={}'.format(proc.output_file))
624  args.extend(get_options(proc.par_data))
625  cmd = [prog]
626  cmd.extend(args)
627  # logging.debug("\nRunning: " + cmd)
628  logging.debug('\nRunning: {}'.format(" ".join(str(x) for x in cmd)))
629  return execute_command(cmd)
630 
631  def run_l1b(self, proc):
632  """
633  Runs the L1B script.
634  """
635  prog = build_executable_path('modis_L1B')
636  args = ['-o', proc.output_file]
637  args.extend(get_options(proc.par_data))
638  # The following is no longer needed, but kept for reference.
639  # args += ' --lutdir $OCSSWROOT/run/var/modisa/cal/EVAL --lutver=6.1.15.1z'
640  args.append(proc.input_file)
641  if not proc.geo_file is None:
642  args.append(proc.geo_file)
643  cmd = [prog]
644  cmd.extend(args)
645  logging.debug('\nRunning: {}'.format(" ".join(str(x) for x in cmd)))
646  return execute_command(cmd)
647 
649  """
650  Sensor SeaWiFS contains SeaWiFS sepcific recipe and processing method.
651  """
652  def __init__(self):
653  self.name = 'seawifs'
654  self.rules_dict = {
655  'level 1a': processing_rules.build_rule('level 1a', ['level 0'],
656  self.run_bottom_error, False),
657  'l1aextract': processing_rules.build_rule('l1aextract',
658  ['level 1a'],
659  self.run_l1aextract,
660  False),
661  'l1brsgen': processing_rules.build_rule('l1brsgen', ['l1'],
662  self.run_l1brsgen, False),
663  # 'l1mapgen': processing_rules.build_rule('l1mapgen', ['l1'],
664  # self.run_l1mapgen, False),
665  'level 1b': processing_rules.build_rule('level 1b', ['level 1a'],
666  self.run_l1b, False),
667  'l2gen': processing_rules.build_rule('l2gen', ['l1'], self.run_l2gen,
668  False),
669  'l2extract': processing_rules.build_rule('l2extract', ['l2gen'],
670  self.run_l2extract, False),
671  'l2brsgen': processing_rules.build_rule('l2brsgen', ['l2gen'],
672  self.run_l2brsgen, False),
673  # 'l2mapgen': processing_rules.build_rule('l2mapgen', ['l2gen'],
674  # self.run_l2mapgen, False),
675  'l2bin': processing_rules.build_rule('l2bin', ['l2gen'], self.run_l2bin,
676  True),
677  'l3bin': processing_rules.build_rule('l3bin', ['l2bin'], self.run_l3bin,
678  True, False),
679  'l3mapgen': processing_rules.build_rule('l3mapgen', ['l2bin'],
680  self.run_l3mapgen, False, False),
681  # 'smigen': processing_rules.build_rule('smigen', ['l3bin'], self.run_smigen,
682  # False)
683  }
684  # self.rules_order = ['level 1a', 'l1aextract', 'l1brsgen',
685  # 'l1mapgen', 'level 1b', 'l2gen', 'l2extract',
686  # 'l2brsgen', 'l2mapgen', 'l2bin', 'l3bin',
687  # 'l3mapgen', 'smigen']
688  self.rules_order = ['level 1a', 'l1aextract', 'l1brsgen',
689  'level 1b', 'l2gen', 'l2extract',
690  'l2brsgen', 'l2bin', 'l3bin',
691  'l3mapgen']
692  self.require_geo = False
694  self.recipe = processing_rules.RuleSet("SeaWiFS Rules", self.rules_dict, self.rules_order)
695 
696  def run_l1aextract(self, proc):
697  """
698  Set up and run l1aextract_seawifs.
699  """
700  if 'SWlon' in proc.par_data and 'SWlat' in proc.par_data and\
701  'NElon' in proc.par_data and 'NElat' in proc.par_data:
702  start_line, end_line, start_pixel, end_pixel = get_extract_params(proc)
703  if (start_line is None) or (end_line is None) or (start_pixel is None)\
704  or (end_pixel is None):
705  err_msg = 'Error! Cannot compute l1aextract_seawifs coordinates.'
706  log_and_exit(err_msg)
707  l1aextract_prog = os.path.join(proc.ocssw_bin, 'l1aextract_seawifs')
708  l1aextract_cmd = [l1aextract_prog, proc.input_file,
709  str(start_pixel), str(end_pixel),
710  str(start_line), str(end_line), '1', '1',
711  proc.output_file]
712  logging.debug('Executing l1aextract_seawifs command: "%s"',
713  " ".join(str(x) for x in l1aextract_cmd))
714  status = execute_command(l1aextract_cmd)
715  return status
716 
718  """
719  Sensor VIIRS contains VIIRS sepcific recipe and processing method..
720  """
721  def __init__(self):
722  self.name = 'viirs'
723  self.rules_dict = {
724  'level 1a': processing_rules.build_rule('level 1a', ['nothing lower'],
725  self.run_bottom_error, False),
726  'l1brsgen': processing_rules.build_rule('l1brsgen', ['l1', 'geo'],
727  self.run_l1brsgen, False),
728  # 'l1mapgen': processing_rules.build_rule('l1mapgen', ['l1', 'geo'],
729  # self.run_l1mapgen, False),
730  'geo': processing_rules.build_rule('geo', ['level 1a'],
731  self.run_geo, False),
732  'l1aextract': processing_rules.build_rule('l1aextract',
733  ['level 1a', 'geo'],
734  self.run_l1aextract,
735  False),
736  'level 1b': processing_rules.build_rule('level 1b', ['level 1a', 'geo'],
737  self.run_l1b, False),
738  'l2gen': processing_rules.build_rule('l2gen', ['l1', 'geo'],
739  self.run_l2gen, False),
740  'l2extract': processing_rules.build_rule('l2extract', ['l2gen'],
741  self.run_l2extract, False),
742  'l2brsgen': processing_rules.build_rule('l2brsgen', ['l2gen'],
743  self.run_l2brsgen, False),
744  # 'l2mapgen': processing_rules.build_rule('l2mapgen', ['l2gen'],
745  # self.run_l2mapgen, False),
746  'l2bin': processing_rules.build_rule('l2bin', ['l2gen'], self.run_l2bin,
747  True),
748  'l3bin': processing_rules.build_rule('l3bin', ['l2bin'], self.run_l3bin,
749  True),
750  'l3mapgen': processing_rules.build_rule('l3mapgen', ['l2bin'],
751  self.run_l3mapgen, False, False),
752  # 'smigen': processing_rules.build_rule('smigen', ['l3bin'], self.run_smigen,
753  # False)
754  }
755  # self.rules_order = ['level 1a', 'geo', 'l1aextract', 'level 1b', 'l1brsgen',
756  # 'l1mapgen','l2gen', 'l2extract', 'l2bin',
757  # 'l2brsgen', 'l2mapgen', 'l3bin', 'l3mapgen', 'smigen']
758  self.rules_order = ['level 1a', 'geo', 'l1aextract', 'level 1b', 'l1brsgen',
759  'l2gen', 'l2extract', 'l2bin',
760  'l2brsgen', 'l3bin', 'l3mapgen']
761  self.require_geo = True
763  self.recipe = processing_rules.RuleSet('VIIRS Rules', self.rules_dict, self.rules_order)
764 
765  def run_geo(self, proc):
766  """
767  Set up and run the geolocate_viirs program, returning the exit status of the run.
768  """
769  logging.debug('In run_geolocate_viirs')
770  prog = build_executable_path('geolocate_viirs')
771 
774  if not prog:
775  err_msg = 'Error! Cannot find program geolocate_viirs.'
776  logging.info(err_msg)
777  sys.exit(err_msg)
778  args = ['-ifile={}'.format(proc.input_file), '-geofile_mod={}'.format(proc.output_file)]
779  args.extend(get_options(proc.par_data))
780  cmd = [prog]
781  cmd.extend(args)
782  # cmd = [prog, args]
783  logging.debug('\nRunning: {}'.format(" ".join(str(x) for x in cmd)))
784  return execute_command(cmd)
785 
786  def run_l1b(self, proc):
787  logging.debug('In run_viirs_l1b')
788  prog = build_executable_path('calibrate_viirs')
789  # prog='/accounts/melliott/seadas/ocssw/bin/calibrate_viirs'
790 
791  args = ['ifile={}'.format(proc.input_file), 'l1bfile_mod={}'.format(proc.output_file)]
792  args.extend(get_options(proc.par_data))
793  # The following is no longer needed, but kept for reference.
794  # args += ' --lutdir $OCSSWROOT/run/var/modisa/cal/EVAL --lutver=6.1.15.1z'
795  # args += ' ' + proc.input_file
796  if proc.geo_file:
797  pass
798  # args += ' geofile=' + proc.geo_file
799  cmd = [prog]
800  cmd.extend(args)
801  # cmd = [prog, args]
802  logging.debug('\nRunning: {}'.format(" ".join(str(x) for x in cmd)))
803  return execute_command(cmd)
804 
805  def run_l1aextract(self, proc):
806  """
807  Set up and run l1aextract_viirs.
808  """
809  if 'SWlon' in proc.par_data and 'SWlat' in proc.par_data and\
810  'NElon' in proc.par_data and 'NElat' in proc.par_data:
811  start_line, end_line, start_pixel, end_pixel = get_extract_params(proc)
812  elif 'sline' in proc.par_data and 'eline' in proc.par_data and\
813  'spixl' in proc.par_data and 'epixl' in proc.par_data:
814  start_line = proc.par_data['sline']
815  end_line = proc.par_data['eline']
816  start_pixel = proc.par_data['spixl']
817  end_pixel = proc.par_data['epixl']
818 
819  if (start_line is None) or (end_line is None) or (start_pixel is None)\
820  or (end_pixel is None):
821  err_msg = 'Error! Cannot find l1aextract_viirs coordinates.'
822  log_and_exit(err_msg)
823  l1aextract_prog = os.path.join(proc.ocssw_bin, 'l1aextract_viirs')
824  l1aextract_cmd = [l1aextract_prog, proc.input_file,
825  str(start_pixel), str(end_pixel),
826  str(start_line), str(end_line),
827  proc.output_file]
828  logging.debug('Executing l1aextract_viirs command: "%s"',
829  " ".join(str(x) for x in l1aextract_cmd))
830  status = execute_command(l1aextract_cmd)
831  return status
832 
def get_obpg_data_file_object(file_specification):
    """
    Build and return an ObpgDataFile object describing file_specification.
    """
    typer = mlp.get_obpg_file_type.ObpgFileTyper(file_specification)
    file_type, sensor = typer.get_file_type()
    start_time, end_time = typer.get_file_times()
    return obpg_data_file.ObpgDataFile(file_specification, file_type, sensor,
                                       start_time, end_time, typer.attributes)
844 
def build_executable_path(prog_name):
    """
    Return the full path of prog_name under OCSSWROOT_DIR (looking in the
    'bin' and 'scripts' subdirectories, in that order), or None when the
    program is found in neither.
    """
    for subdir in ('bin', 'scripts'):
        candidate = os.path.join(OCSSWROOT_DIR, subdir, prog_name)
        if os.path.exists(candidate):
            return candidate
    return None
858 
def build_file_list_file(filename, file_list):
    """
    Write the names in file_list, one per line, to the file named filename.
    """
    with open(filename, 'wt') as list_file:
        list_file.writelines(fname + '\n' for fname in file_list)
866 
def build_l2gen_par_file(par_contents, input_file, geo_file, output_file):
    """
    Write a timestamped par file for l2gen into the hidden directory and
    return its path.
    """
    timestamp = datetime.datetime.today().strftime('%Y%m%d%H%M%S')
    par_path = os.path.join(cfg_data.hidden_dir,
                            'L2_{0}.par'.format(timestamp))
    # ifile/geofile/ofile are written explicitly below; odir and the
    # file-use options are not l2gen parameters.
    excluded = ('ifile', 'geofile', 'ofile', 'odir')
    with open(par_path, 'wt') as par_file:
        par_file.write('# Automatically generated par file for l2gen\n')
        par_file.write('ifile=' + input_file + '\n')
        if geo_file is not None:
            par_file.write('geofile=' + geo_file + '\n')
        par_file.write('ofile=' + output_file + '\n')
        for l2_opt in par_contents:
            if l2_opt not in excluded and l2_opt not in FILE_USE_OPTS:
                par_file.write(l2_opt + '=' + par_contents[l2_opt] + '\n')
    return par_path
886 
def check_options(options):
    """
    Validate command line options; exits via log_and_exit when a specified
    input file does not exist.
    """
    if options.ifile and not os.path.exists(options.ifile):
        err_msg = 'Error! The specified input file, {0}, does not exist.'. \
                  format(options.ifile)
        log_and_exit(err_msg)
901 
def clean_files(delete_list):
    """
    Remove files left over from processing.

    Deletes every path in delete_list (intermediate files that were needed
    during processing but never requested as output targets), then purges
    hidden .par files older than cfg_data.max_file_age, reporting progress
    when verbose.
    """
    if cfg_data.verbose:
        print ("Cleaning up files")
        sys.stdout.flush()
    files_deleted = 0
    for filepath in delete_list:
        if cfg_data.verbose:
            print ('Deleting {0}'.format(filepath))
            sys.stdout.flush()
        os.remove(filepath)
        files_deleted += 1
    # Expire old par files from the hidden directory.
    for par_file in os.listdir(cfg_data.hidden_dir):
        if not par_file.endswith('.par'):
            continue
        par_path = os.path.join(cfg_data.hidden_dir, par_file)
        file_age = round(time.time()) - os.path.getmtime(par_path)
        if file_age > cfg_data.max_file_age:
            if cfg_data.verbose:
                print ('Deleting {0}'.format(par_path))
                sys.stdout.flush()
            os.remove(par_path)
            files_deleted += 1
    if cfg_data.verbose:
        if not files_deleted:
            print ('No files were found for deletion.')
        elif files_deleted == 1:
            print ('One file was deleted.')
        else:
            print ('A total of {0} files were deleted.'.format(files_deleted))
        sys.stdout.flush()
941 
def create_levels_list(rules_sets):
    """
    Build a list of (level_name, [rules_set_names]) pairs covering every
    level (beyond the first) found in any of the given rules sets, keeping
    each level adjacent to its predecessor in processing order.
    """
    set_names = list(rules_sets.keys())
    first_set = set_names[0]
    logging.debug('set_key = %s', (first_set))
    levels = [(lvl, [first_set])
              for lvl in rules_sets[first_set].rules_order[1:]]
    for set_name in set_names[1:]:
        order = rules_sets[set_name].rules_order
        for lvl_name in order[1:]:
            known_names = [entry[0] for entry in levels]
            if lvl_name in known_names:
                # Level already present: just record this rules set as
                # another user of it.
                levels[known_names.index(lvl_name)][1].append(set_name)
            else:
                # Insert the new level right after its predecessor from
                # this rules set, or at the front if that predecessor is
                # not (yet) known.
                predecessor = order[order.index(lvl_name) - 1]
                if predecessor in known_names:
                    insert_at = known_names.index(predecessor) + 1
                else:
                    insert_at = 0
                levels.insert(insert_at, (lvl_name, [set_name]))
    return levels
962 
963 
def create_help_message(rules_sets):
    """
    Creates the message to be displayed when help is provided.

    The text is a fixed usage/par-file description plus one generated line
    per known processing level (from create_levels_list) naming the rules
    sets that define it.
    """
    level_names = create_levels_list(rules_sets)
    message = """
  %prog [options] parameter_file

  The parameter_file is similar to, but not exactly like, parameter
  files for OCSSW processing programs:
  - It has sections separated by headers which are denoted by "["
  and "]".
  The section named "main" is required. Its allowed options are:
  ifile - Required entry naming the input file(s) to be processed.
  use_ancillary - use near real time ancillary data
  deletefiles - delete all the intermediate data files genereated
  overwrite - overwrite any data files which already exist
  use_existing - use any data files which already exist

  Simultaneous use of both the overwrite and use_existing options
  is not permitted.

  The names for other sections are the programs for which that section's
  entries are to be applied. Intermediate sections which are required for the
  final level of processing do not need to be defined if their default options
  are acceptable. A section can be empty. The final level of processing
  must have a section header, even if no entries appear within that section.
  - Entries within a section appear as key=value. Comma separated lists of
  values can be used when appropriate.
  - Comments are marked by "#"; anything appearing on a line after that
  character is ignored. A line beginning with a "#" is completely ignored.

  In addition to the main section, the following sections are allowed:
      Section name:           Applicable Instrument(s):
      -------------           -------------------------\n"""

    # One line per level: "name:    rules-set, rules-set".
    lvl_name_help = ''
    for lname in level_names:
        lvl_name_help += '    {0:24s}{1}\n'.\
                         format(lname[0] + ':', ', '.join(lname[1]))

    message += lvl_name_help
    message += """
  Example:

  # Sample par file for %prog.
  [main]
  ifile=2010345034027.L1A_LAC
  [l2gen]
  l2prod=chlor_a
  # final processing level
  """
    return message
1017 
def do_processing(sensors_sets, par_file, cmd_line_ifile=None):
    """
    Perform the processing for each step (element of processor_list) needed.

    par_file - path of the uber par file controlling the run
    cmd_line_ifile - optional input file from the command line; when given
        it overrides any ifile entry in the par file
    sensors_sets - not referenced in this function body; presumably kept
        for interface/compatibility reasons (TODO confirm)
    """
    global input_file_data
    #todo: Break this up into smaller parts!
    files_to_delete = []
    input_files_list = []
    (par_contnts, input_files_list) = get_par_file_contents(par_file,
                                                           FILE_USE_OPTS)
    # A command line ifile takes precedence over the par file's ifile.
    if cmd_line_ifile:
        skip_par_ifile = True
        if os.path.exists(cmd_line_ifile):
            input_files_list = [cmd_line_ifile]
        else:
            msg = 'Error! Specified ifile {0} does not exist.'.\
                  format(cmd_line_ifile)
            sys.exit(msg)
    else:
        skip_par_ifile = False
    if par_contnts['main']:
        if (not skip_par_ifile) and (not 'ifile' in par_contnts['main']):
            msg = 'Error! No ifile specified in the main section of {0}.'.\
                  format(par_file)
            sys.exit(msg)
        # Avoid overwriting file options that are already turned on in cfg_data
        # (from command line input).
        deletefiles, use_existing, overwrite = get_file_handling_opts(par_contnts)
        if deletefiles:
            cfg_data.deletefiles = True
        if use_existing:
            cfg_data.use_existing = True
        if overwrite:
            cfg_data.overwrite = True
        # use_ancillary=0 in the par file turns ancillary fetching off.
        if 'use_ancillary' in par_contnts['main'] and \
           int(par_contnts['main']['use_ancillary']) == 0:
            cfg_data.get_anc = False
        # The par file's odir wins only when no output dir was given on
        # the command line (output_dir_is_settable).
        if 'odir' in par_contnts['main']:
            dname = par_contnts['main']['odir']
            if os.path.exists(dname):
                if os.path.isdir(dname):
                    if cfg_data.output_dir_is_settable:
                        cfg_data.output_dir = os.path.realpath(dname)
                    else:
                        log_msg = 'Ignoring par file specification for output directory, {0}; using command line value, {1}.'.format(par_contnts['main']['odir'], cfg_data.output_dir)
                        logging.info(log_msg)
                else:
                    msg = 'Error! {0} is not a directory.'.format(dname)
                    sys.exit(msg)
            else:
                msg = 'Error! {0} does not exist.'.format(dname)
                sys.exit(msg)

    logging.debug('cfg_data.overwrite: ' + str(cfg_data.overwrite))
    logging.debug('cfg_data.use_existing: ' + str(cfg_data.use_existing))
    logging.debug('cfg_data.deletefiles: ' + str(cfg_data.deletefiles))
    if cfg_data.overwrite and cfg_data.use_existing:
        err_msg = 'Error! Incompatible options overwrite and use_existing were found in {0}.'.format(par_file)
        log_and_exit(err_msg)
    # A single ASCII, non-metadata input is treated as a file list file.
    if len(input_files_list) == 1:
        if MetaUtils.is_ascii_file(input_files_list[0]) and not MetaUtils.is_metadata_file(input_files_list[0]):
            input_files_list = read_file_list_file(input_files_list[0])
    input_file_data = get_input_files_type_data(input_files_list)
    if not input_file_data:
        log_and_exit('No valid data files were specified for processing.')
    logging.debug("input_file_data: " + str(input_file_data))
    src_files = get_source_files(input_file_data)
    sys.stdout.flush()
    # Run the processing chain; intermediate files are cleaned up even on
    # failure.
    try:
        get_processors(src_files, input_file_data, par_contnts, files_to_delete)
    except Exception:
        if DEBUG:
            err_msg = get_traceback_message()
            log_and_exit(err_msg)
        else:
            err_msg = "Unrecoverable error encountered in processing."
            log_and_exit(err_msg)
    finally:
        clean_files(files_to_delete)
    if cfg_data.verbose:
        print ("Processing complete.")
        sys.stdout.flush()
    logging.debug("Processing complete.")
    return
1102 
def execute_command(command):
    """
    Run the given command in a subprocess, send its stdout and stderr to
    the log, echo stdout to the console when verbose, and return the
    subprocess's exit status.
    """
    if DEBUG:
        print ("Entering execute_command, cfg_data.verbose =",
               cfg_data.verbose)
    logging.debug('Executing command:\n {0}'.format(command))
    completed = subprocess.run(command, capture_output=True, text=True,
                               shell=False)
    logging.info(completed.stdout)
    logging.info(completed.stderr)
    if cfg_data.verbose:
        print (completed.stdout)
    return completed.returncode
1123 
def extract_par_section(par_contents, section):
    """
    Return a copy of one named section (e.g. L1a, GEO, L1B, L2) of the
    "par" file contents.
    """
    return {key: par_contents[section][key] for key in par_contents[section]}
1132 
def find_geo_file(inp_file):
    """
    Return the name of the GEO file matching inp_file (derived by swapping
    "L1B" for "GEO" and dropping "_LAC"), or None if no such file exists.
    """
    dir_name = os.path.dirname(inp_file)
    geo_base = os.path.basename(inp_file).replace("L1B", "GEO").\
               replace("_LAC", "")
    candidate = os.path.join(dir_name, geo_base)
    return candidate if os.path.exists(candidate) else None
1147 
def find_geo_file2(inp_file, instrument, lvl):
    """
    Search for a GEO file corresponding to inp_file, given the instrument
    and processing level. Returns the GEO file name if that file exists;
    otherwise returns None.
    """
    src_dir = os.path.dirname(inp_file)
    src_base = os.path.basename(inp_file)
    # geo_base stays None for unrecognized instrument/level combinations;
    # previously those raised UnboundLocalError.
    geo_base = None
    if instrument.find('hawkeye') != -1:
        src_base_tmp = src_base.replace("L1A", "GEO")
        geo_base = src_base_tmp.replace("nc", "hdf")
    elif instrument.find('modis') != -1:
        if lvl.find('level 1a') != -1:
            src_base_tmp = src_base.replace("L1A", "GEO")
            geo_base_tmp = src_base_tmp.replace("_LAC", "")
            if geo_base_tmp.find('MODIS') != -1:
                geo_base = geo_base_tmp.replace("nc", "hdf")
            else:
                geo_base = geo_base_tmp
        elif lvl.find('level 1b') != -1:
            src_base_tmp = src_base.replace("L1B", "GEO")
            geo_base = src_base_tmp.replace("_LAC", "")
    elif instrument.find('viirs') != -1:
        geo_base_tmp = None
        if lvl.find('level 1a') != -1:
            geo_base_tmp = src_base.replace("L1A", "GEO-M")
        elif lvl.find('level 1b') != -1:
            geo_base_tmp = src_base.replace("L1B", "GEO")
        if geo_base_tmp is not None:
            if geo_base_tmp.find('VIIRS') != -1:
                geo_base_tmp2 = geo_base_tmp.replace("nc", "hdf")
                geo_base = geo_base_tmp2.replace("GEO-M", "GEO_M")
            else:
                geo_base = geo_base_tmp
    if geo_base is None:
        # Unrecognized combination: treat as "no GEO file available".
        return None
    geo_file = os.path.join(src_dir, geo_base)
    if not os.path.exists(geo_file):
        geo_file = None
    return geo_file
1184 
def find_viirs_geo_file(proc, first_svm_file):
    """
    Return the GMTCO GEO file name matching first_svm_file, or None when
    that file does not exist. (proc is not used here; kept for interface
    compatibility with the other find_*_geo_file helpers.)
    """
    candidate = first_svm_file.replace('SVM01', 'GMTCO').rstrip()
    return candidate if os.path.exists(candidate) else None
1194 
def get_batch_output_name(file_set, suffix):
    """
    Return an output file name for a "batch" run, i.e. a process that can
    accept multiple inputs, such as l2bin or l3bin.

    A single input keeps its stem; multiple inputs yield a stem built from
    the common mission prefix (when there is one) and the date or date
    span of the set.
    """
    mission_prefixes = ['A', 'C', 'O', 'S', 'T']
    stem = 'out'
    if not len(file_set):
        err_msg = "Error! An output file name could not be determined."
        log_and_exit(err_msg)
    elif len(file_set) == 1:
        stem = os.path.splitext(file_set[0])[0]
    else:
        # min/max keep the first occurrence on ties, matching the old
        # explicit scan with strict comparisons.
        earliest_file = min(file_set, key=get_file_date)
        latest_file = max(file_set, key=get_file_date)
        if (earliest_file[0] == latest_file[0]) and \
           (earliest_file[0] in mission_prefixes):
            stem = earliest_file[0]
        else:
            stem = ''
        earliest_file_date_stamp = get_file_date(earliest_file).strftime('%Y%j')
        latest_file_date_stamp = get_file_date(latest_file).strftime('%Y%j')
        if earliest_file_date_stamp == latest_file_date_stamp:
            stem += earliest_file_date_stamp
        else:
            stem += earliest_file_date_stamp + latest_file_date_stamp
    return ''.join([stem, '.', suffix])
1232 
def get_data_file_option(par_contents, opt_text):
    """
    Return True when opt_text appears in the main section of par_contents
    and its (upper-cased) value counts as a "true" option value;
    otherwise return False.
    """
    main_section = par_contents['main']
    if opt_text not in main_section:
        return False
    return mlp_utils.is_option_value_true(main_section[opt_text].upper())
1243 
    """
    Run the lonlat2pixline program and return the parameters found.

    Returns (start_line, end_line, start_pixel, end_pixel); each entry is
    None when lonlat2pixline did not report the corresponding value.
    """
    if proc.geo_file:
        # MODIS
        in_file = proc.geo_file
    else:
        # SeaWiFS
        in_file = proc.input_file
    # lonlat2pixline converts the lon/lat corner coordinates from the par
    # data into start/end line and pixel numbers.
    args = [in_file,
            proc.par_data['SWlon'],
            proc.par_data['SWlat'],
            proc.par_data['NElon'],
            proc.par_data['NElat']]
    lonlat_prog = os.path.join(proc.ocssw_bin, 'lonlat2pixline')
    lonlat_cmd = [lonlat_prog]
    lonlat_cmd.extend(args)
    logging.debug('Executing lonlat2pixline command: "%s"', " ".join(str(x) for x in lonlat_cmd))
    process_output = subprocess.run(lonlat_cmd, capture_output=True, text=True, shell=False)
    lonlat_output = process_output.stdout.splitlines()
    # Any value still None after the loop below means lonlat2pixline did
    # not emit that key.
    start_line = None
    end_line = None
    start_pixel = None
    end_pixel = None
    for line in lonlat_output:
        line_text = str(line).strip("'")
        if 'sline' in line_text:
            start_line = int(line_text.split('=')[1])
        if 'eline' in line_text:
            end_line = int(line_text.split('=')[1])
        if 'spixl' in line_text:
            start_pixel = int(line_text.split('=')[1])
        if 'epixl' in line_text:
            end_pixel = int(line_text.split('=')[1])
    return start_line, end_line, start_pixel, end_pixel
1280 
def get_file_date(filename):
    """
    Return a Python datetime derived from the year and day-of-year encoded
    in a recognized file name; exits via log_and_exit otherwise.
    """
    base_filename = os.path.basename(filename)
    if re.match(r'[ACMOQSTV]\d\d\d\d\d\d\d.*', base_filename):
        year = int(base_filename[1:5])
        doy = int(base_filename[5:8])
    elif re.match(r'\d\d\d\d\d\d\d.*', base_filename):
        # Some Aquarius names: the date digits start at position 0.
        year = int(base_filename[0:4])
        doy = int(base_filename[4:7])
    elif re.match(r'\w*_npp_d\d\d\d\d\d\d\d_.*', base_filename):
        # NPP: strip everything through the leading 'd' of the date field.
        trimmed = re.sub(r'\w*_npp_d', '', base_filename)
        year = int(trimmed[0:4])
        # NOTE(review): the [5:7] slice skips position 4 — it looks like
        # [4:7] may have been intended; confirm against real NPP names
        # before changing.
        doy = int(trimmed[5:7])
    else:
        log_and_exit('Unable to determine date for {0}'.format(filename))
    return datetime.datetime(year, 1, 1) + datetime.timedelta(doy - 1)
1303 
def get_file_handling_opts(par_contents):
    """
    Return the (deletefiles, use_existing, overwrite) flag values found in
    par_contents.
    """
    return tuple(get_data_file_option(par_contents, opt)
                 for opt in ('deletefiles', 'use_existing', 'overwrite'))
1312 
def get_input_files(par_data):
    """
    Return the list of input files named on the main section's ifile line,
    in a file list file, or both, with duplicates removed; returns None
    when neither source yields any files.
    """
    from_ifiles = []
    from_infilelist = []
    main_sect = par_data['main']
    if 'ifile' in main_sect:
        # Drop any trailing comment, then split on whitespace and common
        # separator punctuation.
        ifile_spec = main_sect['ifile'].split('#', 2)[0]
        from_ifiles = re.sub(r'[\t,:\[\]()"\']', ' ', ifile_spec).split()
    if 'infilelist' in main_sect:
        list_name = main_sect['infilelist']
        if os.path.isfile(list_name) and os.access(list_name, os.R_OK):
            with open(list_name, 'rt') as in_file_list_file:
                from_infilelist = [fn.rstrip()
                                   for fn in in_file_list_file.readlines()
                                   if not re.match(r'^\s*#', fn)]
    if not from_ifiles and not from_infilelist:
        return None
    # Make sure there are no duplicates. Tests with timeit showed that
    # list(set()) is much faster than a "uniqify" function.
    return list(set(from_ifiles + from_infilelist))
1339 
def get_input_files_type_data(input_files_list):
    """
    Return a dict mapping each input file to a (file_type, instrument)
    tuple, where file_type is the canonical processing-level name.
    Files whose type cannot be determined are reported and omitted.
    """
    # Maps the typer's reported type (lower-cased) to the canonical name
    # used throughout this module.
    converter = {
        'geo': 'geo',
        'level 0': 'level 0',
        'level 1 browse data': 'l1brsgen',
        'level 1a': 'level 1a',
        'level 1b': 'level 1b',
        'level 1c': 'level 1c',
        'sdr': 'level 1b',
        'level 2': 'l2gen',
        'level 3 binned': 'l3bin'
    }
    input_file_type_data = {}
    for inp_file in input_files_list:
        file_typer = mlp.get_obpg_file_type.ObpgFileTyper(inp_file)
        file_type, file_instr = file_typer.get_file_type()
        normalized_type = file_type.lower()
        if normalized_type in converter:
            input_file_type_data[inp_file] = (converter[normalized_type],
                                              file_instr.lower())
        else:
            warn_msg = "Warning: Unable to determine a type for file {0}. It will not be processed.".format(inp_file)
            print (warn_msg)
            logging.info(warn_msg)
    return input_file_type_data
1381 
def get_intermediate_processors(sensor, existing_procs, rules, lowest_source_level):
    """
    Create processor objects (with empty option sets) for products which
    are needed by the processing chain but were not explicitly requested
    in the par file.
    """
    existing_products = [proc.target_type for proc in existing_procs]
    needed_products = get_intermediate_products(existing_products, rules,
                                                lowest_source_level)
    return [processor.Processor(sensor, rules, prod, {}, cfg_data.hidden_dir)
            for prod in needed_products if prod not in existing_products]
1399 
def get_intermediate_products(existing_prod_names, ruleset,
                              lowest_source_level):
    """
    Return the sorted, de-duplicated list of programs needed (but not
    explicitly requested) to reach the named products from the lowest
    level source.
    """
    required_progs = []
    for prog in existing_prod_names:
        candidates = get_required_programs(prog, ruleset,
                                           lowest_source_level)
        if candidates is not None:
            required_progs.extend(candidates)
    required_progs = uniqify_list(required_progs)
    required_progs.sort()
    return required_progs
1416 
1418  """
1419  Returns the extension for an L2 file. For the time being, this is
1420  just '.L2'; however, different extensions may be wanted in the future, thus
1421  this function is in place.
1422  """
1423  return '.L2'
1424 
1426  """
1427  Returns the extension for an L3 Binned file. For the time being, this is
1428  just '.L3bin'; however, different extensions may be wanted in the future,
1429  thus this function is in place.
1430  """
1431  return '.L3b'
1432 
def get_lowest_source_level(source_files):
    """
    Return the level key of the lowest-level group of source files to be
    processed, using the canonical processing order.
    """
    order = ['level 1a', 'geo', 'level 1b', 'l2gen',
             'l2bin', 'l3bin', 'l3mapgen']
    level_keys = list(source_files.keys())
    if len(level_keys) == 1:
        # A single entry is returned as-is, even if it is not in order.
        return level_keys[0]
    # min keeps the first minimal key, matching the old explicit scan.
    return min(level_keys, key=order.index)
1448 
def get_options(par_data):
    """
    Convert one section of the uber par file into a list of command line
    option strings, skipping output-location and file-use options.
    """
    options = []
    for key, value in par_data.items():
        if key in ('ofile', 'odir') or key.lower() in FILE_USE_OPTS:
            continue
        # Valueless keys are passed through as bare flags.
        options.append('{}={}'.format(key, value) if value else key)
    return options
1462 
def get_output_name2(inp_files, targ_prog, suite=None, oformt=None, res=None):
    """
    Return the output name that would result from running targ_prog on
    inp_files (a single file specification or a list of data file objects).
    """
    cl_opts = optparse.Values()
    cl_opts.suite = suite
    cl_opts.oformat = oformt
    cl_opts.resolution = res
    if isinstance(inp_files, list):
        data_files = inp_files
    else:
        data_files = [get_obpg_data_file_object(inp_files)]
    return mlp.get_output_name_utils.get_output_name(data_files, targ_prog,
                                                     cl_opts)
1479 
def get_output_name3(input_name, input_files, suffix):
    """
    Determine the output name for a program to be run.

    MODIS level 0 inputs get a name of the form <mission letter> +
    <time stamp> + '.L1A', where the time stamp comes from the input's
    .const constructor file or from a MOD00.P-style file name; every other
    input simply has its extension replaced by suffix.
    """
    # Todo: rename to get_output_name and delete other get_output_name
    output_name = None
    if input_name in input_files and \
       input_files[input_name][0] == 'level 0' and \
       input_files[input_name][1].find('modis') != -1:
        if input_files[input_name][1].find('aqua') != -1:
            first_char = 'A'
        else:
            first_char = 'T'
        time_stamp = ''
        if os.path.exists(input_name + '.const'):
            start_time = None
            with open(input_name + '.const') as constructor_file:
                constructor_data = constructor_file.readlines()
            for line in constructor_data:
                if line.find('starttime=') != -1:
                    # Bug fix: slice the remainder of the line after '=';
                    # the original indexed only the single character at
                    # that position.
                    start_time = line[line.find('=') + 1:].strip()
                    break
            if start_time is None:
                # Previously an absent starttime line caused a NameError.
                err_msg = "Cannot determine time stamp for input file {0}".\
                          format(input_name)
                log_and_exit(err_msg)
            time_stamp = ProcUtils.date_convert(start_time, 't', 'j')
        else:
            if re.match(r'MOD00.P\d\d\d\d\d\d\d\.\d\d\d\d', input_name):
                time_stamp = input_name[7:14] + input_name[15:19] + '00'
            else:
                err_msg = "Cannot determine time stamp for input file {0}".\
                          format(input_name)
                log_and_exit(err_msg)
        output_name = first_char + time_stamp + '.L1A'
    else:
        # Non-(MODIS level 0) inputs: replace the extension with suffix.
        (dirname, basename) = os.path.split(input_name)
        basename_parts = basename.rsplit('.', 2)
        output_name = os.path.join(dirname, basename_parts[0] + '.' + suffix)
    return output_name
1521 
def get_par_file_contents(par_file, acceptable_single_keys):
    """
    Read the uber par file and return (par_contents, input_files_list),
    normalizing section titles to their canonical program names.
    """
    # Maps every accepted section title (including aliases) to its
    # canonical name.
    acceptable_par_keys = {
        'level 0' : 'level 0', 'l0' : 'level 0',
        'level 1a' : 'level 1a', 'l1a' : 'level 1a', 'l1agen': 'level 1a',
        'modis_L1A': 'level 1a',
        'l1brsgen': 'l1brsgen',
        'l1aextract': 'l1aextract',
        'l1aextract_modis': 'l1aextract_modis',
        'l1aextract_seawifs' : 'l1aextract_seawifs',
        'l1aextract_viirs' : 'l1aextract_viirs',
        'geo' : 'geo', 'modis_GEO': 'geo', 'geolocate_viirs': 'geo',
        'geolocate_hawkeye': 'geo',
        'level 1b' : 'level 1b', 'l1b' : 'level 1b', 'l1bgen' : 'level 1b',
        'modis_L1B': 'level 1b', 'calibrate_viirs': 'level 1b',
        'level 2' : 'l2gen',
        'l2gen' : 'l2gen',
        'l2bin' : 'l2bin',
        'l2brsgen' : 'l2brsgen',
        'l2extract' : 'l2extract',
        'l3bin' : 'l3bin',
        'l3mapgen' : 'l3mapgen',
        'main' : 'main'
    }
    if cfg_data.verbose:
        print ("Processing %s" % par_file)
    par_reader = uber_par_file_reader.ParReader(par_file,
                                                acceptable_single_keys,
                                                acceptable_par_keys)
    par_contents = par_reader.read_par_file()
    for key in list(par_contents.keys()):
        if key not in acceptable_par_keys:
            acc_key_str = ', '.join(list(acceptable_par_keys.keys()))
            err_msg = """Error! Parameter file {0} contains a section titled "{1}", which is not a recognized program.
The recognized programs are: {2}""".format(par_file, key, acc_key_str)
            log_and_exit(err_msg)
        canonical = acceptable_par_keys[key]
        if key != canonical:
            # Rename aliased sections to their canonical names.
            par_contents[canonical] = par_contents.pop(key)
    if 'main' not in par_contents:
        err_msg = 'Error! Could not find section "main" in {0}'.format(par_file)
        log_and_exit(err_msg)
    input_files_list = get_input_files(par_contents)
    return par_contents, input_files_list
1576 
def get_processors2(sensor, par_contents, rules, lowest_source_level):
    """
    Return the sorted list of processors needed to satisfy par_contents,
    including any implied intermediate processors.
    """
    processors = [processor.Processor(sensor, rules, key,
                                      extract_par_section(par_contents, key),
                                      cfg_data.hidden_dir)
                  for key in par_contents if key != 'main']
    if processors:
        processors.sort()  # get_intermediate_processors needs sorted input
        processors += get_intermediate_processors(sensor, processors, rules,
                                                  lowest_source_level)
        processors.sort()
    return processors
1594 
def exe_processor(proc, src_files, src_lvl):
    """
    Run one processor (batch or nonbatch) and return a tuple of
    (output file, whether an existing file was used).

    NOTE(review): when the rule set has no action for the target type,
    nothing is returned (implicitly None) — callers unpacking a tuple
    should be checked.
    """
    if proc.out_directory == cfg_data.hidden_dir:
        proc.out_directory = cfg_data.output_dir
    if proc.requires_batch_processing():
        logging.debug('Performing batch processing for ' + str(proc))
        out_file = run_batch_processor(proc, src_files[src_lvl])
        return out_file, False
    if proc.rule_set.rules[proc.target_type].action:
        logging.debug('Performing nonbatch processing for ' + str(proc))
        out_file, used_existing = run_nonbatch_processor(proc)
        if not out_file:
            print('The {0} processor produced no output files.'.format(proc.target_type), flush=True)
            msg = 'The {0} processor produced no output files.'.format(proc.target_type)
            logging.debug(msg)
        return out_file, used_existing
    msg = '-I- There is no way to create {0} files for {1}.'.format(proc.target_type, proc.instrument)
    logging.info(msg)
1626 
def get_processors(src_files, input_files, par_contents, files_to_delete):
    """
    Determine how to chain the processors together and run each processor
    for every applicable input file.

    :param src_files: dict mapping file type -> list of files of that type;
        updated in place as intermediate/output files are produced
    :param input_files: dict of input file information (currently only
        referenced by commented-out code)
    :param par_contents: dict of par-file sections keyed by processor name
    :param files_to_delete: list, appended to in place, of intermediate
        files to be removed after processing
    :return: None; results are communicated through src_files
    """
    # Canonical processing order; used to decide whether a program runs
    # before or after l2gen.
    order = ['level 0', 'level 1a', 'level 1c', 'geo', 'l1aextract',
             'level 1b', 'l1brsgen', 'l2gen', 'l2extract', 'l2brsgen',
             'l2bin', 'l3bin', 'l3mapgen']
    key_list = list(par_contents.keys())
    last_key = key_list[-1]
    # Insert implicit prerequisite sections (l2gen/l2bin) that the par file
    # omitted but that the requested processors need.
    if 'l2extract' in key_list:
        if not ('l2gen' in key_list):
            pos = key_list.index('l2extract')
            key_list.insert(pos, 'l2gen')
            items = list(par_contents.items())
            items.insert(pos, ('l2gen', {}))
            par_contents = dict(items)
    elif 'l2brsgen' in key_list:
        if not ('l2gen' in key_list):
            pos = key_list.index('l2brsgen')
            key_list.insert(pos, 'l2gen')
            items = list(par_contents.items())
            items.insert(pos, ('l2gen', {}))
            par_contents = dict(items)
    elif 'l2bin' in key_list:
        # if not ('l2gen' in key_list or 'l2extract' in key_list):
        if not ('l2gen' in key_list):
            pos = key_list.index('l2bin')
            key_list.insert(pos, 'l2gen')
            items = list(par_contents.items())
            items.insert(pos, ('l2gen', {}))
            par_contents = dict(items)
    elif 'l3bin' in key_list:
        # if not ('l2gen' in key_list or 'l2extract' in key_list):
        # Bug fix: this previously tested "'l3bin' in key_list", which is
        # always true inside this elif branch, so the implicit l2bin
        # section was never actually inserted.
        if not ('l2bin' in key_list):
            pos = key_list.index('l3bin')
            key_list.insert(pos, 'l2bin')
            items = list(par_contents.items())
            items.insert(pos, ('l2bin', {}))
            par_contents = dict(items)
    elif 'l3mapgen' in key_list:
        # if not ('l2gen' in key_list or 'l2extract' in key_list):
        if not ('l2gen' in key_list):
            pos = key_list.index('l3mapgen')
            key_list.insert(pos, 'l2gen')
            items = list(par_contents.items())
            items.insert(pos, ('l2gen', {}))
            par_contents = dict(items)
    for key in key_list:
        if key != 'main':
            section_contents = extract_par_section(par_contents, key)
            if not order.index(key) > order.index('l2gen'):
                # ---- Processors at or below l2gen ------------------------
                src_lvls = list(src_files.keys())
                if not key in src_files:
                    src_files[key] = []
                for src_lvl in src_lvls:
                    if order.index(src_lvl) < order.index('l2gen'):
                        for file in src_files[src_lvl]:
                            file_typer = mlp.get_obpg_file_type.ObpgFileTyper(file)
                            instrument = file_typer.get_file_type()[1].lower().split()[0]
                            # instrument = input_files[file][1].split()[0]
                            logging.debug("instrument: " + instrument)
                            if instrument in sensors_sets:
                                rules = sensors_sets[instrument].recipe
                                sensor = sensors_sets[instrument]
                            else:
                                rules = sensors_sets['general'].recipe
                                sensor = sensors_sets['general']
                            proc = processor.Processor(sensor, rules, key, section_contents,
                                                       cfg_data.hidden_dir)
                            proc.input_file = file
                            if file_typer.get_file_type()[0].lower().find('level 0') == -1:
                                # Input is level 1 or higher.
                                if sensor.require_geo and key != 'geo':
                                    if 'geo' in src_lvls:
                                        proc.geo_file = src_files['geo'][0]
                                    else:
                                        proc.geo_file = find_geo_file2(proc.input_file, instrument, src_lvl)

                                        if not proc.geo_file:
                                            if src_lvl.find('level 1b') != -1:
                                                # Cannot (re)create a GEO file from level 1b input.
                                                err_msg = 'Error! Cannot produce GEO file for {0}, Need level 1a file'.format(file)
                                                # log_and_exit(err_msg)
                                                # Bug fix: logging.debug() accepts no
                                                # flush keyword; the old call raised
                                                # TypeError whenever this path ran.
                                                logging.debug(err_msg)
                                                print(err_msg)
                                                print ('Skipping processing on file {0}.'.format(file), flush=True)
                                                logging.debug('')
                                                log_msg = 'Skipping processing on file {0}.'.format(file)
                                                logging.debug(log_msg)
                                                continue
                                            else:
                                                proc_geo = processor.Processor(sensor, rules, 'geo', {},
                                                                               cfg_data.hidden_dir)
                                                proc_geo.input_file = file
                                                print ('Running geo on file {0}.'.format(file), flush=True)
                                                logging.debug('')
                                                log_msg = 'Processing for geo:'
                                                logging.debug(log_msg)
                                                proc.geo_file, used_existing = exe_processor(proc_geo, src_files, src_lvl)
                                                if not proc.geo_file:
                                                    print ('Skipping processing on file {0}.'.format(file), flush=True)
                                                    logging.debug('')
                                                    log_msg = 'Skipping processing on file {0}.'.format(file)
                                                    logging.debug(log_msg)
                                                    continue
                                                if cfg_data.deletefiles and not used_existing:
                                                    if proc.geo_file:
                                                        files_to_delete.append(proc.geo_file)
                                                if used_existing:
                                                    print ('Used existing geo file {0}.'.format(proc.geo_file))
                                                    logging.debug('')
                                                    log_msg = 'Used existing geo file {0}.'.format(proc.geo_file)
                                                    logging.debug(log_msg)
                                if key == 'l2gen' and sensor.require_l1b_for_l2gen and src_lvl.find('level 1b') == -1:
                                    # l2gen for this sensor needs a level 1b intermediate.
                                    proc_l1b = processor.Processor(sensor, rules, 'level 1b', {},
                                                                   cfg_data.hidden_dir)
                                    if sensor.require_geo:
                                        proc_l1b.input_file = file
                                        proc_l1b.geo_file = proc.geo_file
                                    print ('Running level 1b on file {0}.'.format(file), flush=True)
                                    logging.debug('')
                                    log_msg = 'Processing for level 1b:'
                                    logging.debug(log_msg)
                                    proc.input_file, used_existing = exe_processor(proc_l1b, src_files, src_lvl)
                                    if not proc.input_file:
                                        print ('Skipping processing on file {0}.'.format(file), flush=True)
                                        logging.debug('')
                                        log_msg = 'Skipping processing on file {0}.'.format(file)
                                        logging.debug(log_msg)
                                        continue
                                    if cfg_data.deletefiles and not used_existing:
                                        if proc.input_file:
                                            files_to_delete.append(proc.input_file)
                                    if used_existing:
                                        print ('Used existing level 1b file {0}.'.format(proc.input_file))
                                        logging.debug('')
                                        log_msg = 'Used existing level 1b file {0}.'.format(proc.input_file)
                                        logging.debug(log_msg)
                                if proc.input_file:
                                    file_input = proc.input_file
                                    print ('Running {0} on file {1}.'.format(proc.target_type, proc.input_file), flush=True)
                                    logging.debug('')
                                    log_msg = 'Processing for {0}:'.format(proc.target_type)
                                    logging.debug(log_msg)
                                    out_file, used_existing = exe_processor(proc, src_files, src_lvl)
                                    if out_file:
                                        src_files[key].append(out_file)
                                    else:
                                        # Bug fix: flush=True was being passed to
                                        # str.format() instead of print().
                                        print ('Skipping processing on file {0}.'.format(file_input), flush=True)
                                        logging.debug('')
                                        log_msg = 'Skipping processing on file {0}.'.format(file_input)
                                        logging.debug(log_msg)
                                        continue
                                if cfg_data.deletefiles and not used_existing and key != last_key:
                                    if out_file and key.find('brsgen') == -1:
                                        files_to_delete.append(out_file)
                                if used_existing:
                                    print ('Used existing file {0}.'.format(out_file))
                                    logging.debug('')
                                    log_msg = 'Used existing file {0}.'.format(out_file)
                                    logging.debug(log_msg)
                                if key.find('l1aextract') != -1:
                                    # The extract output replaces the level 1a inputs.
                                    src_files['level 1a'] = src_files[key]
                                    del src_files['l1aextract']
                                    if 'geo' in src_files:
                                        del src_files['geo']
                                        src_lvls.remove('geo')
                                    # input_files[src_files[src_lvl][0]] = input_files[file]
                            else:
                                # Input is level 0; run level 1a first when needed.
                                if key != 'level 1a':
                                    proc_l1a = processor.Processor(sensor, rules, 'level 1a', {},
                                                                   cfg_data.hidden_dir)
                                    proc_l1a.input_file = file
                                    print ('Running level 1a on file {0}.'.format(file), flush=True)
                                    logging.debug('')
                                    log_msg = 'Processing for level 1a:'
                                    logging.debug(log_msg)
                                    proc.input_file, used_existing = exe_processor(proc_l1a, src_files, src_lvl)
                                    if not proc.input_file:
                                        print ('Skipping processing on file {0}.'.format(file), flush=True)
                                        logging.debug('')
                                        log_msg = 'Skipping processing on file {0}.'.format(file)
                                        logging.debug(log_msg)
                                        continue
                                    if cfg_data.deletefiles and not used_existing:
                                        if proc.input_file:
                                            files_to_delete.append(proc.input_file)
                                    if used_existing:
                                        print ('Used existing level 1a file {0}.'.format(proc.input_file))
                                        logging.debug('')
                                        log_msg = 'Used existing level 1a file {0}.'.format(proc.input_file)
                                        logging.debug(log_msg)
                                if proc.input_file and sensor.require_geo and key != 'geo' and key != 'level 1a':
                                    proc.geo_file = find_geo_file2(proc.input_file, instrument, 'level 1a')
                                    if not proc.geo_file:
                                        proc_geo = processor.Processor(sensor, rules, 'geo', {},
                                                                       cfg_data.hidden_dir)
                                        proc_geo.input_file = proc.input_file
                                        print ('Running geo on file {0}.'.format(proc.input_file), flush=True)
                                        logging.debug('')
                                        log_msg = 'Processing for geo:'
                                        logging.debug(log_msg)
                                        proc.geo_file, used_existing = exe_processor(proc_geo, src_files, src_lvl)
                                        if not proc.geo_file:
                                            print ('Skipping processing on file {0}.'.format(proc.input_file), flush=True)
                                            logging.debug('')
                                            log_msg = 'Skipping processing on file {0}.'.format(proc.input_file)
                                            logging.debug(log_msg)
                                            continue
                                        if cfg_data.deletefiles and not used_existing:
                                            if proc.geo_file:
                                                files_to_delete.append(proc.geo_file)
                                        if used_existing:
                                            print ('Used existing geo file {0}.'.format(proc.geo_file))
                                            logging.debug('')
                                            log_msg = 'Used existing geo file {0}.'.format(proc.geo_file)
                                            logging.debug(log_msg)
                                    if key == 'l2gen' and sensor.require_l1b_for_l2gen:
                                        proc_l1b = processor.Processor(sensor, rules, 'level 1b', {},
                                                                       cfg_data.hidden_dir)
                                        if sensor.require_geo:
                                            proc_l1b.input_file = proc.input_file
                                            proc_l1b.geo_file = proc.geo_file
                                        print ('Running level 1b on file {0}.'.format(proc.input_file), flush=True)
                                        logging.debug('')
                                        log_msg = 'Processing for level 1b:'
                                        logging.debug(log_msg)
                                        file_input = proc.input_file
                                        proc.input_file, used_existing = exe_processor(proc_l1b, src_files, src_lvl)
                                        if not proc.input_file:
                                            print ('Skipping processing on file {0}.'.format(file_input), flush=True)
                                            logging.debug('')
                                            log_msg = 'Skipping processing on file {0}.'.format(file_input)
                                            logging.debug(log_msg)
                                            continue
                                        if cfg_data.deletefiles and not used_existing:
                                            if proc.input_file:
                                                files_to_delete.append(proc.input_file)
                                        if used_existing:
                                            print ('Used existing level 1b file {0}.'.format(proc_l1b.input_file))
                                            logging.debug('')
                                            log_msg = 'Used existing level 1b file {0}.'.format(proc_l1b.input_file)
                                            logging.debug(log_msg)
                                if proc.input_file:
                                    file_input = proc.input_file
                                    print ('Running {0} on file {1}.'.format(proc.target_type, proc.input_file), flush=True)
                                    logging.debug('')
                                    log_msg = 'Processing for {0}:'.format(proc.target_type)
                                    logging.debug(log_msg)
                                    out_file, used_existing = exe_processor(proc, src_files, src_lvl)
                                    if out_file:
                                        src_files[key].append(out_file)
                                    else:
                                        print ('Skipping processing on file {0}.'.format(file_input), flush=True)
                                        logging.debug('')
                                        log_msg = 'Skipping processing on file {0}.'.format(file_input)
                                        logging.debug(log_msg)
                                        continue
                                if cfg_data.deletefiles and not used_existing and key != last_key:
                                    if out_file and key.find('brsgen') == -1:
                                        files_to_delete.append(out_file)
                                if used_existing:
                                    print ('Used existing file {0}.'.format(out_file))
                                    logging.debug('')
                                    log_msg = 'Used existing file {0}.'.format(out_file)
                                    logging.debug(log_msg)
                                if key.find('l1aextract') != -1:
                                    src_files['level 1a'] = src_files[key]
                                    del src_files['l1aextract']
                                    if 'geo' in src_files:
                                        del src_files['geo']
                                        src_lvls.remove('geo')
                                    # input_files[src_files[src_lvl][0]] = input_files[file]
                        if len(src_files) > 1 and key != 'geo' :
                            if key.find('brsgen') != -1:
                                del src_files[key]
                            else:
                                del src_files[src_lvl]
                                if len(src_files) > 1:
                                    if 'geo' in src_lvls:
                                        del src_files['geo']
                                        src_lvls.remove('geo')
            else:
                # ---- Processors above l2gen (l2extract .. l3mapgen) ------
                src_lvls = list(src_files.keys())
                rules = sensors_sets['general'].recipe
                sensor = sensors_sets['general']
                if not key in src_files:
                    src_files[key] = []
                for src_lvl in src_lvls:
                    if not order.index(src_lvl) < order.index('l2gen'):
                        for file in src_files[src_lvl]:
                            proc = processor.Processor(sensor, rules, key, section_contents,
                                                       cfg_data.hidden_dir)
                            proc.input_file = file
                            for program in proc.required_types:
                                if not program in src_files:
                                    # The prerequisite output is missing; build it
                                    # (and, recursively, its own prerequisites).
                                    proc1 = processor.Processor(sensor, rules, program, {},
                                                                cfg_data.hidden_dir)
                                    proc1.input_file = file
                                    # proc1.deletefiles = cfg_data.deletefiles
                                    for program2 in proc1.required_types:
                                        if program2.find(src_lvl) == -1:
                                            proc2 = processor.Processor(sensor, rules, program2, {},
                                                                        cfg_data.hidden_dir)
                                            proc2.input_file = file
                                            # proc2.deletefiles = cfg_data.deletefiles
                                            print ('Running {0} on file {1}.'.format(proc2.target_type, proc2.input_file), flush=True)
                                            logging.debug('')
                                            log_msg = 'Processing for {0}:'.format(proc2.target_type)
                                            logging.debug(log_msg)
                                            proc1.input_file, used_existing = exe_processor(proc2, src_files, src_lvl)
                                            if not proc1.input_file:
                                                print ('Skipping processing on file {0}.'.format(file), flush=True)
                                                logging.debug('')
                                                log_msg = 'Skipping processing on file {0}.'.format(file)
                                                logging.debug(log_msg)
                                                continue
                                            if cfg_data.deletefiles and not used_existing:
                                                if proc1.input_file:
                                                    files_to_delete.append(proc1.input_file)
                                            if used_existing:
                                                print ('Used existing file {0}.'.format(proc1.input_file))
                                                logging.debug('')
                                                log_msg = 'Used existing file {0}.'.format(proc1.input_file)
                                                logging.debug(log_msg)
                                    if proc1.input_file:
                                        file_input = proc1.input_file
                                        print ('Running {0} on file {1}.'.format(proc1.target_type, proc1.input_file), flush=True)
                                        logging.debug('')
                                        log_msg = 'Processing for {0}:'.format(proc1.target_type)
                                        logging.debug(log_msg)
                                        proc.input_file, used_existing = exe_processor(proc1, src_files, src_lvl)
                                        if not proc.input_file:
                                            print ('Skipping processing on file {0}.'.format(file_input), flush=True)
                                            logging.debug('')
                                            log_msg = 'Skipping processing on file {0}.'.format(file_input)
                                            logging.debug(log_msg)
                                            continue
                                        if cfg_data.deletefiles:
                                            if proc.input_file and not used_existing:
                                                files_to_delete.append(proc.input_file)
                                        if used_existing:
                                            print ('Used existing file {0}.'.format(proc.input_file))
                                            logging.debug('')
                                            log_msg = 'Used existing file {0}.'.format(proc.input_file)
                                            logging.debug(log_msg)
                                        del src_files[src_lvl]
                                if proc.input_file:
                                    file_input = proc.input_file
                                    print ('Running {0} on file {1}.'.format(proc.target_type, proc.input_file), flush=True)
                                    logging.debug('')
                                    log_msg = 'Processing for {0}:'.format(proc.target_type)
                                    logging.debug(log_msg)
                                    out_file, used_existing = exe_processor(proc, src_files, program)
                                    if out_file:
                                        src_files[key].append(out_file)
                                    else:
                                        print ('Skipping processing on file {0}.'.format(file_input), flush=True)
                                        logging.debug('')
                                        log_msg = 'Skipping processing on file {0}.'.format(file_input)
                                        logging.debug(log_msg)
                                        break
                                        # continue
                                if cfg_data.deletefiles and not used_existing and key != last_key:
                                    if out_file and key.find('brsgen') == -1:
                                        files_to_delete.append(out_file)
                                if used_existing:
                                    print ('Used existing file {0}.'.format(out_file))
                                    logging.debug('')
                                    log_msg = 'Used existing file {0}.'.format(out_file)
                                    logging.debug(log_msg)
                                if program in src_files:
                                    if proc.target_type.find('brsgen') != -1:
                                        del src_files[proc.target_type]
                                    else:
                                        del src_files[program]
                            if key.find('l2extract') != -1:
                                src_files['l2gen'] = src_files[key]
                                del src_files['l2extract']
                            # Bug fix: requires_batch_processing is a method; without
                            # the call parentheses the condition was always true, so
                            # only the first file of each source level was processed.
                            if proc.requires_batch_processing():
                                break
    return
2007 
def get_required_programs(target_program, ruleset, lowest_source_level):
    """
    Return the list of programs required to produce the desired final output,
    ordered so that upstream (prerequisite) programs come first.

    :param target_program: name of the program whose prerequisites are wanted
    :param ruleset: rule set supplying .rules (program -> rule) and .order
    :param lowest_source_level: the lowest-level source type already on hand
    :return: list of program names, prerequisites first
    """
    rule = ruleset.rules[target_program]
    source_types = rule.src_file_types
    # A rule whose first source type is its own target type is terminal:
    # nothing upstream needs to run.
    if source_types[0] == rule.target_type:
        return [target_program]
    required = []
    for source_type in source_types:
        if source_type not in ruleset.rules:
            continue
        if ruleset.order.index(source_type) > \
           ruleset.order.index(lowest_source_level):
            required.insert(0, source_type)
            if len(source_types) > 1:
                required.insert(0, source_types[1])
        # Recurse to pick up the prerequisites of this source type, keeping
        # them ahead of everything gathered so far.
        for upstream in get_required_programs(source_type, ruleset,
                                              lowest_source_level):
            required.insert(0, upstream)
    return required
2030 
def get_source_geo_files(source_files, proc_src_types, proc_src_ndx):
    """
    Find the GEO file corresponding to each input file of one source type.

    :param source_files: dict mapping source type -> list of source files
    :param proc_src_types: list of source types for the processor
    :param proc_src_ndx: index into the proc_src_types list pointing to the
        source type to use to get the input files
    :return: list of GEO files that correspond to the files in source_files
        (exits via log_and_exit if any GEO file cannot be found)
    """
    inp_files = source_files[proc_src_types[proc_src_ndx]]
    geo_files = []
    for inp_file in inp_files:
        geo_file = find_geo_file(inp_file)
        if geo_file:
            geo_files.append(geo_file)
        else:
            # Bug fix: the message previously formatted geo_file, which is
            # always None/empty on this branch; report the data file whose
            # GEO file could not be found instead.
            err_msg = 'Error! Cannot find GEO ' \
                      'file for {0}.'.format(inp_file)
            log_and_exit(err_msg)
    return geo_files
2050 
def get_source_file_sets(proc_src_types, source_files, src_key, requires_all_sources):
    """
    Returns the set of source files needed.

    :param proc_src_types: list of source file types the processor accepts
    :param source_files: dict mapping source type -> list of files
    :param src_key: key into source_files used when only one source type
                    is involved
    :param requires_all_sources: True when the processor needs every source
                                 type paired up (e.g. data file + GEO file)
    :return: for the single-source case, a list of files; for the paired
             case, a list of (file, companion) tuples
    """
    if len(proc_src_types) == 1:
        try:
            src_file_sets = source_files[src_key]
        except Exception:
            # A missing src_key is unrecoverable; report with full traceback
            # detail only when debugging.
            if DEBUG:
                err_msg = get_traceback_message()
                log_and_exit(err_msg)
            else:
                err_msg = 'Error! Unable to determine what source files are required for the specified output files.'
                log_and_exit(err_msg)
    else:
        if requires_all_sources:
            if len(proc_src_types) == 2:
                if proc_src_types[0] in source_files \
                   and proc_src_types[1] in source_files:
                    # Both source types present: pair them positionally.
                    src_file_sets = list(zip(source_files[proc_src_types[0]],
                                             source_files[proc_src_types[1]]))
                else:
                    # Only one source type present; the other must be 'geo',
                    # in which case the GEO companions are located on disk.
                    if proc_src_types[0] in source_files:
                        if proc_src_types[1] == 'geo':
                            geo_files = get_source_geo_files(source_files, proc_src_types, 0)
                            src_file_sets = list(zip(source_files[proc_src_types[0]],
                                                     geo_files))
                        else:
                            err_msg = 'Error! Cannot find all {0} and' \
                                      ' {1} source files.'.format(proc_src_types[0],
                                                                  proc_src_types[1])
                            log_and_exit(err_msg)
                    elif proc_src_types[1] in source_files:
                        if proc_src_types[0] == 'geo':
                            geo_files = get_source_geo_files(source_files, proc_src_types, 1)
                            src_file_sets = list(zip(source_files[proc_src_types[1]],
                                                     geo_files))
                        else:
                            err_msg = 'Error! Cannot find all {0} and' \
                                      ' {1} source files.'.format(proc_src_types[0],
                                                                  proc_src_types[1])
                            log_and_exit(err_msg)
                    else:
                        err_msg = 'Error! Cannot find all source files.'
                        log_and_exit(err_msg)
            else:
                err_msg = 'Error! Encountered too many source file types.'
                log_and_exit(err_msg)
        else:
            # Any one matching source type will do; the last match wins.
            # NOTE(review): if no proc_src_type matches, src_file_sets is
            # unbound and the return raises UnboundLocalError — confirm
            # whether callers guarantee at least one match.
            for proc_src_type in proc_src_types:
                if proc_src_type in source_files:
                    src_file_sets = source_files[proc_src_type]
    return src_file_sets
2109 
def get_source_files(input_files):
    """
    Group the input file paths by their file type.

    :param input_files: dict mapping file path -> file info sequence whose
        first element is the file type
    :return: dict mapping each file type found to the list of file paths
        having that type
    """
    source_files = {}
    for file_path, file_info in input_files.items():
        source_files.setdefault(file_info[0], []).append(file_path)
    return source_files
2123 
def get_source_products_types(targt_prod, ruleset):
    """
    Return the list of source product types needed to produce the final product.

    :param targt_prod: name of the final target product type
    :param ruleset: rule set providing .order (processing order) and .rules
                    (product type -> rule with .target_type/.src_file_types)
    :return: list containing targt_prod plus every source product type found
             by walking the rules backward from targt_prod
    """
    src_prod_names = [targt_prod]
    targt_pos = ruleset.order.index(targt_prod)
    new_prod_names = []
    # Walk backward from the target's position; note that range(.., 1, -1)
    # never examines positions 1 or 0 of ruleset.order.
    for pos in range(targt_pos, 1, -1):
        for prod_name in src_prod_names:
            if ruleset.rules[ruleset.order[pos]].target_type == prod_name:
                for src_typ in ruleset.rules[ruleset.order[pos]].src_file_types:
                    new_prod_names.append(src_typ)
    # NOTE(review): source indentation was lost in extraction; this
    # accumulation is placed after the loop — confirm against upstream.
    src_prod_names += new_prod_names
    return src_prod_names
2138 
def get_traceback_message():
    """
    Returns an error message built from traceback data.

    Must be called from inside an exception handler, since it reads
    sys.exc_info() and traceback.format_exc().
    """
    # NOTE(review): the 'def' line was lost in extraction and is
    # reconstructed here from the call sites and module index.
    exc_parts = [str(l) for l in sys.exc_info()]
    # Reduce e.g. "<class 'mod.SomeError'>" to just "SomeError".
    err_type_parts = str(exc_parts[0]).strip().split('.')
    err_type = err_type_parts[-1].strip("'>")
    tb_data = traceback.format_exc()
    # NOTE(review): assumes the line number of interest is on the
    # third-from-last line of the formatted traceback — fragile; confirm.
    tb_line = tb_data.splitlines()[-3]
    line_num = tb_line.split(',')[1]
    st_data = traceback.extract_stack()
    err_file = os.path.basename(st_data[-1][0])
    msg = 'Error! The {0} program encountered an unrecoverable {1}, {2}, at {3} of {4}!'.\
          format(cfg_data.prog_name,
                 err_type, exc_parts[1], line_num.strip(), err_file)
    return msg
2155 
def initialze_sensors():
    """
    Initialize sensors.

    Returns a dict mapping a lower-case instrument name to its Sensor
    object; 'general' is the fallback used for unrecognized instruments.
    """
    # NOTE(review): the 'def' line was lost in extraction and is
    # reconstructed (including the original's 'initialze' spelling, which
    # the call in main() depends on).
    sensors = dict(general=Sensor(),
                   goci=Sensor_goci(),
                   hawkeye=Sensor_hawkeye(),
                   meris=Sensor_meris(),
                   modis=Sensor_modis(),
                   seawifs=Sensor_seawifs(),
                   viirs=Sensor_viirs())
    return sensors
2168 
def log_and_exit(error_msg):
    """
    Record error_msg in the info-level log, then terminate the program.

    Passing the message string to sys.exit() makes Python print it to
    stderr and exit with status code 1 (see the sys.exit documentation).
    """
    logging.info(error_msg)
    sys.exit(error_msg)
2177 
def main():
    """
    Main processing function: parse the command line, build the run
    configuration, start logging, and hand off to do_processing().

    :return: 0 on normal completion (errors exit via log_and_exit/sys.exit)
    """
    global cfg_data
    global DEBUG
    # rules_sets = build_rules()
    global sensors_sets
    sensors_sets = initialze_sensors()
    cl_parser = optparse.OptionParser(usage=create_help_message(sensors_sets),
                                      version=' '.join(['%prog', __version__]))
    (options, args) = process_command_line(cl_parser)

    if len(args) < 1:
        print ("\nError! No file specified for processing.\n")
        cl_parser.print_help()
    else:
        if options.debug:
            # Don't just set DEBUG = options.debug, as that would override the
            # in-program setting.
            DEBUG = True
        check_options(options)
        # Hidden working directory is '.seadas_data' under the current dir.
        cfg_data = ProcessorConfig('.seadas_data', os.getcwd(),
                                   options.verbose, options.overwrite,
                                   options.use_existing,
                                   options.deletefiles, options.odir)
        if not os.access(cfg_data.hidden_dir, os.R_OK):
            log_and_exit("Error! The working directory is not readable!")
        if os.path.exists(args[0]):
            log_timestamp = datetime.datetime.today().strftime('%Y%m%d%H%M%S')
            start_logging(log_timestamp)
            try:
                # args[0] is the par file; --ifile optionally overrides the
                # input file named inside it.
                if options.ifile:
                    do_processing(sensors_sets, args[0], options.ifile)
                else:
                    do_processing(sensors_sets, args[0])
            except Exception:
                if DEBUG:
                    err_msg = get_traceback_message()
                    log_and_exit(err_msg)
                else:
                    # todo: make a friendlier error message
                    err_msg = 'Unanticipated error encountered during processing!'
                    log_and_exit(err_msg)
        else:
            err_msg = 'Error! Parameter file {0} does not exist.'.\
                      format(args[0])
            sys.exit(err_msg)
    logging.shutdown()
    return 0
2242 
def process_command_line(cl_parser):
    """
    Get arguments and options from the calling command line.

    To be consistent with other OBPG programs, an underscore ('_') is used for
    multiword options, instead of a dash ('-').

    :param cl_parser: optparse.OptionParser to which the supported options
        are added; it then parses sys.argv
    :return: (options, args) from the parser; any positional argument of the
        form 'par=FILE' is reduced to just FILE
    """
    cl_parser.add_option('--debug', action='store_true', dest='debug',
                         default=False, help=optparse.SUPPRESS_HELP)
    cl_parser.add_option('-d', '--deletefiles', action='store_true',
                         dest='deletefiles', default=False,
                         help='delete files created during processing')
    cl_parser.add_option('--ifile', action='store', type='string',
                         dest='ifile', help="input file")
    cl_parser.add_option('--output_dir', '--odir',
                         action='store', type='string', dest='odir',
                         help="user specified directory for output")
    cl_parser.add_option('--overwrite', action='store_true',
                         dest='overwrite', default=False,
                         help='overwrite files which already exist (default = stop processing if file already exists)')
    cl_parser.add_option('--use_existing', action='store_true',
                         dest='use_existing', default=False,
                         help='use files which already exist (default = stop processing if file already exists)')
    cl_parser.add_option('-v', '--verbose',
                         action='store_true', dest='verbose', default=False,
                         help='print status messages to stdout')

    (options, args) = cl_parser.parse_args()
    for ndx, cl_arg in enumerate(args):
        if cl_arg.startswith('par='):
            # Bug fix: remove only the literal 'par=' prefix. The previous
            # str.lstrip('par=') stripped any leading run of the characters
            # 'p', 'a', 'r', '=' and mangled names like 'par=proc.par'
            # into 'oc.par'.
            args[ndx] = cl_arg[len('par='):]
    if options.overwrite and options.use_existing:
        log_and_exit('Error! Options overwrite and use_existing cannot be ' + \
                     'used simultaneously.')
    return options, args
2282 
def read_file_list_file(flf_name):
    """
    Read the file-list file flf_name and return the input files it names.

    Blank lines and '#'-comments are ignored.  If any listed file cannot be
    located, the program terminates via log_and_exit with a message naming
    every missing file.
    """
    found = []
    missing = []
    with open(flf_name, 'rt') as list_file:
        for raw_line in list_file.readlines():
            fname = raw_line.split('#')[0].strip()
            if not fname:
                continue
            if os.path.exists(fname):
                found.append(fname)
            else:
                missing.append(fname)
    if missing:
        err_msg = 'Error! File {0} specified the following input files which could not be located:\n   {1}'.\
                  format(flf_name, ', '.join([bl for bl in missing]))
        log_and_exit(err_msg)
    return found
2303 
def run_batch_processor(processor, file_set):
    """
    Run a processor, e.g. l2bin, which processes batches of files.

    :param processor: Processor object to execute; its input_file and
        output_file attributes are set here before execute() is called
    :param file_set: list of input files for the batch
    :return: the processor's output file path
    """
    # A tarball input is passed through directly; otherwise write the batch
    # to a .lis file in the hidden working directory and use that as input.
    if os.path.exists((file_set[0])) and tarfile.is_tarfile(file_set[0]):
        processor.input_file = file_set[0]
    else:
        timestamp = time.strftime('%Y%m%d_%H%M%S', time.gmtime(time.time()))
        file_list_name = cfg_data.hidden_dir + os.sep + 'files_' + \
                         processor.target_type + '_' + timestamp + '.lis'
        with open(file_list_name, 'wt') as file_list:
            for fname in file_set:
                file_list.write(fname + '\n')
        processor.input_file = file_list_name
    data_file_list = []
    finder_opts = {}
    for fspec in file_set:
        dfile = get_obpg_data_file_object(fspec)
        data_file_list.append(dfile)
    # 'prod' is accepted as a synonym for 'suite' when building the
    # output-name finder options.
    if 'suite' in processor.par_data:
        finder_opts['suite'] = processor.par_data['suite']
    elif 'prod' in processor.par_data:
        finder_opts['suite'] = processor.par_data['prod']
    if 'resolution' in processor.par_data:
        finder_opts['resolution'] = processor.par_data['resolution']
    if 'oformat' in processor.par_data:
        finder_opts['oformat'] = processor.par_data['oformat']
    if processor.output_file:
        processor.output_file = os.path.join(processor.out_directory,
                                             processor.output_file )
    else:
        # NOTE(review): the line containing this call's head was lost in
        # extraction; reconstructed by analogy with run_nonbatch_processor —
        # confirm against the upstream source.
        processor.output_file = os.path.join(processor.out_directory,
                                             mlp.get_output_name_utils.get_output_name(data_file_list,
                                                                                       processor.target_type,
                                                                                       finder_opts))
    if DEBUG:
        log_msg = "Running {0} with input file {1} to generate {2} ".\
                  format(processor.target_type,
                         processor.input_file,
                         processor.output_file)
        logging.debug(log_msg)
    processor.execute()
    return processor.output_file
2351 
def run_nonbatch_processor(processor):
    """
    Run a processor which deals with single input files (or pairs of files in
    the case of MODIS L1B processing in which GEO files are also needed).

    :param processor: Processor object to execute
    :return: (output_file, used_existing) where output_file is the produced
        (or pre-existing) file, None/'' on failure, and used_existing tells
        the caller whether an already-existing target file was reused
    """
    used_existing = False
    dfile = get_obpg_data_file_object(processor.input_file)

    # Build an optparse.Values carrying the naming options; 'prod' is
    # accepted as a synonym for 'suite'.
    cl_opts = optparse.Values()
    if 'suite' in processor.par_data:
        cl_opts.suite = processor.par_data['suite']
    elif 'prod' in processor.par_data:
        cl_opts.suite = processor.par_data['prod']
    else:
        cl_opts.suite = None
    if 'resolution' in processor.par_data:
        cl_opts.resolution = processor.par_data['resolution']
    else:
        cl_opts.resolution = None
    if 'oformat' in processor.par_data:
        cl_opts.oformat = processor.par_data['oformat']
    else:
        cl_opts.oformat = None
    if processor.output_file:
        output_file = os.path.join(processor.out_directory, processor.output_file)
    else:
        output_file = os.path.join(processor.out_directory,
                                   mlp.get_output_name_utils.get_output_name([dfile], processor.target_type, cl_opts))
    if DEBUG:
        print ('in run_nonbatch_processor, output_file = ' + output_file)
    processor.output_file = output_file
    # Only run when the target is absent or overwriting is allowed.
    if (not os.path.exists(output_file)) or cfg_data.overwrite:
        if cfg_data.verbose:
            print ()
            print ('\nRunning ' + str(processor))
            sys.stdout.flush()
        proc_status = processor.execute()

        if proc_status:
            # Non-zero status: record the failure and signal it by
            # returning a falsy output file.
            output_file = None
            msg = "Error! Status {0} was returned during {1} {2} processing.".\
                  format(proc_status, processor.sensor.name,
                         processor.target_type)
            # log_and_exit(msg)
            logging.debug(msg)
            # Todo: remove the failed file from future processing
            # NOTE(review): output_file was just set to None, so this
            # branch can never execute — apparent dead code; confirm intent.
            if output_file:
                output_file = ''
    elif not cfg_data.use_existing:
        log_and_exit('Error! Target file {0} already exists.'.\
                     format(output_file))
    else:
        used_existing = True
    processor.input_file = ''
    processor.output_file = ''
    return output_file, used_existing
2427 
def run_script(proc, script_name):
    """
    Build and execute the command line for the processing script passed in.

    :param proc: Processor supplying the input/output file names and the
        par-file options
    :param script_name: name of the script to locate and run
    :return: the result of execute_command on the assembled command
    """
    cmd = [build_executable_path(script_name),
           'ifile={}'.format(proc.input_file),
           'ofile={}'.format(proc.output_file),
           get_options(proc.par_data)]
    logging.debug('\nRunning: {}'.format(" ".join(str(x) for x in cmd)))
    return execute_command(cmd)
2441 
2442 
def start_logging(time_stamp):
    """
    Opens log file(s) for debugging.

    An info-level log is always attached to the root logger; a debug-level
    log is attached in addition when DEBUG is set.  Both files are written
    into cfg_data.output_dir and carry time_stamp in their names.
    """
    info_log_name = ''.join(['Processor_', time_stamp, '.log'])
    debug_log_name = ''.join(['multilevel_processor_debug_', time_stamp,
                              '.log'])
    info_log_path = os.path.join(cfg_data.output_dir, info_log_name)
    debug_log_path = os.path.join(cfg_data.output_dir, debug_log_name)
    mlp_logger = logging.getLogger()
    # NOTE(review): with setLevel left commented out, the root logger keeps
    # its default WARNING level, so INFO/DEBUG records may be filtered
    # before reaching these handlers — confirm this is intentional.
    #mlp_logger.setLevel(logging.DEBUG)

    info_hndl = logging.FileHandler(info_log_path)
    info_hndl.setLevel(logging.INFO)
    mlp_logger.addHandler(info_hndl)

    if DEBUG:
        debug_hndl = logging.FileHandler(debug_log_path)
        debug_hndl.setLevel(logging.DEBUG)
        mlp_logger.addHandler(debug_hndl)
    logging.debug('Starting ' + os.path.basename(sys.argv[0]) + ' at ' +
                  datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
2465 
def uniqify_list(orig_list):
    """
    Return a copy of orig_list with duplicates removed, keeping the order
    of first appearance.  Items must be hashable.
    """
    # dict preserves insertion order, so fromkeys() deduplicates while
    # keeping each element's first position.
    return list(dict.fromkeys(orig_list))
2478 
2479 
2480 
# Debugging flag; can also be enabled at run time via the --debug command
# line option (see main()).
DEBUG = False
#DEBUG = True

# ProcessorConfig instance for this run; populated in main().
cfg_data = None
# Option names controlling how intermediate/output files are handled.
FILE_USE_OPTS = ['deletefiles', 'overwrite', 'use_existing']
# Output file name suffix for each processor/target type.
SUFFIXES = {
    'geo': 'GEO',
    'l1brsgen': 'L1B_BRS',
    'l1aextract': 'L1A.sub',
    'l1aextract_viirs': 'L1A.sub',
    'l1aextract_seawifs': 'L1A.sub',
    'l1aextract_modis': 'L1A.sub',
    # 'l1mapgen': 'L1B_MAP',
    'l2bin': 'L3b',
    'l2brsgen': 'L2_BRS',
    'l2extract': 'L2.sub',
    'l2gen': 'L2',
    # 'l2mapgen': 'L2B_MAP',
    'l3bin': 'L3b',
    'l3mapgen': 'L3m',
    'level 1a': 'L1A',
    'level 1b': 'L1B_LAC',
    # 'smigen': 'SMI'
}
# Per-input-file metadata gathered during processing.
input_file_data = {}
#verbose = False
2507 
# Verify that the OCSSW environment is configured before any processing.
# Bug fix: os.environ['OCSSWROOT'] raised KeyError when the variable was
# unset, so the graceful sys.exit branch below was unreachable; use
# os.environ.get, which returns None for a missing variable.
if os.environ.get('OCSSWROOT'):
    OCSSWROOT_DIR = os.environ['OCSSWROOT']
    logging.debug('OCSSWROOT -> %s', OCSSWROOT_DIR)
else:
    sys.exit('Error! Cannot find OCSSWROOT environment variable.')
2513 
if __name__ == "__main__":
    # Propagate main()'s return value as the process exit status.
    sys.exit(main())
def get_source_products_types(targt_prod, ruleset)
def run_script(proc, script_name)
def get_processors2(sensor, par_contents, rules, lowest_source_level)
def get_input_files_type_data(input_files_list)
list(APPEND LIBS ${PGSTK_LIBRARIES}) add_executable(atteph_info_modis atteph_info_modis.c) target_link_libraries(atteph_info_modis $
Definition: CMakeLists.txt:7
def __init__(self, hidden_dir, ori_dir, verbose, overwrite, use_existing, deletefiles, out_dir=None)
def get_intermediate_products(existing_prod_names, ruleset, lowest_source_level)
def get_output_name3(input_name, input_files, suffix)
def get_intermediate_processors(sensor, existing_procs, rules, lowest_source_level)
def get_batch_output_name(file_set, suffix)
def find_geo_file2(inp_file, instrument, lvl)
def get_par_file_contents(par_file, acceptable_single_keys)
def get_output_name2(inp_files, targ_prog, suite=None, oformt=None, res=None)
def build_file_list_file(filename, file_list)
def build_executable_path(prog_name)
def create_help_message(rules_sets)
def find_viirs_geo_file(proc, first_svm_file)
def clean_files(delete_list)
def get_obpg_data_file_object(file_specification)
def get_source_file_sets(proc_src_types, source_files, src_key, requires_all_sources)
def do_processing(sensors_sets, par_file, cmd_line_ifile=None)
def get_required_programs(target_program, ruleset, lowest_source_level)
def get_processors(src_files, input_files, par_contents, files_to_delete)
def run_nonbatch_processor(processor)
const char * str
Definition: l1c_msi.cpp:35
def get_output_name(data_files, target_program, clopts)
def get_data_file_option(par_contents, opt_text)
def create_levels_list(rules_sets)
def extract_par_section(par_contents, section)
def get_lowest_source_level(source_files)
def get_source_files(input_files)
def run_batch_processor(processor, file_set)
def exe_processor(proc, src_files, src_lvl)
def process_command_line(cl_parser)
def get_file_handling_opts(par_contents)
def get_source_geo_files(source_files, proc_src_types, proc_src_ndx)
def build_l2gen_par_file(par_contents, input_file, geo_file, output_file)