Program to perform multilevel processing (previously known as the
seadas_processor and sometimes referred to as the 'uber' processor).
import ConfigParser as configparser

import get_output_name
__author__ = 'melliott'
    """
    Configuration data for the program which needs to be widely available.
    """

    def __init__(self, hidden_dir, ori_dir, verbose, overwrite, use_existing,
                 deletefiles, out_dir=None):
        self.prog_name = os.path.basename(sys.argv[0])
        if not os.path.exists(hidden_dir):
            if sys.exc_info()[1].find('Permission denied:') != -1:
        cfg_file_path = os.path.join(self.hidden_dir, 'seadas_ocssw.cfg')
        if os.path.exists(cfg_file_path):
        ProcessorConfig._instance = self
    def _read_saved_options(self, cfg_path):
        """
        Gets options stored in the program's configuration file.
        """
        cfg_parser = configparser.ConfigParser()
        cfg_parser.read(cfg_path)
            int(cfg_parser.get('main',
        except configparser.NoSectionError as nse:
            print('nse: ' + str(nse))
            print('sys.exc_info(): ')
            for msg in sys.exc_info():
                print('  ' + str(msg))
            log_and_exit('Error! Configuration file has no "main" ' +
        except configparser.NoOptionError:
            log_and_exit('Error! The "main" section of the configuration ' +
                         'file does not specify a "par_file_age".')
        except configparser.MissingSectionHeaderError:
            log_and_exit('Error! Bad configuration file, no section headers ' +
    def _set_temp_dir(self):
        """
        Sets the value of the temporary directory.
        """
        if os.path.exists('/tmp') and os.path.isdir('/tmp') and \
           os.access('/tmp', os.W_OK):
        if os.path.exists(cwd) and os.path.isdir(cwd) and \
           os.access(cwd, os.W_OK):
        log_and_exit('Error! Unable to establish a temporary ' +
    def _write_default_cfg_file(self, cfg_path):
        """
        Writes out a configuration file using default values.
        """
        with open(cfg_path, 'wt') as cfg_file:
            cfg_file.write('[main]\n')
            cfg_file.write('par_file_age=30 # units are days\n')
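    # Example (illustrative, not executed): a freshly written default
    # configuration file produced by the method above contains exactly:
    #
    #   [main]
    #   par_file_age=30 # units are days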
    """
    Sensor contains the recipe and processing methods for general sensors.
    """
            'level 1a': processing_rules.build_rule('level 1a', ['level 0'],
            'l1brsgen': processing_rules.build_rule('l1brsgen', ['l1'],
            'l2brsgen': processing_rules.build_rule('l2brsgen', ['l2gen'],
            'l1mapgen': processing_rules.build_rule('l1mapgen', ['l1'],
            'l2mapgen': processing_rules.build_rule('l2mapgen', ['l2gen'],
            'level 1b': processing_rules.build_rule('level 1b', ['level 1a'],
            'l2gen': processing_rules.build_rule('l2gen', ['l1'], self.run_l2gen,
            'l2extract': processing_rules.build_rule('l2extract', ['l2gen'],
            'l2bin': processing_rules.build_rule('l2bin', ['l2gen'], self.run_l2bin,
            'l3bin': processing_rules.build_rule('l3bin', ['l2bin'], self.run_l3bin,
            'l3mapgen': processing_rules.build_rule('l3mapgen', ['l2bin'],
            'smigen': processing_rules.build_rule('smigen', ['l3bin'], self.run_smigen,
        self.rules_order = ['level 1a', 'l1brsgen', 'l1mapgen', 'level 1b', 'l2gen',
                            'l2extract', 'l2brsgen', 'l2mapgen', 'l2bin', 'l3bin',
                            'l3mapgen', 'smigen']
        self.name = 'general'
        """
        Exits with an error message when there is an attempt to process a source
        file at the lowest level of a rule chain.
        """
        err_msg = 'Error! Attempting to create {0} product, but no creation program is known.'.format(proc.target_type)
        """
        Sets up and runs an executable program.
        """
        prog = os.path.join(proc.ocssw_bin, 'l1bgen_generic')
        args = 'ifile=' + proc.input_file + ' '
        args += 'ofile=' + proc.output_file + ' '
        if proc.geo_file is not None:
            args += proc.geo_file + ' '
        cmd = ' '.join([prog, args])
        """
        Runs the l1brsgen executable.
        """
        prog = os.path.join(proc.ocssw_bin, 'l1brsgen')
        cmd = ' '.join([prog, opts, ' ifile=' + proc.input_file])
        if proc.geo_file is not None:
            cmd = ' '.join([prog, opts, ' ifile=' + proc.input_file,
                            'geofile=' + proc.geo_file,
                            'ofile=' + proc.output_file])
        else:
            cmd = ' '.join([prog, opts, ' ifile=' + proc.input_file,
                            'ofile=' + proc.output_file])
        logging.debug('Executing: "%s"', cmd)
        """
        Runs the l1mapgen executable, handling the range of successful return
        values.
        """
        prog = os.path.join(proc.ocssw_bin, 'l1mapgen')
        cmd = ' '.join([prog, opts, ' ifile=' + proc.input_file])
        if proc.geo_file is not None:
            cmd = ' '.join([prog, opts, ' ifile=' + proc.input_file,
                            'geofile=' + proc.geo_file,
                            'ofile=' + proc.output_file])
        else:
            cmd = ' '.join([prog, opts, ' ifile=' + proc.input_file,
                            'ofile=' + proc.output_file])
        logging.debug('Executing: "%s"', cmd)
        logging.debug('l1mapgen run complete! Return value: "%s"', lvl_nm)
        if (lvl_nm >= acceptable_min) and (lvl_nm <= acceptable_max):
        """
        Set up for and perform L2 binning.
        """
        prog = os.path.join(proc.ocssw_bin, 'l2bin')
        if not os.path.exists(prog):
            print("Error! Cannot find executable needed for {0}".
                  format(proc.rule_set.rules[proc.target_type].action))
        args = 'infile=' + proc.input_file
        args += ' ofile=' + proc.output_file
        cmd = ' '.join([prog, args])
        logging.debug('Running l2bin cmd: ' + cmd)
        print('l2bin cmd: ' + cmd)
        if os.path.exists(proc.output_file):
            msg = '-I- The l2bin program returned a status value of {0}. Proceeding with processing, using the output l2 bin file {1}'.format(ret_val, proc.output_file)
        msg = '-I- The l2bin program produced a bin file with no data. No further processing will be done.'
        """
        Runs the l2brsgen executable.
        """
        logging.debug("In run_l2brsgen")
        prog = os.path.join(proc.ocssw_bin, 'l2brsgen')
        cmd = ' '.join([prog, opts, 'ifile=' + proc.input_file,
                        'ofile=' + proc.output_file])
        logging.debug('Executing: "%s"', cmd)
        """
        Set up and run l2extract.
        """
        if 'SWlon' in proc.par_data and 'SWlat' in proc.par_data and \
           'NElon' in proc.par_data and 'NElat' in proc.par_data:
            if (start_line is None) or (end_line is None) or \
               (start_pixel is None) or (end_pixel is None):
                err_msg = 'Error! Could not compute coordinates for l2extract.'
            l2extract_prog = os.path.join(proc.ocssw_bin, 'l2extract')
            l2extract_cmd = ' '.join([l2extract_prog, proc.input_file,
                                      str(start_pixel), str(end_pixel),
                                      str(start_line), str(end_line), '1', '1',
            logging.debug('Executing l2extract command: "%s"', l2extract_cmd)
        else:
            err_msg = 'Error! Geographical coordinates not specified for l2extract.'
        """
        Set up for and perform L2 processing.
        """
        getanc_cmd = ' '.join([getanc_prog, proc.input_file])
        logging.debug('running getanc command: ' + getanc_cmd)
        l2gen_prog = os.path.join(proc.ocssw_bin, 'l2gen')
        if not os.path.exists(l2gen_prog):
            print("Error! Cannot find executable needed for {0}".
                  format(proc.rule_set.rules[proc.target_type].action))
                                           proc.geo_file, proc.output_file)
        logging.debug('L2GEN_FILE=' + proc.output_file)
        args = 'par=' + par_name
        l2gen_cmd = ' '.join([l2gen_prog, args])
        if cfg_data.verbose or DEBUG:
            logging.debug('l2gen cmd: %s', l2gen_cmd)
        """
        Runs the l2mapgen executable.
        """
        prog = os.path.join(proc.ocssw_bin, 'l2mapgen')
        args = 'ifile=' + proc.input_file
        for key in proc.par_data:
            if (key != 'odir') and (key != 'ofile') and \
                    key.lower() not in FILE_USE_OPTS:
                args += ' ' + key + '=' + proc.par_data[key]
        args += ' ofile=' + proc.output_file
        cmd = ' '.join([prog, args])
        logging.debug('Executing: "%s"', cmd)
        logging.debug("l2mapgen run complete with status " + str(status))
        """
        Set up and run the l3bin program.
        """
        prog = os.path.join(proc.ocssw_bin, 'l3bin')
        if not os.path.exists(prog):
            print("Error! Cannot find executable needed for {0}".
                  format(proc.rule_set.rules[proc.target_type].action))
        args = 'ifile=' + proc.input_file
        for key in proc.par_data:
            if (key != 'odir') and (key != 'ofile') and \
                    key.lower() not in FILE_USE_OPTS:
                args += ' ' + key + '=' + proc.par_data[key]
        args = 'in=' + proc.input_file
        args += ' ' + "out=" + proc.output_file
        cmd = ' '.join([prog, args])
        logging.debug('Executing l3bin command: "%s"', cmd)
        if os.path.exists(proc.output_file):
            msg = '-I- The l3bin program returned a status value of {0}. Proceeding with processing, using the output l3 bin file {1}'.format(ret_val, proc.output_file)
        msg = "-I- The l3bin program produced a bin file with no data. No further processing will be done."
        """
        Set up and run the l3mapgen program.
        """
        prog = os.path.join(proc.ocssw_bin, 'l3mapgen')
        if not os.path.exists(prog):
            print("Error! Cannot find executable needed for {0}".
                  format(proc.rule_set.rules[proc.target_type].action))
        args = 'ifile=' + proc.input_file
        for key in proc.par_data:
            if (key != 'odir') and (key != 'ofile') and \
                    key.lower() not in FILE_USE_OPTS:
                args += ' ' + key + '=' + proc.par_data[key]
        args += ' ofile=' + proc.output_file
        cmd = ' '.join([prog, args])
        logging.debug('Executing l3mapgen command: "%s"', cmd)
        """
        Set up for and perform SMI (Standard Mapped Image) generation.
        """
        prog = os.path.join(proc.ocssw_bin, 'smigen')
        if not os.path.exists(prog):
            print("Error! Cannot find executable needed for {0}".
                  format(proc.rule_set.rules[proc.target_type].action))
        if 'prod' in proc.par_data:
            args = 'ifile=' + proc.input_file + ' ofile=' + proc.output_file + \
                   ' prod=' + proc.par_data['prod']
            cmd = ' '.join([prog, args])
            for key in proc.par_data:
                if (key != 'prod') and (key.lower() not in FILE_USE_OPTS):
                    args += ' ' + key + '=' + proc.par_data[key]
            logging.debug('\nRunning smigen command: ' + cmd)
        else:
            err_msg = 'Error! No product specified for smigen.'
    """
    Sensor GOCI contains the GOCI recipe.
    """
            'level 1a': processing_rules.build_rule('level 1a', ['level 0'],
            'l1brsgen': processing_rules.build_rule('l1brsgen', ['l1'],
            'l2brsgen': processing_rules.build_rule('l2brsgen', ['l2gen'],
            'l1mapgen': processing_rules.build_rule('l1mapgen', ['l1'],
            'l2mapgen': processing_rules.build_rule('l2mapgen', ['l2gen'],
            'level 1b': processing_rules.build_rule('level 1b', ['level 1a'],
            'l2gen': processing_rules.build_rule('l2gen', ['level 1b'], self.run_l2gen,
            'l2extract': processing_rules.build_rule('l2extract', ['l2gen'],
            'l2bin': processing_rules.build_rule('l2bin', ['l2gen'], self.run_l2bin,
            'l3bin': processing_rules.build_rule('l3bin', ['l2bin'], self.run_l3bin,
            'l3mapgen': processing_rules.build_rule('l3mapgen', ['l2bin'],
            'smigen': processing_rules.build_rule('smigen', ['l3bin'], self.run_smigen,
        self.rules_order = ['level 1a', 'l1brsgen', 'l1mapgen', 'level 1b', 'l2gen',
                            'l2extract', 'l2brsgen', 'l2mapgen', 'l2bin', 'l3bin',
                            'l3mapgen', 'smigen']
            'level 1a': processing_rules.build_rule('level 1a', ['nothing lower'],
            'l1brsgen': processing_rules.build_rule('l1brsgen', ['level 1a', 'geo'],
            'l1mapgen': processing_rules.build_rule('l1mapgen', ['level 1a', 'geo'],
            'geo': processing_rules.build_rule('geo', ['level 1a'],
            'l2gen': processing_rules.build_rule('l2gen', ['level 1a', 'geo'],
            'l2extract': processing_rules.build_rule('l2extract', ['l2gen'],
            'l2brsgen': processing_rules.build_rule('l2brsgen', ['l2gen'],
            'l2mapgen': processing_rules.build_rule('l2mapgen', ['l2gen'],
            'l2bin': processing_rules.build_rule('l2bin', ['l2gen'], self.run_l2bin,
            'l3bin': processing_rules.build_rule('l3bin', ['l2bin'], self.run_l3bin,
            'l3mapgen': processing_rules.build_rule('l3mapgen', ['l2bin'],
            'smigen': processing_rules.build_rule('smigen', ['l3bin'], self.run_smigen,
                            'l1mapgen', 'l2gen', 'l2extract', 'l2bin',
                            'l2brsgen', 'l2mapgen', 'l3bin', 'l3mapgen', 'smigen']
        """
        Set up and run the geolocate_hawkeye program, returning the exit
        status of the run.
        """
        logging.debug('In run_geolocate_hawkeye')
        err_msg = 'Error! Cannot find program geolocate_hawkeye.'
        logging.info(err_msg)
        args = ''.join([proc.input_file, ' ', proc.output_file])
        cmd = ' '.join([prog, args])
        logging.debug("\nRunning: " + cmd)
    """
    Sensor MERIS contains the MERIS-specific recipe.

    target type (string), source types (list of strings), batch processing
    flag (Boolean), action to take (function name)
    """
            'level 1a': processing_rules.build_rule('level 1a', ['level 0'],
            'l1brsgen': processing_rules.build_rule('l1brsgen', ['l1'],
            'l2brsgen': processing_rules.build_rule('l2brsgen', ['l2gen'],
            'l1mapgen': processing_rules.build_rule('l1mapgen', ['l1'],
            'l2mapgen': processing_rules.build_rule('l2mapgen', ['l2gen'],
            'level 1b': processing_rules.build_rule('level 1b', ['level 1a'],
            'l2gen': processing_rules.build_rule('l2gen', ['level 1b'], self.run_l2gen,
            'l2extract': processing_rules.build_rule('l2extract', ['l2gen'],
            'l2bin': processing_rules.build_rule('l2bin', ['l2gen'], self.run_l2bin,
            'l3bin': processing_rules.build_rule('l3bin', ['l2bin'], self.run_l3bin,
            'l3mapgen': processing_rules.build_rule('l3mapgen', ['l2bin'],
            'smigen': processing_rules.build_rule('smigen', ['l3bin'], self.run_smigen,
        self.rules_order = ['level 1a', 'l1brsgen', 'l1mapgen', 'level 1b', 'l2gen',
                            'l2extract', 'l2brsgen', 'l2mapgen', 'l2bin', 'l3bin',
                            'l3mapgen', 'smigen']
    """
    Sensor MODIS contains the MODIS-specific recipe and processing methods.
    """
            'level 0': processing_rules.build_rule('level 0', ['nothing lower'],
            'level 1a': processing_rules.build_rule('level 1a', ['level 0'],
            'l1brsgen': processing_rules.build_rule('l1brsgen', ['level 1b', 'geo'],
            'l1mapgen': processing_rules.build_rule('l1mapgen', ['level 1b', 'geo'],
            'geo': processing_rules.build_rule('geo', ['level 1a'], self.run_geo,
            'l1aextract': processing_rules.build_rule('l1aextract',
            'level 1b': processing_rules.build_rule('level 1b',
            'l2gen': processing_rules.build_rule('l2gen', ['level 1b', 'geo'],
            'l2extract': processing_rules.build_rule('l2extract', ['l2gen'],
            'l2brsgen': processing_rules.build_rule('l2brsgen', ['l2gen'],
            'l2mapgen': processing_rules.build_rule('l2mapgen', ['l2gen'],
            'l2bin': processing_rules.build_rule('l2bin', ['l2gen'], self.run_l2bin,
            'l3bin': processing_rules.build_rule('l3bin', ['l2bin'], self.run_l3bin,
            'l3mapgen': processing_rules.build_rule('l3mapgen', ['l2bin'],
            'smigen': processing_rules.build_rule('smigen', ['l3bin'], self.run_smigen,
                            'level 1b', 'l1brsgen', 'l1mapgen', 'l2gen', 'l2extract',
                            'l2bin', 'l2brsgen', 'l2mapgen', 'l3bin', 'l3mapgen',
        """
        Set up and run l1aextract_modis.
        """
        if 'SWlon' in proc.par_data and 'SWlat' in proc.par_data and \
           'NElon' in proc.par_data and 'NElat' in proc.par_data:
            if (start_line is None) or (end_line is None) or \
               (start_pixel is None) or (end_pixel is None):
                err_msg = 'Error! Cannot find l1aextract_modis coordinates.'
            l1aextract_prog = os.path.join(proc.ocssw_bin, 'l1aextract_modis')
            l1aextract_cmd = ' '.join([l1aextract_prog, proc.input_file,
                                       str(start_pixel), str(end_pixel),
                                       str(start_line), str(end_line),
            logging.debug('Executing l1aextract_modis command: "%s"',
        """
        Sets up and runs the MODIS GEO script.
        """
        args = proc.input_file + ' --output=' + proc.output_file
        cmd = ' '.join([prog, args])
        logging.debug("\nRunning: " + cmd)
        """
        Sets up and runs the MODIS L1A script.
        """
        args = proc.input_file
        args += ' --output=' + proc.output_file
        cmd = ' '.join([prog, args])
        logging.debug("\nRunning: " + cmd)
        args = ' -o ' + proc.output_file
        args += ' ' + proc.input_file
        if proc.geo_file is not None:
            args += ' ' + proc.geo_file
        cmd = ' '.join([prog, args])
        logging.debug("\nRunning: " + cmd)
    """
    Sensor SeaWiFS contains the SeaWiFS-specific recipe and processing methods.
    """
            'level 1a': processing_rules.build_rule('level 1a', ['level 0'],
            'l1aextract': processing_rules.build_rule('l1aextract',
            'l1brsgen': processing_rules.build_rule('l1brsgen', ['l1'],
            'l1mapgen': processing_rules.build_rule('l1mapgen', ['l1'],
            'level 1b': processing_rules.build_rule('level 1b', ['level 1a'],
            'l2gen': processing_rules.build_rule('l2gen', ['l1'], self.run_l2gen,
            'l2extract': processing_rules.build_rule('l2extract', ['l2gen'],
            'l2brsgen': processing_rules.build_rule('l2brsgen', ['l2gen'],
            'l2mapgen': processing_rules.build_rule('l2mapgen', ['l2gen'],
            'l2bin': processing_rules.build_rule('l2bin', ['l2gen'], self.run_l2bin,
            'l3bin': processing_rules.build_rule('l3bin', ['l2bin'], self.run_l3bin,
            'l3mapgen': processing_rules.build_rule('l3mapgen', ['l2bin'],
            'smigen': processing_rules.build_rule('smigen', ['l3bin'], self.run_smigen,
                            'l1mapgen', 'level 1b', 'l2gen', 'l2extract',
                            'l2brsgen', 'l2mapgen', 'l2bin', 'l3bin',
                            'l3mapgen', 'smigen']
        """
        Set up and run l1aextract_seawifs.
        """
        if 'SWlon' in proc.par_data and 'SWlat' in proc.par_data and \
           'NElon' in proc.par_data and 'NElat' in proc.par_data:
            if (start_line is None) or (end_line is None) or \
               (start_pixel is None) or (end_pixel is None):
                err_msg = 'Error! Cannot compute l1aextract_seawifs coordinates.'
            l1aextract_prog = os.path.join(proc.ocssw_bin, 'l1aextract_seawifs')
            l1aextract_cmd = ' '.join([l1aextract_prog, proc.input_file,
                                       str(start_pixel), str(end_pixel),
                                       str(start_line), str(end_line), '1', '1',
            logging.debug('Executing l1aextract_seawifs command: "%s"',
            'level 1a': processing_rules.build_rule('level 1a', ['nothing lower'],
            'l1brsgen': processing_rules.build_rule('l1brsgen', ['l1', 'geo'],
            'l1mapgen': processing_rules.build_rule('l1mapgen', ['l1', 'geo'],
            'geo': processing_rules.build_rule('geo', ['level 1a'],
            'l1aextract': processing_rules.build_rule('l1aextract',
            'level 1b': processing_rules.build_rule('level 1b', ['level 1a', 'geo'],
            'l2gen': processing_rules.build_rule('l2gen', ['l1', 'geo'],
            'l2extract': processing_rules.build_rule('l2extract', ['l2gen'],
            'l2brsgen': processing_rules.build_rule('l2brsgen', ['l2gen'],
            'l2mapgen': processing_rules.build_rule('l2mapgen', ['l2gen'],
            'l2bin': processing_rules.build_rule('l2bin', ['l2gen'], self.run_l2bin,
            'l3bin': processing_rules.build_rule('l3bin', ['l2bin'], self.run_l3bin,
            'l3mapgen': processing_rules.build_rule('l3mapgen', ['l2bin'],
            'smigen': processing_rules.build_rule('smigen', ['l3bin'], self.run_smigen,
        self.rules_order = ['level 1a', 'geo', 'l1aextract', 'level 1b', 'l1brsgen',
                            'l1mapgen', 'l2gen', 'l2extract', 'l2bin',
                            'l2brsgen', 'l2mapgen', 'l3bin', 'l3mapgen', 'smigen']
        """
        Set up and run the geolocate_viirs program, returning the exit status
        of the run.
        """
        logging.debug('In run_geolocate_viirs')
        err_msg = 'Error! Cannot find program geolocate_viirs.'
        logging.info(err_msg)
        args = ''.join(['-ifile=', proc.input_file, ' -geofile_mod=', proc.output_file])
        cmd = ' '.join([prog, args])
        logging.debug("\nRunning: " + cmd)
        logging.debug('In run_viirs_l1b')
        args = ''.join(['ifile=', proc.input_file, ' l1bfile_mod=', proc.output_file])
        cmd = ' '.join([prog, args])
        logging.debug("\nRunning: " + cmd)
        """
        Set up and run l1aextract_viirs.
        """
        if 'SWlon' in proc.par_data and 'SWlat' in proc.par_data and \
           'NElon' in proc.par_data and 'NElat' in proc.par_data:
        elif 'sline' in proc.par_data and 'eline' in proc.par_data and \
             'spixl' in proc.par_data and 'epixl' in proc.par_data:
            start_line = proc.par_data['sline']
            end_line = proc.par_data['eline']
            start_pixel = proc.par_data['spixl']
            end_pixel = proc.par_data['epixl']
        if (start_line is None) or (end_line is None) or \
           (start_pixel is None) or (end_pixel is None):
            err_msg = 'Error! Cannot find l1aextract_viirs coordinates.'
        l1aextract_prog = os.path.join(proc.ocssw_bin, 'l1aextract_viirs')
        l1aextract_cmd = ' '.join([l1aextract_prog, proc.input_file,
                                   str(start_pixel), str(end_pixel),
                                   str(start_line), str(end_line),
        logging.debug('Executing l1aextract_viirs command: "%s"',
    """
    Returns an obpg_data_file object for the file named in file_specification.
    """
    (ftype, sensor) = ftyper.get_file_type()
    (stime, etime) = ftyper.get_file_times()
    obpg_data_file_obj = obpg_data_file.ObpgDataFile(file_specification, ftype,
                                                     sensor, stime, etime,
    return obpg_data_file_obj
    """
    Returns the directory in which the program named in prog_name is found.
    None is returned if the program is not found.
    """
    candidate_subdirs = ['bin', 'scripts']
    for subdir in candidate_subdirs:
        cand_path = os.path.join(OCSSWROOT_DIR, subdir, prog_name)
        if os.path.exists(cand_path):
    """
    Create a file listing the names of the files to be processed.
    """
    with open(filename, 'wt') as file_list_file:
        for fname in file_list:
            file_list_file.write(fname + '\n')
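# Example (illustrative, not executed): calling this helper with a hypothetical
# path such as '/tmp/files_l2bin.lis' and file_list=['a.L2', 'b.L2'] produces a
# plain text file containing one input file name per line:
#
#   a.L2
#   b.L2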
    """
    Build the parameter file for L2 processing.
    """
    dt_stamp = datetime.datetime.today()
    par_name = ''.join(['L2_', dt_stamp.strftime('%Y%m%d%H%M%S'), '.par'])
    par_path = os.path.join(cfg_data.hidden_dir, par_name)
    with open(par_path, 'wt') as par_file:
        par_file.write('# Automatically generated par file for l2gen\n')
        par_file.write('ifile=' + input_file + '\n')
        if geo_file is not None:
            par_file.write('geofile=' + geo_file + '\n')
        par_file.write('ofile=' + output_file + '\n')
        for l2_opt in par_contents:
            if l2_opt != 'ifile' and l2_opt != 'geofile' \
                    and l2_opt != 'ofile' and l2_opt != 'odir' \
                    and l2_opt not in FILE_USE_OPTS:
                par_file.write(l2_opt + '=' + par_contents[l2_opt] + '\n')
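# Example (illustrative, not executed): with hypothetical arguments
# input_file='scene.L1B', geo_file='scene.GEO', output_file='scene.L2', and
# par_contents={'l2prod': 'chlor_a'}, the generated par file would read:
#
#   # Automatically generated par file for l2gen
#   ifile=scene.L1B
#   geofile=scene.GEO
#   ofile=scene.L2
#   l2prod=chlor_a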
    """
    Check command line options
    """
    if not os.path.exists(options.ifile):
        err_msg = 'Error! The specified input file, {0}, does not exist.'.\
                  format(options.ifile)
    """
    Delete unwanted files created during processing.
    """
    print("Cleaning up files")
    for filepath in delete_list:
        print('Deleting {0}'.format(filepath))
    hidden_files = os.listdir(cfg_data.hidden_dir)
    par_files = [f for f in hidden_files if f.endswith('.par')]
    for par_file in par_files:
        par_path = os.path.join(cfg_data.hidden_dir, par_file)
        file_age = round(time.time()) - os.path.getmtime(par_path)
        if file_age > cfg_data.max_file_age:
            print('Deleting {0}'.format(par_path))
    if not files_deleted:
        print('No files were found for deletion.')
    elif files_deleted == 1:
        print('One file was deleted.')
    else:
        print('A total of {0} files were deleted.'.format(files_deleted))
    """
    Returns a list containing all the levels from all the rules sets.
    """
    set_key = list(rules_sets.keys())[0]
    logging.debug('set_key = %s', set_key)
    lvls_lst = [(lvl, [set_key])
                for lvl in rules_sets[set_key].rules_order[1:]]
    for rules_set_name in list(rules_sets.keys())[1:]:
        for lvl_name in rules_sets[rules_set_name].rules_order[1:]:
            names_list = [lst_item[0] for lst_item in lvls_lst]
            if lvl_name in names_list:
                lvls_lst[names_list.index(lvl_name)][1].append(rules_set_name)
            else:
                prev_ndx = rules_sets[rules_set_name].rules_order.index(lvl_name) - 1
                if rules_sets[rules_set_name].rules_order[prev_ndx] in names_list:
                    ins_ndx = names_list.index(rules_sets[rules_set_name].rules_order[prev_ndx]) + 1
                lvls_lst.insert(ins_ndx, (lvl_name, [rules_set_name]))
    """
    Creates the message to be displayed when help is provided.
    """
    %prog [options] parameter_file

    The parameter_file is similar to, but not exactly like, parameter
    files for OCSSW processing programs:
    - It has sections separated by headers which are denoted by "["
    The section named "main" is required.  Its allowed options are:
        ifile - Required entry naming the input file(s) to be processed.
        use_nrt_anc - use near real time ancillary data
        deletefiles - delete all the intermediate data files generated
        overwrite - overwrite any data files which already exist
        use_existing - use any data files which already exist
        Simultaneous use of both the overwrite and use_existing options
    The names for other sections are the programs for which that section's
    entries are to be applied.  Intermediate sections which are required for the
    final level of processing do not need to be defined if their default options
    are acceptable.  A section can be empty.  The final level of processing
    must have a section header, even if no entries appear within that section.
    - Entries within a section appear as key=value.  Comma separated lists of
      values can be used when appropriate.
    - Comments are marked by "#"; anything appearing on a line after that
      character is ignored.  A line beginning with a "#" is completely ignored.

    In addition to the main section, the following sections are allowed:
        Section name:          Applicable Instrument(s):
        -------------          -------------------------\n"""

    for lname in level_names:
        lvl_name_help += ' {0:24s}{1}\n'.\
                         format(lname[0] + ':', ', '.join(lname[1]))
    message += lvl_name_help
    # Sample par file for %prog.
    ifile=2010345034027.L1A_LAC
    # final processing level
    """
    Perform the processing for each step (element of processor_list) needed.
    """
    global input_file_data
    files_to_delete = []
    input_files_list = []
        skip_par_ifile = True
        if os.path.exists(cmd_line_ifile):
            input_files_list = [cmd_line_ifile]
        else:
            msg = 'Error! Specified ifile {0} does not exist.'.\
                  format(cmd_line_ifile)
        skip_par_ifile = False
    if par_contnts['main']:
        if (not skip_par_ifile) and ('ifile' not in par_contnts['main']):
            msg = 'Error! No ifile specified in the main section of {0}.'.\
        cfg_data.deletefiles = True
        cfg_data.use_existing = True
        cfg_data.overwrite = True
        if 'use_nrt_anc' in par_contnts['main'] and \
                int(par_contnts['main']['use_nrt_anc']) == 0:
            cfg_data.get_anc = False
        if 'odir' in par_contnts['main']:
            dname = par_contnts['main']['odir']
            if os.path.exists(dname):
                if os.path.isdir(dname):
                    if cfg_data.output_dir_is_settable:
                        cfg_data.output_dir = os.path.realpath(dname)
                    else:
                        log_msg = 'Ignoring par file specification for output directory, {0}; using command line value, {1}.'.format(par_contnts['main']['odir'], cfg_data.output_dir)
                        logging.info(log_msg)
                else:
                    msg = 'Error! {0} is not a directory.'.format(dname)
            else:
                msg = 'Error! {0} does not exist.'.format(dname)
    logging.debug('cfg_data.overwrite: ' + str(cfg_data.overwrite))
    logging.debug('cfg_data.use_existing: ' + str(cfg_data.use_existing))
    logging.debug('cfg_data.deletefiles: ' + str(cfg_data.deletefiles))
    if cfg_data.overwrite and cfg_data.use_existing:
        err_msg = 'Error! Incompatible options overwrite and use_existing were found in {0}.'.format(par_file)
    if len(input_files_list) == 1:
        if MetaUtils.is_ascii_file(input_files_list[0]) and \
                not MetaUtils.is_metadata_file(input_files_list[0]):
    if not input_file_data:
        log_and_exit('No valid data files were specified for processing.')
    logging.debug("input_file_data: " + str(input_file_data))
    get_processors(src_files, input_file_data, par_contnts, files_to_delete)
    err_msg = "Unrecoverable error encountered in processing."
    if cfg_data.verbose:
        print("Processing complete.")
    logging.debug("Processing complete.")
    """
    Execute what is contained in command and then output the results to log
    files and the console, as appropriate.
    """
        print("Entering execute_command, cfg_data.verbose =",
    log_msg = 'Executing command:\n {0}'.format(command)
    logging.debug(log_msg)
    subproc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    std_out, err_out = subproc.communicate()
    status = subproc.returncode
    logging.info(std_out)
    logging.info(err_out)
    if cfg_data.verbose:
    """
    Returns a single section (e.g. L1a, GEO, L1B, L2, etc.) from the "par" file.
    """
    for key in list(par_contents[section].keys()):
        sect_dict[key] = par_contents[section][key]
    """
    Searches for a GEO file corresponding to inp_file.  If that GEO file exists,
    returns that file name; otherwise, returns None.
    """
    src_dir = os.path.dirname(inp_file)
    src_base = os.path.basename(inp_file)
    src_base_tmp = src_base.replace("L1B", "GEO")
    geo_base = src_base_tmp.replace("_LAC", "")
    geo_file = os.path.join(src_dir, geo_base)
    if not os.path.exists(geo_file):
    """
    Searches for a GEO file corresponding to inp_file.  If that GEO file exists,
    returns that file name; otherwise, returns None.
    """
    src_dir = os.path.dirname(inp_file)
    src_base = os.path.basename(inp_file)
    if instrument.find('hawkeye') != -1:
        geo_base = src_base.replace("L1A", "GEO")
    elif instrument.find('modis') != -1:
        if lvl.find('level 1a') != -1:
            src_base_tmp = src_base.replace("L1A", "GEO")
            geo_base = src_base_tmp.replace("_LAC", "")
        elif lvl.find('level 1b') != -1:
            src_base_tmp = src_base.replace("L1B", "GEO")
            geo_base = src_base_tmp.replace("_LAC", "")
    elif instrument.find('viirs') != -1:
        if lvl.find('level 1a') != -1:
            geo_base = src_base.replace("L1A", "GEO-M")
        elif lvl.find('level 1b') != -1:
            geo_base = src_base.replace("L1B", "GEO")
    geo_file = os.path.join(src_dir, geo_base)
    if not os.path.exists(geo_file):
    """
    Searches for a GEO file corresponding to first_svm_file.  If that GEO file
    exists, returns that file name; otherwise, returns None.
    """
    fname = first_svm_file.replace('SVM01', 'GMTCO').rstrip()
    if not os.path.exists(fname):
    """
    Returns the output file for a "batch" run, i.e. a process that can accept
    multiple inputs, such as l2bin or l3bin.
    """
    mission_prefixes = ['A', 'C', 'O', 'S', 'T']
    if not len(file_set):
        err_msg = "Error! An output file name could not be determined."
    elif len(file_set) == 1:
        stem = os.path.splitext(file_set[0])[0]
    else:
        earliest_file = file_set[0]
        latest_file = file_set[0]
        for cur_file in file_set[1:]:
            if file_date < earliest_file_date:
                earliest_file = cur_file
                earliest_file_date = file_date
            elif file_date > latest_file_date:
                latest_file = cur_file
                latest_file_date = file_date
        if (earliest_file[0] == latest_file[0]) and \
           (earliest_file[0] in mission_prefixes):
            stem = earliest_file[0]
        earliest_file_date_stamp = earliest_file_date.strftime('%Y%j')
        latest_file_date_stamp = latest_file_date.strftime('%Y%j')
        if earliest_file_date_stamp == latest_file_date_stamp:
            stem += earliest_file_date_stamp
        else:
            stem += earliest_file_date_stamp + latest_file_date_stamp
    return ''.join([stem, '.', suffix])
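# Example (illustrative, not executed): for a single input file such as
# 'A2010345034027.L2' (hypothetical name) and suffix 'L3b', the stem is the
# file name without its extension, so the returned batch output name is
# 'A2010345034027.L3b'.  For multiple inputs, the earliest and latest file
# dates (as %Y%j stamps) are appended to the stem instead.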
    """
    If found in par_contents, the value for the option specified by opt_text
    is returned; otherwise, False is returned.
    """
    if opt_text in par_contents['main']:
        opt_str = par_contents['main'][opt_text].upper()
        opt_found = mlp_utils.is_option_value_true(opt_str)
    """
    Run the lonlat2pixline program and return the parameters found.
    """
        in_file = proc.geo_file
        in_file = proc.input_file
    args = ' '.join([in_file, proc.par_data['SWlon'],
                     proc.par_data['SWlat'], proc.par_data['NElon'],
                     proc.par_data['NElat']])
    lonlat_prog = os.path.join(proc.ocssw_bin, 'lonlat2pixline')
    lonlat_cmd = ' '.join([lonlat_prog, args])
    logging.debug('Executing lonlat2pixline command: "%s"', lonlat_cmd)
    process_output = subprocess.Popen(lonlat_cmd, shell=True,
                                      stdout=subprocess.PIPE).communicate()[0]
    lonlat_output = process_output.splitlines()
    for line in lonlat_output:
        line_text = str(line).strip("'")
        if 'sline' in line_text:
            start_line = int(line_text.split('=')[1])
        if 'eline' in line_text:
            end_line = int(line_text.split('=')[1])
        if 'spixl' in line_text:
            start_pixel = int(line_text.split('=')[1])
        if 'epixl' in line_text:
            end_pixel = int(line_text.split('=')[1])
    return start_line, end_line, start_pixel, end_pixel
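# Example (illustrative, not executed): an output line of the form
# 'sline=100' (hypothetical value) is stripped of quoting, split on '=', and
# converted with int(), yielding start_line = 100; the eline, spixl, and
# epixl lines are parsed the same way.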
    """
    Get a Python Date object from a recognized file name's year and day of year.
    """
    base_filename = os.path.basename(filename)
    if re.match(r'[ACMOQSTV]\d\d\d\d\d\d\d.*', base_filename):
        year = int(base_filename[1:5])
        doy = int(base_filename[5:8])
    elif re.match(r'\d\d\d\d\d\d\d.*', base_filename):
        year = int(base_filename[0:4])
        doy = int(base_filename[4:7])
    elif re.match(r'\w*_npp_d\d\d\d\d\d\d\d_.*', base_filename):
        prefix_removed_name = re.sub(r'\w*_npp_d', '', base_filename)
        year = int(prefix_removed_name[0:4])
        doy = int(prefix_removed_name[4:7])
    else:
        err_msg = 'Unable to determine date for {0}'.format(filename)
    file_date = datetime.datetime(year, 1, 1) + datetime.timedelta(doy - 1)
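    # Example (illustrative, not executed): a file named
    # 'A2010345034027.L1A_LAC' (hypothetical name) matches the first pattern,
    # giving year=2010 and doy=345, so file_date works out to
    # datetime.datetime(2010, 12, 11).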
    """
    Returns the values of the file handling options in par_contents.
    """
    return deletefiles, use_existing, overwrite
    """
    Get input files found in the uber par file's ifile line, a file list file,
    or both.  Ensure that the list contains no duplicates.
    """
    from_infilelist = []
    if 'ifile' in par_data['main']:
        inp_file_str = par_data['main']['ifile'].split('#', 2)[0]
        cleaned_str = re.sub(r'[\t,:\[\]()"\']', ' ', inp_file_str)
        from_ifiles = cleaned_str.split()
    if 'infilelist' in par_data['main']:
        infilelist_name = par_data['main']['infilelist']
        if os.path.exists(infilelist_name):
            if os.path.isfile(infilelist_name) and \
                    os.access(infilelist_name, os.R_OK):
                with open(infilelist_name, 'rt') as in_file_list_file:
                    inp_lines = in_file_list_file.readlines()
                from_infilelist = [fn.rstrip() for fn in inp_lines
                                   if not re.match(r'^\s*#', fn)]
    if len(from_ifiles) == 0 and len(from_infilelist) == 0:
    return list(set(from_ifiles + from_infilelist))
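# Example (illustrative, not executed): a par file line such as
#   ifile=granule1.L1A, granule2.L1A  # two inputs (hypothetical names)
# is truncated at the '#', has commas/brackets/quotes replaced by spaces, and
# is split into ['granule1.L1A', 'granule2.L1A']; the final set() call removes
# any names duplicated between the ifile entry and the infilelist file.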
    """
    Returns a dictionary with the file_type (L0, L1A, L2, etc.) and
    instrument for each file in the input list.
    """
        'level 0': 'level 0',
        'level 1 browse data': 'l1brsgen',
        'level 1a': 'level 1a',
        'level 1b': 'level 1b',
        'level 3 binned': 'l3bin',
        'level 3 smi': 'smigen'
    input_file_type_data = {}
    for inp_file in input_files_list:
        file_type, file_instr = file_typer.get_file_type()
        if file_type.lower() in converter:
            file_type = converter[file_type.lower()]
        input_file_type_data[inp_file] = (file_type, file_instr.lower())
        warn_msg = "Warning: Unable to determine a type for file {0}. It will not be processed.".format(inp_file)
        logging.info(warn_msg)
    return input_file_type_data
    """
    Create processor objects for products which are needed, but not explicitly
    specified in the par file.
    """
    existing_products = [proc.target_type for proc in existing_procs]
                                                   lowest_source_level)
    intermediate_processors = []
    for prod in intermediate_products:
        if prod not in existing_products:
            new_proc = processor.Processor(sensor, rules, prod, {},
                                           cfg_data.hidden_dir)
            intermediate_processors.append(new_proc)
    return intermediate_processors
                              lowest_source_level):
    """
    Find products which are needed, but not explicitly specified by the
    par file.
    """
    for prog in existing_prod_names:
                                 lowest_source_level)
        if not isinstance(candidate_progs, type(None)):
            for candidate_prog in candidate_progs:
                required_progs.append(candidate_prog)
    required_progs.sort()
    return required_progs
    """
    Returns the extension for an L2 file.  For the time being, this is
    just '.L2'; however, different extensions may be wanted in the future, thus
    this function is in place.
    """

    """
    Returns the extension for an L3 Binned file.  For the time being, this is
    just '.L3bin'; however, different extensions may be wanted in the future,
    thus this function is in place.
    """
    """
    Find the level of the lowest level source file to be processed.
    """
    order = ['level 1a', 'geo', 'level 1b', 'l2gen',
             'l2bin', 'l3bin', 'l3mapgen']
    if len(source_files) == 1:
        return list(source_files.keys())[0]
    else:
        lowest = list(source_files.keys())[0]
        for key in list(source_files.keys())[1:]:
            if order.index(key) < order.index(lowest):
    """
    Extract the options for a program to be run from the corresponding data in
    """
    for key in par_data:
        if key != 'ofile' and key != 'odir' and key.lower() not in FILE_USE_OPTS:
            options += ' ' + key + '=' + par_data[key]
            options += ' ' + key
    """
    Determine what the output name would be if targ_prog is run on input_files.
    """
    cl_opts = optparse.Values()
    cl_opts.suite = suite
    cl_opts.oformat = oformt
    cl_opts.resolution = res
    if not isinstance(inp_files, list):
        output_name = get_output_name.get_output_name([data_file], targ_prog,
    else:
        output_name = get_output_name.get_output_name(inp_files, targ_prog,
    """
    Determine the output name for a program to be run.
    """
    if input_name in input_files:
        if input_files[input_name][0] == 'level 0' and \
           input_files[input_name][1].find('modis') != -1:
            if input_files[input_name][1].find('aqua') != -1:
            if os.path.exists(input_name + '.const'):
                with open(input_name + '.const') as constructor_file:
                    constructor_data = constructor_file.readlines()
                for line in constructor_data:
                    if line.find('starttime=') != -1:
                        start_time = line[line.find('=') + 1].strip()
                time_stamp = ProcUtils.date_convert(start_time, 't', 'j')
            if re.match(r'MOD00.P\d\d\d\d\d\d\d\.\d\d\d\d', input_name):
                time_stamp = input_name[7:14] + input_name[15:19] + '00'
            err_msg = "Cannot determine time stamp for input file {0}".\
            output_name = first_char + time_stamp + '.L1A'
        (dirname, basename) = os.path.split(input_name)
        basename_parts = basename.rsplit('.', 2)
        output_name = os.path.join(dirname, basename_parts[0] + '.' +
    (dirname, basename) = os.path.split(input_name)
    basename_parts = basename.rsplit('.', 2)
    output_name = os.path.join(dirname, basename_parts[0] + '.' + suffix)
    """
    Return the contents of the input "par" file.
    """
    acceptable_par_keys = {
        'level 0' : 'level 0', 'l0' : 'level 0',
        'level 1a' : 'level 1a', 'l1a' : 'level 1a', 'l1agen': 'level 1a',
        'modis_L1A': 'level 1a',
        'l1brsgen': 'l1brsgen',
        'l1mapgen': 'l1mapgen',
        'l1aextract': 'l1aextract',
        'l1aextract_modis': 'l1aextract_modis',
        'l1aextract_seawifs' : 'l1aextract_seawifs',
        'l1aextract_viirs' : 'l1aextract_viirs',
        'l1brsgen' : 'l1brsgen',
        'geo' : 'geo', 'modis_GEO': 'geo', 'geolocate_viirs': 'geo',
        'geolocate_hawkeye': 'geo',
        'level 1b' : 'level 1b', 'l1b' : 'level 1b', 'l1bgen' : 'level 1b',
        'modis_L1B': 'level 1b', 'calibrate_viirs': 'level 1b',
        'level 2' : 'l2gen',
        'l2brsgen' : 'l2brsgen',
        'l2extract' : 'l2extract',
        'l2mapgen' : 'l2mapgen',
        'l3mapgen' : 'l3mapgen',
        'smigen' : 'smigen',
    if cfg_data.verbose:
        print("Processing %s" % par_file)
    par_reader = uber_par_file_reader.ParReader(par_file,
                                                acceptable_single_keys,
                                                acceptable_par_keys)
    par_contents = par_reader.read_par_file()
    ori_keys = list(par_contents.keys())
    for key in ori_keys:
        if key in acceptable_par_keys:
            if key != acceptable_par_keys[key]:
                par_contents[acceptable_par_keys[key]] = par_contents[key]
                del par_contents[key]
        else:
            acc_key_str = ', '.join(list(acceptable_par_keys.keys()))
            err_msg = """Error! Parameter file {0} contains a section titled "{1}", which is not a recognized program.
The recognized programs are: {2}""".format(par_file, key, acc_key_str)
    if 'main' in par_contents:
    err_msg = 'Error! Could not find section "main" in {0}'.format(par_file)
    return par_contents, input_files_list
    """
    Determine the processors which are needed.
    """
    for key in list(par_contents.keys()):
        proc = processor.Processor(sensor, rules, key, section_contents,
                                   cfg_data.hidden_dir)
        processors.append(proc)
                               lowest_source_level)
    if proc.out_directory == cfg_data.hidden_dir:
        proc.out_directory = cfg_data.output_dir
    if proc.requires_batch_processing():
        logging.debug('Performing batch processing for ' + str(proc))
    if proc.rule_set.rules[proc.target_type].action:
        logging.debug('Performing nonbatch processing for ' + str(proc))
    if success_count == 0:
        msg = 'The {0} processor produced no output files.'.format(proc.target_type)
    msg = '-I- There is no way to create {0} files for {1}.'.format(proc.target_type, proc.instrument)
    """
    Determine the processors which are needed.
    """
    order = ['level 0', 'level 1a', 'geo', 'l1aextract',
             'level 1b', 'l1brsgen', 'l1mapgen', 'l2gen', 'l2extract',
             'l2bin', 'l2brsgen', 'l2mapgen', 'l3bin', 'l3mapgen',
    key_list = list(par_contents.keys())
    last_key = key_list[-1]
    for key in key_list:
        if not order.index(key) > order.index('l2gen'):
            src_lvls = list(src_files.keys())
            if key not in src_files:
            for src_lvl in src_lvls:
                if order.index(src_lvl) < order.index('l2gen'):
                    for file in src_files[src_lvl]:
                        instrument = file_typer.get_file_type()[1].lower().split()[0]
                        logging.debug("instrument: " + instrument)
                        if instrument in sensors_sets:
                            rules = sensors_sets[instrument].recipe
                            sensor = sensors_sets[instrument]
                        else:
                            rules = sensors_sets['general'].recipe
                            sensor = sensors_sets['general']
                        proc = processor.Processor(sensor, rules, key, section_contents,
                                                   cfg_data.hidden_dir)
                        proc.input_file = file
                        if file_typer.get_file_type()[0].lower().find('level 0') == -1:
                            if sensor.require_geo and key != 'geo':
                                proc.geo_file = find_geo_file2(proc.input_file, instrument, src_lvl)
                                if not proc.geo_file:
                                    if src_lvl.find('level 1b') != -1:
                                        err_msg = 'Error! Need level 1a file for GEO'
                                    proc_geo = processor.Processor(sensor, rules, 'geo', {},
                                                                   cfg_data.hidden_dir)
                                    proc_geo.input_file = file
                                    print('Running geo on file {0}.'.format(file))
                                    log_msg = 'Processing for geo:'
                                    logging.debug(log_msg)
                                    if cfg_data.deletefiles:
                                        files_to_delete.append(proc.geo_file)
                            if key == 'l2gen' and sensor.require_l1b_for_l2gen and \
                                    src_lvl.find('level 1b') == -1:
                                proc_l1b = processor.Processor(sensor, rules, 'level 1b', {},
                                                               cfg_data.hidden_dir)
                                if sensor.require_geo:
                                    proc_l1b.input_file = file
                                    proc_l1b.geo_file = proc.geo_file
                                print('Running level 1b on file {0}.'.format(file))
                                log_msg = 'Processing for level 1b:'
                                logging.debug(log_msg)
                                proc.input_file = exe_processor(proc_l1b, src_files, src_lvl)
                                if cfg_data.deletefiles:
                                    files_to_delete.append(proc.input_file)
                            print('Running {0} on file {1}.'.format(proc.target_type, proc.input_file))
                            log_msg = 'Processing for {0}:'.format(proc.target_type)
                            logging.debug(log_msg)
                            src_files[key].append(out_file)
                            if cfg_data.deletefiles and key != last_key:
                                files_to_delete.append(out_file)
                            if key.find('l1aextract') != -1:
                                src_files['level 1a'] = src_files[key]
                                del src_files['l1aextract']
                        else:
                            if key != 'level 1a':
                                proc_l1a = processor.Processor(sensor, rules, 'level 1a', {},
                                                               cfg_data.hidden_dir)
                                proc_l1a.input_file = file
                                print('Running level 1a on file {0}.'.format(file))
                                log_msg = 'Processing for level 1a:'
                                logging.debug(log_msg)
                                proc.input_file = exe_processor(proc_l1a, src_files, src_lvl)
                                if cfg_data.deletefiles:
                                    files_to_delete.append(proc.input_file)
                            if sensor.require_geo and key != 'geo' and key != 'level 1a':
                                proc.geo_file = find_geo_file2(proc.input_file, instrument, 'level 1a')
                                if not proc.geo_file:
                                    proc_geo = processor.Processor(sensor, rules, 'geo', {},
                                                                   cfg_data.hidden_dir)
                                    proc_geo.input_file = proc.input_file
                                    print('Running geo on file {0}.'.format(proc.input_file))
                                    log_msg = 'Processing for geo:'
                                    logging.debug(log_msg)
                                    if cfg_data.deletefiles:
                                        files_to_delete.append(proc.geo_file)
                            if key == 'l2gen' and sensor.require_l1b_for_l2gen:
                                proc_l1b = processor.Processor(sensor, rules, 'level 1b', {},
                                                               cfg_data.hidden_dir)
                                if sensor.require_geo:
                                    proc_l1b.input_file = proc.input_file
                                    proc_l1b.geo_file = proc.geo_file
                                print('Running level 1b on file {0}.'.format(proc.input_file))
                                log_msg = 'Processing for level 1b:'
                                logging.debug(log_msg)
                                proc.input_file = exe_processor(proc_l1b, src_files, src_lvl)
                                if cfg_data.deletefiles:
                                    files_to_delete.append(proc.input_file)
                            print('Running {0} on file {1}.'.format(proc.target_type, proc.input_file))
                            log_msg = 'Processing for {0}:'.format(proc.target_type)
                            logging.debug(log_msg)
                            src_files[key].append(out_file)
                            if cfg_data.deletefiles and key != last_key:
                                files_to_delete.append(out_file)
                            if key.find('l1aextract') != -1:
                                src_files['level 1a'] = src_files[key]
                                del src_files['l1aextract']
                    if len(src_files) > 1:
                        del src_files[src_lvl]
        else:
            src_lvls = list(src_files.keys())
            rules = sensors_sets['general'].recipe
            sensor = sensors_sets['general']
            if key not in src_files:
            for src_lvl in src_lvls:
                if not order.index(src_lvl) < order.index('l2gen'):
                    for file in src_files[src_lvl]:
                        proc = processor.Processor(sensor, rules, key, section_contents,
                                                   cfg_data.hidden_dir)
                        proc.input_file = file
                        for program in proc.required_types:
                            if program not in src_files:
                                proc1 = processor.Processor(sensor, rules, program, {},
                                                            cfg_data.hidden_dir)
                                proc1.input_file = file
                                for program2 in proc1.required_types:
                                    if program2.find(src_lvl) == -1:
                                        proc2 = processor.Processor(sensor, rules, program2, {},
                                                                    cfg_data.hidden_dir)
                                        proc2.input_file = file
                                        print('Running {0} on file {1}.'.format(proc2.target_type, proc2.input_file))
                                        log_msg = 'Processing for {0}:'.format(proc2.target_type)
                                        logging.debug(log_msg)
                                        if cfg_data.deletefiles:
                                            if proc1.input_file:
                                                files_to_delete.append(proc1.input_file)
                                print('Running {0} on file {1}.'.format(proc1.target_type, proc1.input_file))
                                log_msg = 'Processing for {0}:'.format(proc1.target_type)
                                logging.debug(log_msg)
                                if cfg_data.deletefiles:
                                    files_to_delete.append(proc.input_file)
                                del src_files[src_lvl]
                        print('Running {0} on file {1}.'.format(proc.target_type, proc.input_file))
                        log_msg = 'Processing for {0}:'.format(proc.target_type)
                        logging.debug(log_msg)
                        src_files[key].append(out_file)
                        if cfg_data.deletefiles and key != last_key:
                            files_to_delete.append(out_file)
                        if program in src_files:
                            del src_files[program]
                        if key.find('l2extract') != -1:
                            src_files['l2gen'] = src_files[key]
                            del src_files['l2extract']
    if proc.requires_batch_processing:
    """
    Returns the programs required to produce the desired final output.
    """
    programs_to_run = []
    cur_rule = ruleset.rules[target_program]
    src_types = cur_rule.src_file_types
    if src_types[0] == cur_rule.target_type:
        programs_to_run = [target_program]
    else:
        for src_type in src_types:
            if src_type in ruleset.rules:
                if ruleset.order.index(src_type) > \
                   ruleset.order.index(lowest_source_level):
                    programs_to_run.insert(0, src_type)
                    if len(src_types) > 1:
                        programs_to_run.insert(0, src_types[1])
                                             lowest_source_level)
                    for prog in programs_to_add:
                        programs_to_run.insert(0, prog)
    return programs_to_run
    """
    :param source_files: list of source files
    :param proc_src_types: list of source types for the processor
    :param proc_src_ndx: index into the proc_src_types list pointing to the
                         source type to use to get the input files
    :return: list of GEO files that correspond to the files in source_files
    """
    inp_files = source_files[proc_src_types[proc_src_ndx]]
    for inp_file in inp_files:
        geo_files.append(geo_file)
        err_msg = 'Error! Cannot find GEO ' \
                  'file {0}.'.format(geo_file)
    """
    Returns the set of source files needed.
    """
    if len(proc_src_types) == 1:
        src_file_sets = source_files[src_key]
        err_msg = 'Error! Unable to determine what source files are required for the specified output files.'
    else:
        if requires_all_sources:
            if len(proc_src_types) == 2:
                if proc_src_types[0] in source_files \
                        and proc_src_types[1] in source_files:
                    src_file_sets = list(zip(source_files[proc_src_types[0]],
                                             source_files[proc_src_types[1]]))
                else:
                    if proc_src_types[0] in source_files:
                        if proc_src_types[1] == 'geo':
                            src_file_sets = list(zip(source_files[proc_src_types[0]],
                        else:
                            err_msg = 'Error! Cannot find all {0} and' \
                                      ' {1} source files.'.format(proc_src_types[0],
                    elif proc_src_types[1] in source_files:
                        if proc_src_types[0] == 'geo':
                            src_file_sets = list(zip(source_files[proc_src_types[1]],
                        else:
                            err_msg = 'Error! Cannot find all {0} and' \
                                      ' {1} source files.'.format(proc_src_types[0],
                    else:
                        err_msg = 'Error! Cannot find all source files.'
            else:
                err_msg = 'Error! Encountered too many source file types.'
        else:
            for proc_src_type in proc_src_types:
                if proc_src_type in source_files:
                    src_file_sets = source_files[proc_src_type]
    return src_file_sets
    """
    Returns a dictionary containing the programs to be run (as keys) and
    a list of files on which that program should be run.
    """
    for file_path in input_files:
        ftype = input_files[file_path][0]
        if ftype in source_files:
            source_files[ftype].append(file_path)
        else:
            source_files[ftype] = [file_path]
    """
    Return the list of source product types needed to produce the final product.
    """
    src_prod_names = [targt_prod]
    targt_pos = ruleset.order.index(targt_prod)
    for pos in range(targt_pos, 1, -1):
        for prod_name in src_prod_names:
            if ruleset.rules[ruleset.order[pos]].target_type == prod_name:
                for src_typ in ruleset.rules[ruleset.order[pos]].src_file_types:
                    new_prod_names.append(src_typ)
    src_prod_names += new_prod_names
    return src_prod_names
    """
    Returns an error message built from traceback data.
    """
    exc_parts = [str(l) for l in sys.exc_info()]
    err_type_parts = str(exc_parts[0]).strip().split('.')
    err_type = err_type_parts[-1].strip("'>")
    tb_data = traceback.format_exc()
    tb_line = tb_data.splitlines()[-3]
    line_num = tb_line.split(',')[1]
    st_data = traceback.extract_stack()
    err_file = os.path.basename(st_data[-1][0])
    msg = 'Error! The {0} program encountered an unrecoverable {1}, {2}, at {3} of {4}!'.\
          format(cfg_data.prog_name,
                 err_type, exc_parts[1], line_num.strip(), err_file)
    sensors = dict(general=Sensor(),
    """
    Record error_msg in the debug log, then exit with error_msg going to stderr
    and an exit code of 1; see:
    http://docs.python.org/library/sys.html#exit.
    """
    logging.info(error_msg)
    """
    main processing function.
    """
                                 version=' '.join(['%prog', __version__]))
        print("\nError! No file specified for processing.\n")
        cl_parser.print_help()
                                 options.verbose, options.overwrite,
                                 options.use_existing,
                                 options.deletefiles, options.odir)
    if not os.access(cfg_data.hidden_dir, os.R_OK):
        log_and_exit("Error! The working directory is not readable!")
    if os.path.exists(args[0]):
        log_timestamp = datetime.datetime.today().strftime('%Y%m%d%H%M%S')
        err_msg = 'Unanticipated error encountered during processing!'
    else:
        err_msg = 'Error! Parameter file {0} does not exist.'.\
    """
    Get arguments and options from the calling command line.
    To be consistent with other OBPG programs, an underscore ('_') is used for
    multiword options, instead of a dash ('-').
    """
    cl_parser.add_option('--debug', action='store_true', dest='debug',
                         default=False, help=optparse.SUPPRESS_HELP)
    cl_parser.add_option('-d', '--deletefiles', action='store_true',
                         dest='deletefiles', default=False,
                         help='delete files created during processing')
    cl_parser.add_option('--ifile', action='store', type='string',
                         dest='ifile', help="input file")
    cl_parser.add_option('--output_dir', '--odir',
                         action='store', type='string', dest='odir',
                         help="user specified directory for output")
    cl_parser.add_option('--overwrite', action='store_true',
                         dest='overwrite', default=False,
                         help='overwrite files which already exist (default = stop processing if file already exists)')
    cl_parser.add_option('--use_existing', action='store_true',
                         dest='use_existing', default=False,
                         help='use files which already exist (default = stop processing if file already exists)')
    cl_parser.add_option('-v', '--verbose',
                         action='store_true', dest='verbose', default=False,
                         help='print status messages to stdout')
    (options, args) = cl_parser.parse_args()
    for ndx, cl_arg in enumerate(args):
        if cl_arg.startswith('par='):
            args[ndx] = cl_arg.lstrip('par=')
    if options.overwrite and options.use_existing:
        log_and_exit('Error! Options overwrite and use_existing cannot be ' +
                     'used simultaneously.')
    return options, args
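# Example (illustrative, not executed): a typical invocation, using a
# hypothetical script name and file paths, might look like
#   multilevel_processor.py -v --deletefiles --odir /data/out my_proc.par
# A leading 'par=' on the parameter file argument is stripped, and combining
# --overwrite with --use_existing causes an immediate exit.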
    """
    Reads flf_name and returns the list of files to be processed.
    """
    with open(flf_name, 'rt') as flf:
        inp_lines = flf.readlines()
    for line in inp_lines:
        fname = line.split('#')[0].strip()
        if os.path.exists(fname):
            files_list.append(fname)
        else:
            bad_lines.append(fname)
    if len(bad_lines) > 0:
        err_msg = 'Error! File {0} specified the following input files which could not be located:\n {1}'.\
                  format(flf_name, ', '.join([bl for bl in bad_lines]))
    """
    Run a processor, e.g. l2bin, which processes batches of files.
    """
    if os.path.exists((file_set[0])) and tarfile.is_tarfile(file_set[0]):
        processor.input_file = file_set[0]
    else:
        timestamp = time.strftime('%Y%m%d_%H%M%S', time.gmtime(time.time()))
        file_list_name = cfg_data.hidden_dir + os.sep + 'files_' + \
                         processor.target_type + '_' + timestamp + '.lis'
        with open(file_list_name, 'wt') as file_list:
            for fname in file_set:
                file_list.write(fname + '\n')
        processor.input_file = file_list_name
    for fspec in file_set:
        data_file_list.append(dfile)
    if 'suite' in processor.par_data:
        finder_opts['suite'] = processor.par_data['suite']
    elif 'prod' in processor.par_data:
        finder_opts['suite'] = processor.par_data['prod']
    if 'resolution' in processor.par_data:
        finder_opts['resolution'] = processor.par_data['resolution']
    if 'oformat' in processor.par_data:
        finder_opts['oformat'] = processor.par_data['oformat']
    if processor.output_file:
        processor.output_file = os.path.join(processor.out_directory,
                                             processor.output_file)
    else:
        processor.output_file = os.path.join(processor.out_directory,
                                             get_output_name.get_output_name(data_file_list,
                                                                             processor.target_type,
    log_msg = "Running {0} with input file {1} to generate {2} ".\
              format(processor.target_type,
                     processor.input_file,
                     processor.output_file)
    logging.debug(log_msg)
    return processor.output_file
    """
    Run a processor which deals with single input files (or pairs of files in
    the case of MODIS L1B processing in which GEO files are also needed).
    """
    cl_opts = optparse.Values()
    if 'suite' in processor.par_data:
        cl_opts.suite = processor.par_data['suite']
    elif 'prod' in processor.par_data:
        cl_opts.suite = processor.par_data['prod']
    else:
        cl_opts.suite = None
    if 'resolution' in processor.par_data:
        cl_opts.resolution = processor.par_data['resolution']
    else:
        cl_opts.resolution = None
    if 'oformat' in processor.par_data:
        cl_opts.oformat = processor.par_data['oformat']
    else:
        cl_opts.oformat = None
    if processor.output_file:
        output_file = os.path.join(processor.out_directory, processor.output_file)
    else:
        output_file = os.path.join(processor.out_directory,
                                   get_output_name.get_output_name([dfile], processor.target_type, cl_opts))
    print('in run_nonbatch_processor, output_file = ' + output_file)
    processor.output_file = output_file
    if (not os.path.exists(output_file)) or cfg_data.overwrite:
        if cfg_data.verbose:
            print('\nRunning ' + str(processor))
        proc_status = processor.execute()
        msg = "Error! Status {0} was returned during {1} {2} processing.".\
              format(proc_status, processor.instrument,
                     processor.target_type)
    elif not cfg_data.use_existing:
        log_and_exit('Error! Target file {0} already exists.'.\
                     format(output_file))
    processor.input_file = ''
    processor.output_file = ''
    """
    Build the command to run the processing script which is passed in.
    """
    args = ' ifile=' + proc.input_file
    args += ' ofile=' + proc.output_file
    cmd = ' '.join([prog, args])
    logging.debug("\nRunning: " + cmd)
    """
    Opens log file(s) for debugging.
    """
    info_log_name = ''.join(['Processor_', time_stamp, '.log'])
    debug_log_name = ''.join(['multilevel_processor_debug_', time_stamp,
    info_log_path = os.path.join(cfg_data.output_dir, info_log_name)
    debug_log_path = os.path.join(cfg_data.output_dir, debug_log_name)
    mlp_logger = logging.getLogger()
    info_hndl = logging.FileHandler(info_log_path)
    info_hndl.setLevel(logging.INFO)
    mlp_logger.addHandler(info_hndl)
    debug_hndl = logging.FileHandler(debug_log_path)
    debug_hndl.setLevel(logging.DEBUG)
    mlp_logger.addHandler(debug_hndl)
    logging.debug('Starting ' + os.path.basename(sys.argv[0]) + ' at ' +
                  datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
    """
    Returns a list with no duplicates.  Somewhat borrowed from:
    http://www.peterbe.com/plog/uniqifiers-benchmark (example f5)
    """
    for item in orig_list:
        if item not in seen_items:
            seen_items[item] = 1
            uniqified_list.append(item)
    return uniqified_list
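# Example (illustrative, not executed): passing ['a.L1A', 'b.L1A', 'a.L1A']
# (hypothetical values) to this helper returns ['a.L1A', 'b.L1A'], preserving
# the original order while dropping duplicate entries.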
FILE_USE_OPTS = ['deletefiles', 'overwrite', 'use_existing']
    'l1brsgen': 'L1B_BRS',
    'l1aextract': 'L1A.sub',
    'l1aextract_viirs': 'L1A.sub',
    'l1aextract_seawifs': 'L1A.sub',
    'l1aextract_modis': 'L1A.sub',
    'l1mapgen': 'L1B_MAP',
    'l2brsgen': 'L2_BRS',
    'l2extract': 'L2.sub',
    'l2mapgen': 'L2B_MAP',
    'level 1b': 'L1B_LAC',
input_file_data = {}
if os.environ['OCSSWROOT']:
    OCSSWROOT_DIR = os.environ['OCSSWROOT']
    logging.debug('OCSSWROOT -> %s', OCSSWROOT_DIR)
else:
    sys.exit('Error! Cannot find OCSSWROOT environment variable.')

if __name__ == "__main__":