8 from netCDF4
import Dataset
9 from datetime
import datetime
as DT
15 from collections
import namedtuple
as NT
16 from numpy
import ones
as npones
17 from numpy
import float32
def ParseCommandLine(args):
    '''Specifies command line arguments and parses command line accordingly.

    Parameters
    ----------
    args : list of str
        Command-line tokens, e.g. sys.argv[1:].

    Returns
    -------
    argparse.Namespace
        Parsed options; csvfile/hdrfile/pvqcsv/ofile are defaulted from the
        L0 file basename when not supplied.
    '''
    # NOTE(review): the enclosing "def" line was elided from this chunk; the
    # name/signature are reconstructed from the visible body (which calls
    # parser.parse_args(args)) -- confirm against the original file.
    ocvarroot = os.getenv('OCVARROOT')
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter,
        description="Generates a HICO L1B file from an L0 file.")
    parser.add_argument('-v', '--version', action='version',
                        version='%(prog)s ' + __version__)
    # BUGFIX: required was the string 'True' (only works because a non-empty
    # string is truthy); argparse documents this parameter as a bool.
    parser.add_argument('-i', '--l0file', type=str, required=True,
                        help='iss*.hico.bil (required)')
    parser.add_argument('-c', '--csvfile', type=str, help='iss*.hico.csv')
    parser.add_argument('-r', '--hdrfile', type=str, help='iss*.hico.hdr')
    # Ancillary time/earth-orientation files default to the OCVARROOT tree.
    parser.add_argument('-l', '--lpsfile', type=str,
                        default=os.path.join(ocvarroot, 'modis', 'leapsec.dat'),
                        help=' leapsec file ')
    parser.add_argument('-e', '--earthfile', type=str,
                        default=os.path.join(ocvarroot, 'hico', 'finals.data'),
                        help=' earth-orient file ')
    parser.add_argument('-b', '--boresight', nargs=3, type=float,
                        default=([-0.9957, 0.0268, -0.0128]),
                        help=('Process Bore Sight Parameters'))
    parser.add_argument('-p', '--pvqcsv', type=str,
                        help='iss*hico_pos_vel_quat.csv (Must be a CSV file) ')
    parser.add_argument('-n', '--navoffset', action='store_true', default=False)
    parser.add_argument('-o', '--ofile', type=str,
                        help=' Output netCDF filename ')
    parser.add_argument('-d', '--debug', action='store_true', default=False)
    parsedArgs = parser.parse_args(args)
    # Derive companion filenames from the L0 basename when not given.
    l0basename = parsedArgs.l0file.split('.bil')[0]
    if not parsedArgs.csvfile:
        parsedArgs.csvfile = '%s.csv' % l0basename
    if not parsedArgs.hdrfile:
        parsedArgs.hdrfile = '%s.hdr' % l0basename
    if not parsedArgs.pvqcsv:
        parsedArgs.pvqcsv = '%s_pos_vel_quat.csv' % l0basename
    if not parsedArgs.ofile:
        # "L0M" inputs get the L1BM product suffix, everything else L1b.
        # (the "else:" line was elided in this chunk; restored from context)
        if "L0M" in parsedArgs.l0file:
            parsedArgs.ofile = '%s.L1BM.nc' % l0basename
        else:
            parsedArgs.ofile = '%s.L1b.nc' % l0basename
    # NOTE(review): "return parsedArgs" was elided from this chunk but is
    # required by callers (the driver reads pArgs.*); restored here.
    return parsedArgs
# --- Logger-setup fragment (enclosing "def" line not visible in this chunk).
# Builds a per-run logger whose name embeds the current date and time, with a
# DEBUG-level file handler and an INFO-level stream handler sharing one format.
# NOTE(review): text is extraction-mangled; the leading numbers (64, 65, ...)
# are original-source line residue, not code.
64 lgrName =
'l1bgen_hico_%s_T_%s' % (DT.date(DT.now()), DT.time(DT.now()))
65 lgr = logging.getLogger(lgrName)
# Log-record format: timestamp - logger - level - [module..func..lineno] - ...
# (the tail of this backslash-continued string, presumably '%(message)s', is
# elided in this chunk -- confirm against the original)
69 fmt =
'%(asctime)s - %(name)s - %(levelname)s -\
70 [%(module)s..%(funcName)s..%(lineno)d] -\
72 formatter = logging.Formatter(fmt)
# File handler: everything DEBUG and up goes to "<loggerName>.log".
73 fh = logging.FileHandler(
'%s.log' % lgrName)
74 fh.setLevel(logging.DEBUG)
75 fh.setFormatter(formatter)
# Console handler: INFO and up to the stream (stderr by default).
79 formatter = logging.Formatter(fmt)
80 ch = logging.StreamHandler()
81 ch.setLevel(logging.INFO)
82 ch.setFormatter(formatter)
# NOTE(review): lgr.setLevel(...) / lgr.addHandler(fh) / lgr.addHandler(ch)
# and the "return lgr" presumably live in the elided lines -- confirm.
85 lgr.debug(
'Logger initialized')
# --- Quantization-parameter fragment (enclosing "def" line not visible).
# Computes netCDF packing attributes for an integer encoding of [dataMin,
# dataMax]: scale_factor spreads the range over 2**bits - 1 steps, and
# add_offset places the packed midpoint at 2**(bits-1) steps above dataMin.
# assumes dataMin/dataMax are numeric and bits is an int -- TODO confirm
# from the (elided) signature and callers.
90 scale_factor = (dataMax - dataMin) / (2 ** bits - 1)
91 add_offset = dataMin + 2 ** (bits - 1) * scale_factor
92 return(scale_factor, add_offset)
# --- Scene-location lookup fragment (enclosing "def" line not visible).
# Maps a HICO scene ID to a human-readable location via a pickled dict
# shipped under $OCSSWROOT/var/hico; unknown IDs fall back to 'unknown'.
96 root = os.getenv(
'OCSSWROOT')
97 locfile = os.path.join(root,
'var/hico/HICO_ID_SCENE_NAME_dict.pkl')
# NOTE(review): the "try:" header (original line 98) is elided; the except
# clause below pairs with it. pickle.load is only safe because locfile is a
# file installed with the software, not untrusted input.
99 with open(locfile,
'rb')
as pklf:
100 loc_dict = pickle.load(pklf)
101 location = loc_dict.get(
str(scene_id),
'unknown')
# Handler body (original lines 104+) is elided from this chunk.
103 except FileNotFoundError
as e:
112 def FillNC(root_grp_ptr, scene_location):
# Populates an open netCDF4 root group with the fixed HICO L1B layout:
# dimensions, global attributes, and the navigation / products /
# scan_line_attributes / metadata groups and their variables.
#   root_grp_ptr  -- an open, writable netCDF4 Dataset (root group)
#   scene_location -- human-readable scene location string (global attr)
# Returns (in elided code, presumably "return retGps") the retGps bundle.
# NOTE(review): text is extraction-mangled; leading numbers are line residue.
# retGps is a namedtuple *class* used as a plain attribute bag -- fields are
# assigned onto it below (retGps.navGrp = ...), never instantiated.
113 retGps =
NT(
"returnGroups",
"calGrp, productsGrp, navGrp, slaGrp, periodGrp")
# Fixed HICO scene geometry: 512 cross-track samples, 2000 scan lines,
# 128 spectral bands.
114 root_grp_ptr.createDimension(
'samples', 512)
115 root_grp_ptr.createDimension(
'scan_lines', 2000)
116 root_grp_ptr.createDimension(
'bands', 128)
# Global (CF/ACDD-style) attributes.
117 root_grp_ptr.instrument =
'HICO'
118 root_grp_ptr.institution =
'NASA Goddard Space Flight Center'
119 root_grp_ptr.resolution =
'100m'
120 root_grp_ptr.location_description = scene_location
121 root_grp_ptr.license =
'https://science.nasa.gov/earth-science/earth-science-data/data-information-policy/'
122 root_grp_ptr.naming_authority =
'gov.nasa.gsfc.sci.oceandata'
# Creation timestamp in ISO-8601 Z form.
# NOTE(review): DT.utcnow() is deprecated in modern Python (3.12+); consider
# DT.now(timezone.utc) if/when this file is modernized.
123 root_grp_ptr.date_created = DT.strftime(DT.utcnow(),
'%Y-%m-%dT%H:%M:%SZ')
124 root_grp_ptr.creator_name =
'NASA/GSFC'
125 root_grp_ptr.creator_email =
'data@oceancolor.gsfc.nasa.gov'
126 root_grp_ptr.publisher_name =
'NASA/GSFC'
127 root_grp_ptr.publisher_url =
'https://oceancolor.gsfc.nasa.gov'
# BUG(review): 'oceacolor' below looks like a typo for 'oceancolor' (compare
# creator_email above) -- left unchanged here; fix in a code change.
128 root_grp_ptr.publisher_email =
'data@oceacolor.gsfc.nasa.gov'
129 root_grp_ptr.processing_level =
'L1B'
# Navigation group: per-pixel geometry, all float32 (scan_lines x samples).
130 nav_grp = root_grp_ptr.createGroup(
'navigation')
# NOTE(review): the "nav_vars = []" initialization (original line 131) is
# elided from this chunk.
132 nav_vars.append(nav_grp.createVariable(
'sensor_zenith',
'f4', (
'scan_lines',
'samples',)))
133 nav_vars.append(nav_grp.createVariable(
'solar_zenith',
'f4', (
'scan_lines',
'samples',)))
134 nav_vars.append(nav_grp.createVariable(
'sensor_azimuth',
'f4', (
'scan_lines',
'samples',)))
135 nav_vars.append(nav_grp.createVariable(
'solar_azimuth',
'f4', (
'scan_lines',
'samples',)))
136 nav_vars.append(nav_grp.createVariable(
'longitude',
'f4', (
'scan_lines',
'samples',)))
137 nav_vars.append(nav_grp.createVariable(
'latitude',
'f4', (
'scan_lines',
'samples',)))
# Common attributes for every navigation variable. The "for var in nav_vars:"
# header (original line 138) and the zenith/other valid-range branch bodies
# (original lines 141-144) are elided from this chunk.
139 var.units =
'degrees'
140 if var.name.endswith(
'zenith'):
# long_name: e.g. 'sensor_zenith' -> 'sensor zenith' (trailing 's' stripped).
145 var.long_name = var.name.replace(
'_',
' ').rstrip(
's')
146 retGps.navGrp = nav_grp
# Products group: packed top-of-atmosphere radiance.
147 retGps.productsGrp = root_grp_ptr.createGroup(
'products')
# Lt is stored as unsigned 16-bit; the dimension-tuple tail ('samples',
# 'bands') on original line 149 is elided from this chunk.
148 lt = retGps.productsGrp.createVariable(
'Lt',
'u2', (
'scan_lines',
150 lt.scale_factor = float32([0.02])
151 lt.add_offset = float32(0)
# Physical units of the unpacked radiance.
152 lt.units =
"W/m^2/micrometer/sr"
154 lt.long_name =
"HICO Top of Atmosphere"
155 lt.wavelength_units =
"nanometers"
# fwhm placeholder: 128 bands of -1 (to be overwritten by real values later
# -- presumably when the radiance data is written; confirm against caller).
157 lt.fwhm = npones((128,), dtype=
'f4') * -1
159 lt.wavelengths = npones((128,), dtype=
'f4')
# NOTE(review): wavelength_units is assigned twice (original lines 155 and
# 160) with the same value; one assignment is redundant.
160 lt.wavelength_units =
"nanometers"
# Scan-line attributes group; the dimension tail of scan_quality_flags
# (original lines 163-164) is elided from this chunk.
161 retGps.slaGrp = root_grp_ptr.createGroup(
'scan_line_attributes')
162 retGps.slaGrp.createVariable(
'scan_quality_flags',
'u1', (
'scan_lines',
165 meta_grp = root_grp_ptr.createGroup(
'metadata')
# FGDC-style metadata subtree under /metadata.
166 pl_info_grp = meta_grp.createGroup(
"FGDC/Identification_Information/Platform_and_Instrument_Identification")
167 pl_info_grp.Instrument_Short_Name =
"hico"
168 prc_lvl_grp = meta_grp.createGroup(
"FGDC/Identification_Information/Processing_Level")
169 prc_lvl_grp.Processing_Level_Identifier =
"Level-1B"
# Groups handed back to the caller for later writes (time period of content,
# calibration).
170 retGps.periodGrp = meta_grp.createGroup(
"FGDC/Identification_Information/Time_Period_of_Content")
172 retGps.calGrp = meta_grp.createGroup(
"HICO/Calibration")
# NOTE(review): the closing "return retGps" (original lines 173+) is elided
# from this chunk -- callers below use the returned bundle.
# --- Driver-function fragment (enclosing "def" line and docstring delimiters
# not visible in this chunk). Coordinates L0->L1B processing: parses inputs,
# builds/validates the pos_vel_quat CSV, runs geolocation, and writes the
# output netCDF, mapping each known failure mode to a logged error.
# NOTE(review): text is extraction-mangled; leading numbers are line residue.
178 Coordinates calls to hico l0 to l1b processes and manages data recording
184 mainLogger.info(
"l1bgen_hico %s" % __version__)
# Normalize the path-like arguments to plain strings.
185 pvqcsv =
''.join(pArgs.pvqcsv)
186 navoffset = pArgs.navoffset
187 earth_orient_file =
''.join(pArgs.earthfile)
188 leap_sec_file =
''.join(pArgs.lpsfile)
# L0->L1B converter object; shares the main logger's namespace.
190 hc =
HicoL0toL1b(pArgs, parentLoggerName=mainLogger.name)
# If the pos/vel/quat CSV does not exist yet, create it (the call on the
# elided lines 194-195 presumably builds it; only its final keyword survives).
193 if not os.path.exists(pvqcsv):
196 doNavTimeCorrection=navoffset)
# NOTE(review): assert used for runtime validation -- stripped under
# "python -O", which would also defeat the AssertionError handler below;
# consider an explicit check + raise.
197 assert os.path.exists(pvqcsv)
201 mainLogger.info(
"Running geolocation...")
# Geolocation; the call's remaining arguments (original line 204) are elided.
203 hicogeo =
hico_geo(pvqcsv, earth_orient_file, leap_sec_file,
205 mainLogger.info(
"Writing to netcdf...")
# Scene ID from the L0 header; scene_location is presumably derived from it
# via the pickled lookup (elided lines) -- confirm.
207 sceneID =
str(hc.L0.header[
'ID'])
# Create the output file, lay out groups/attrs via FillNC, then write
# radiance and geolocation data into the returned groups.
209 with Dataset(pArgs.ofile,
'w', format=
'NETCDF4')
as root_grp:
210 ncGroups =
FillNC(root_grp, scene_location=scene_location)
211 hc.WriteRadFile(ncGroups.productsGrp, ncGroups.periodGrp)
212 hicogeo.write_geo_nc(ncGroups.navGrp, ncGroups.calGrp)
213 mainLogger.info(
"NC file created.")
# Failure handling: each known exception type gets a specific log message.
# (The matching "try:" header is elided from this chunk.)
214 except BadFormatCSVException:
215 mainLogger.error(
"Failed to complete:")
216 mainLogger.error(
"CSV file is badly formatted")
217 mainLogger.error(
"unable to resolve issue.")
# Raised by the pvqcsv existence assert above.
218 except AssertionError:
219 mainLogger.error(
"Failed to create PVQ file.")
220 except EmptyCSVException:
221 mainLogger.error(
"Failed to complete:")
222 mainLogger.error(
"CSV file is empty.")
# NOTE(review): the except header for this handler (original line 223) is
# elided -- presumably a geolocation-specific exception; confirm.
224 mainLogger.error(
"Failed to complete:")
225 mainLogger.error(
"Failed geolocation")
226 except UserWarning
as uw:
227 mainLogger.warning(uw)
228 except USGNCCoarseTimeException:
229 mainLogger.error(
"Failed to complete:")
230 mainLogger.error(
"No overlap between coarse PV & epoch timestamps.")
231 mainLogger.error(
"Likely due to erroneous time data in CSV file.")
232 mainLogger.error(
"Cannot create *.hico_pos_vel_quat.csv file")
233 except PVQInterpolation
as pvqinterr:
# The %s argument to this lazy-formatted log call (original line 235) is
# elided from this chunk.
234 mainLogger.error(
"interpolation error due to %s field in CSV file",
# Catch-all: log the unexpected error with traceback (exc_info=True).
236 except Exception
as e:
237 mainLogger.error(
"Failed to complete:")
238 mainLogger.error(
"Undiagnosed failure")
239 mainLogger.error(
"%s" %e, exc_info=
True)
241 mainLogger.info(
"Finished processing %s" % pArgs.l0file)
# Script entry point: run the driver when executed directly (the guarded body,
# presumably main(sys.argv[1:]), is elided from this chunk -- confirm).
244 if __name__ ==
'__main__':