Ocean Color Science Software

ocssw V2022
interpolateFields2hicotimes.py
1 '''
2 Created on Feb 1, 2016
3 
4 @author: rhealy
5 '''
6 
7 code_version = 1.0
8 code_author = 'R. Healy (richard.healy@nasa.gov) SAIC'
9 code_name = 'interpFields2HicoTimes.py'
10 code_date = '2/3/2016'
11 
12 def qtpow(q,pwr):
13  import sys
14  import numpy as np
15 
16  # NAME:
17  # qtpow
18  #
19  # Converted to python by R. Healy (2/9/2016) richard.healy@nasa.gov
20  #
21  # AUTHOR:
22  # Craig B. Markwardt, NASA/GSFC Code 662, Greenbelt, MD 20770
23  # craigm@lheamail.gsfc.nasa.gov
24  # UPDATED VERSIONs can be found on my WEB PAGE:
25  # http://cow.physics.wisc.edu/~craigm/idl/idl.html
26  #
27  # PURPOSE:
28  # Raise quaternion Q to the "power" POW
29  #
30  # MAJOR TOPICS:
31  # Geometry
32  #
33  # CALLING SEQUENCE:
34  # QNEW = QTPOW(Q, POW)
35  #
36  # DESCRIPTION:
37  #
38  # The function QTPOW raises a quaternion Q to the power POW. The
39  # operation
40  #
41  # QNEW = QTPOW(Q, POW)
42  #
43  # is equivalent to
44  #
45  # QNEW = QTEXP( POW * QTLOG(Q))
46  #
47  # which is the same as the definition of raising a real number to
48  # any power (however, QTPOW is faster than using QTLOG and QTEXP).
49  #
50  # For integer values of POW, this form of exponentiation is also
51  # directly equivalent to the multiplication of that many Q's
52  # together.
53 
54  nq = q.size//4
55  npw = pwr.size
56 
57  if nq < 1 or npw < 1:
58  sys.exit('qtpow: Invalid array size')
59 
60  v = q[:,0:3]
61  sinth = np.sqrt(np.sum(np.power(v,2),axis=1))
62  th = np.arctan2(sinth, q[:,3])
63  rat = th*0
64  wh = (sinth != 0)
65  if np.any(wh):
66  rat[wh] = np.sin(np.multiply(pwr[wh],th[wh]))/sinth[wh]
67  q1 = np.zeros(q.shape)
68  q1[:,3] = np.cos(np.multiply(th,pwr))
69 
70  q1[:,0:3] = np.multiply(np.tile(rat,(3,1)).T,v)
71  return q1
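 # Illustrative usage sketch for qtpow (hypothetical values): raising a
 # 90-degree rotation about +z to the power 0.5 yields the 45-degree
 # rotation, i.e. slerp halfway toward the identity quaternion:
 #   q = np.array([[0.0, 0.0, np.sin(np.pi/4), np.cos(np.pi/4)]])  # [x,y,z,w]
 #   qtpow(q, np.array([0.5]))   # -> approx [[0., 0., sin(pi/8), cos(pi/8)]]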
72 
73 def qtmult(aqt, bqt, inverse1=0, inverse2=0):
74  import numpy as np
75  import sys
76 
77  #This will only work with hico
78 
79  sz1 = aqt.shape
80  sz2 = bqt.shape
81  print('sz1,sz2=',sz1,sz2)
82  if sz1[0] < 1 or sz2[0] < 1:
83  sys.exit('ERROR A: Q1 and Q2 must be quaternions')
84  if sz1[1] != 4 or sz2[1] != 4:
85  sys.exit('ERROR B: Q1 and Q2 must be quaternions')
86 
87  n1 = aqt.size//4
88  n2 = bqt.size//4
89 
90  if n1 != n2 and n1 != 1 and n2 != 1:
91  sys.exit( 'ERROR: Q1 and Q2 must both have the same number of quaternions')
92 
93  #nq = np.max(n1,n2)
94 
95  cqt = np.zeros(aqt.shape)
96 
97  aqt0 = np.squeeze(aqt[:,0])
98  aqt1 = np.squeeze(aqt[:,1])
99  aqt2 = np.squeeze(aqt[:,2])
100  aqt3 = np.squeeze(aqt[:,3])
101 
102 
103  bqt0 = np.squeeze(bqt[:,0])
104  bqt1 = np.squeeze(bqt[:,1])
105  bqt2 = np.squeeze(bqt[:,2])
106  bqt3 = np.squeeze(bqt[:,3])
107 
108  if inverse1 > 0:
109  aqt0 = -aqt0
110  aqt1 = -aqt1
111  aqt2 = -aqt2
112 
113  if inverse2 > 0:
114  bqt0 = -bqt0
115  bqt1 = -bqt1
116  bqt2 = -bqt2
117 # print('aqt1=',aqt1.shape,'aqt2=',aqt2.shape,'aqt3=',aqt3.shape,'aqt0=',aqt0.shape)
118 # print('bqt1=',bqt1.shape,'bqt2=',bqt2.shape,'bqt3=',bqt3.shape,'bqt0=',bqt0.shape)
119 # print('mult=',np.multiply(aqt0,bqt3).shape)
120  cqt[:,0] = np.squeeze([ np.multiply(aqt0,bqt3) + np.multiply(aqt1,bqt2) - np.multiply(aqt2,bqt1) + np.multiply(aqt3,bqt0)] )
121  cqt[:,1] = np.squeeze([-np.multiply(aqt0,bqt2) + np.multiply(aqt1,bqt3) + np.multiply(aqt2,bqt0) + np.multiply(aqt3,bqt1)] )
122  cqt[:,2] = np.squeeze([ np.multiply(aqt0,bqt1) - np.multiply(aqt1,bqt0) + np.multiply(aqt2,bqt3) + np.multiply(aqt3,bqt2)] )
123  cqt[:,3] = np.squeeze([-np.multiply(aqt0,bqt0) - np.multiply(aqt1,bqt1) - np.multiply(aqt2,bqt2) + np.multiply(aqt3,bqt3)] )
124 
125  return cqt
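 # Note on conventions (inferred from the code above): quaternions are
 # stored as [x, y, z, w] rows, and inverse1/inverse2 conjugate the
 # corresponding argument, so the relative rotation between two attitude
 # samples can be formed as, e.g.:
 #   qd = qtmult(q_a, q_b, inverse1=1)   # q_a^-1 * q_b, both shaped (n, 4)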
126 
127 def qterp_slerp(t0, q0, t1):
128  from value_locate import value_locate
129  import numpy as np
130  import pandas as pd
131  # NAME:
132  # QTERP
133  # Converted to python by R. Healy (richard.healy@nasa.gov) 2/10/16
134  #
135  # AUTHOR:
136  # Translated to python by Rick Healy (richard.healy@nasa.gov)
137  #
138  # Craig B. Markwardt, NASA/GSFC Code 662, Greenbelt, MD 20770
139  # craigm@lheamail.gsfc.nasa.gov
140  # UPDATED VERSIONs can be found on my WEB PAGE:
141  # http://cow.physics.wisc.edu/~craigm/idl/idl.html
142  #
143  # PURPOSE:
144  # Smoothly interpolate from a grid of quaternions (spline or slerp)
145  #
146  # MAJOR TOPICS:
147  # Geometry
148  #
149  # CALLING SEQUENCE:
150  # QNEW = QTERP(TGRID, QGRID, TNEW, [/SLERP], QDIFF=, [/RESET])
151  #
152  # DESCRIPTION:
153  #
154  # The function QTERP is used to interpolate from a set of known unit
155  # quaternions specified on a grid of independent values, to a new set
156  # of independent values. For example, given a set of quaternions at
157  # specified key times, QTERP can interpolate at any points between
158  # those times. This has applications for computer animation and
159  # spacecraft attitude control.
160  #
161  nq = q0.size//4
162 
163  if nq == 1:
164  return np.tile(q0.T,(t1.size,1))
165 
166  qdiff = qtmult(q0[0:nq-1,:], q0[1:,:],inverse1=1)
167  wh = qdiff[:,3] <0
168  qn = qdiff[wh,3]
169  if qn.size > 0:
170  qdiff[wh,3] = -qdiff[wh,3]
171 
172  ii = value_locate(t0, t1)
173  hh = np.zeros(len(t1))
174  # Maybe there's a better way to do this, but for now this works.
175  # The way IDL does this is totally mysterious to me since it
176  # subtracts t0(ii) from t1 without reference to ii or the number of ii's
177  # and passing hh the same way
178  for i in np.arange(0,len(ii)):
179  if ii[i]>0 and ii[i]<nq-1:
180  hh[i] = (t1[i]-t0[ii[i]])/(t0[ii[i]+1]-t0[ii[i]])
181  ii2 = (ii[ii>0])
182  ii3 = (ii2[ii2<nq-1])
183  return qtmult(q0[ii3,:],qtpow(qdiff[ii3,:],hh[ii3]) )
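#
# In formula form, the slerp above works as follows: for a query time t with
# t_i <= t < t_(i+1) and fractional position h = (t - t_i)/(t_(i+1) - t_i),
#     q(t) = q_i * (q_i^-1 * q_(i+1))^h
# which corresponds to qtmult(q0[i], qtpow(qdiff[i], h)) in the code above.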
184 #
185 # I've only implemented the slerp component of qterp
186 # since that's all we use. Here's the IDL code for the spline component
187 # for later implementation, if desired
188 # -rjh 2/10/16
189 #
190 # q1 = (q0(*,0) # t1) * 0
191 # for i = 0, 3 do $
192 # q1(i,*) = spl_interp(t0, q0(i,*), spl_init(t0, q0(i,*)), t1)
193 # tot = sqrt(total(q1^2,1))
194 # for i = 0, 3 do $
195 # q1(i,*) = q1(i,*) / tot
196 # return, q1
197 
198 
199 class Hico(object):
200 
201  def __init__(self,fileName,csvFileName,delta_odrcBTmGPS,iss_orientation,n_pixels=512,delta_texp=0,delta_ticugps=0,delta_tisspvq=0):
202  from HicoHeader import HicoHeader
203  from astreduc import JDay
204  import numpy as np
205  import pandas as pd
206  from scipy.interpolate import interp1d,UnivariateSpline
207  import math
208  import sys
209  from datetime import datetime
210 
211  self.begin_time = datetime.now()
212  self.csvFileName = csvFileName
213  infile_root = '.'.join(fileName.split('.')[0:-2])
214  print('infile_root={}'.format(infile_root))
215  self.outname = '{}_pos_vel_quat.csv'.format(infile_root)
216  self.anglename = '{}_LonLatViewAngles.bil'.format(infile_root)
217  self.n_pixels = n_pixels
218  self.delta_odrcBTmGPS = delta_odrcBTmGPS
219  # GPS Week 0 began at 00:00:00 UTC 1980 Jan 06
220  # sec in day*( (days in 4yr) * 7 sets +
221  # yr2008 - first five days of 1980)
222  # + leap seconds since 1980 Jan 6
223  # GPS seconds at 2009 Jan 01 00:00:00 UTC, stored as a 32-bit integer
224  self.gps_seconds_2009Jan01_00_00_00 = np.int32(86400*((3 * 365 + 366)*7 +
225  366 - 5) + 15)
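 # Worked check of the constant above: 7 four-year blocks of (3*365 + 366)
 # = 1461 days span 1980 Jan 06 .. 2008 Jan 06 (10227 days); the 366 days
 # of 2008 reach 2009 Jan 06, and backing off 5 days lands on 2009 Jan 01,
 # so 86400*(10227 + 366 - 5) + 15 leap seconds = 914803215 s.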
226 
227  self.orientation = iss_orientation
228  # width of pulse. PPS records arrival, image not triggered 'til end.
229  self.trigger_pulse_width = 0.860e-3
230 
231 
232  hdr = HicoHeader(fileName)
233  self.header = hdr.header
234  self.L0 = hdr.L0
235  self.end_time = hdr.L0['end_time'][2]
236  self.nls=hdr.L0['n_image']
237  self.ffpps=hdr.header['FFpps']
238  self.ffpps_sub=hdr.header['FFppsSub']
239  self.lfpps=hdr.header['LFpps']
240  self.lfpps_sub=hdr.header['LFppsSub']
241  self.thetas=hdr.L0['ScenePointingAngle']
242 
243  print('estimated start_date=',hdr.L0["start_date"],hdr.L0["start_time"])
244  print('estimated end_date=',hdr.L0["end_date"],hdr.L0["end_time"])
245  start_t= hdr.L0["start_time"]
246  start_t[2]-=15
247  self.start_date, self.start_time = self.timeIncrement(hdr.L0["start_date"],start_t)
248  end_t= hdr.L0["end_time"]
249  end_t[2]+=15
250  self.end_date, self.end_time = self.timeIncrement(hdr.L0["end_date"],end_t)
251 
252  print('start_date=',self.start_date,self.start_time)
253  print('end_date=',self.end_date,self.end_time)
254  self.start_struct = self.getTimeStruct(self.start_date,self.start_time)
255  self.end_struct = self.getTimeStruct(self.end_date,self.end_time)
256 
257  print('Year for jday=',self.start_struct['year'])
258  self.Jday_start = JDay(self.start_struct['century']*100+self.start_struct['year'],self.start_struct['month'],self.start_struct['day'],
259  self.start_struct['hour'],self.start_struct['minute'],self.start_struct['second'])
260  self.Jday_end = JDay(self.end_struct['century']*100+self.end_struct['year'],self.end_struct['month'],self.end_struct['day'],
261  self.end_struct['hour'],self.end_struct['minute'],self.end_struct['second'])
262 
263  if delta_texp <= 0:
264  self.exptimes = hdr.header['TriggerCount']* 1.117460459e-6
265  self.cexp = '_expDEF'
266  else:
267  self.exptimes = hdr.header['TriggerCount']*delta_texp
268  self.cexp = '_exp{}'.format(int(delta_texp*1.0e7))
269 
270  self.d_ticugps = delta_ticugps
271  if delta_ticugps == 0:
272  self.cdticu='_dticu0'
273  else:
274  self.cdticu='_dticu{}'.format(int(delta_ticugps*1.0e4))
275 
276 
277  self.d_tisspvq = delta_tisspvq
278  if delta_tisspvq == 0:
279  self.cdtpvq='_dpvq0'
280  else:
281  self.cdtpvq='_dpvq{}'.format(int(delta_tisspvq*1.0e4))
282 
283  self.hsdatf = pd.read_csv(csvFileName)
284 
285  self.hsdat = self.gethsdatrange(self.hsdatf,self.Jday_start,self.Jday_end)
286 
287  #self.hsdat=self.hsdatf.copy()
288  self.hsdat=self.hsdatf[self.jtB:self.jtE].set_index(np.arange(0,self.jtE-self.jtB)).copy()
289  print('hsdat shape=',self.hsdat.shape)
290  self.fixed_hwreg = 0
291  if self.hsdat.loc[0,'ICUTIMEHWREGSECOND'] > self.hsdat.loc[len(self.hsdat.loc[:,'ICUTIMEHWREGSECOND'])-1,'ICUTIMEHWREGSECOND']:
292  idx = self.hsdat['ICUTIMEHWREGSECOND'] < self.hsdat.loc[0,'ICUTIMEHWREGSECOND']
293  self.hsdat.loc[idx,'ICUTIMEHWREGSECOND']+=65536
294  self.fixed_hwreg = 1
295 
296  self.ffpps_all=self.ffpps+self.ffpps_sub*16.72e-6
297  self.lfpps_all=self.lfpps+self.lfpps_sub*16.72e-6
298 
299  if self.lfpps_all < self.ffpps_all:
300  self.ffpps_all+=65536
301 
302  self.hwreg=self.hsdat.loc[:,'ICUTIMEHWREGSECOND'] + self.hsdat.loc[:,'ICUTIMEHWREGSUBSECOND'] * 16.72e-6
303 
304  if self.fixed_hwreg == 1:
305  # at this point both ffpps_all and lfpps_all are either < 65536 or >= 65536
306  # since fixed_hwreg==1, that means we check one and add
307  if self.lfpps_all < self.hwreg[0]:
308  self.lfpps_all+=65536
309  if self.ffpps_all < self.hwreg[0]:
310  self.ffpps_all+=65536
311  print('lfpps:',self.lfpps_all,self.hwreg[len(self.hwreg)-1])
312  if self.lfpps_all < self.hwreg[0]:
313  sys.exit('\n================================================\n \
314  interpolate_fields_to_hico_times error: Error \n \
315  All hsm csv data is AFTER L0 file - check inputs \n \
316  L0 = {}\n CSV = {} \n \
317  ================================================ \n \
318  \n'.format(fileName,csvFileName))
319 
320  if self.ffpps_all > self.hwreg[len(self.hwreg)-1]:
321  sys.exit('\n================================================\n \
322  interpolate_fields_to_hico_times error: Error \n \
323  All hsm csv data is BEFORE L0 file - check inputs\n \
324  L0 = {}\n CSV = {} \n \
325  ================================================ \n \
326  \n'.format(fileName,csvFileName))
327 
328  if self.ffpps_all < self.hwreg[0]:
329  sys.exit('\n================================================ \n \
330  interpolate_fields_to_hico_times error: Error \n \
331  hsm csv data starts AFTER L0 starts file - check inputs\n \
332  L0 = {}\n CSV = {} \n \
333  ================================================ \n \
334  \n'.format(fileName,csvFileName))
335 
336  if self.lfpps_all > self.hwreg[len(self.hwreg)-1]:
337  sys.exit('\n================================================ \n \
338  interpolate_fields_to_hico_times error: Error \n \
339  hsm csv data ends BEFORE L0 file ends- check inputs\n \
340  L0 = {}\n CSV = {} \n \
341  ================================================ \n \
342  \n'.format(fileName,csvFileName))
343 
344  # the last of these is the tick just before the camera starts
345  # This is used to get the starting time in the HICO file.
346  #self.hwreg_locs_lt=self.hwreg[self.hwreg <= self.ffpps_all]
347  #self.just_before=len(self.hwreg_locs_lt)-2
348  self.locs_lt = self.hwreg <= self.ffpps_all
349  self.idx_lt = self.locs_lt.index[self.locs_lt == True]
350  # the first of these is the tick just after the camera stops
351  self.hwreg_locs_gt=self.hwreg[self.hwreg >= self.lfpps_all]
352  self.just_after=0
353 
354  # "t" are the times associated with the ISSPOSITIONS, and, perhaps more
355  # basically, with the PPS signals.
356  # For this, I need to convert the CC-YY-MM-DD-HH:MM:SS.ssss into "seconds
357  # since the start of the file" - NEEDS TO BE WRITTEN - I was going to make
358  # another subroutine (well, a function or program in IDL) to do this. "t"
359  # has one "time" per line in the file.
360  #
361  # really, this is t_icu, the broadcast time that the PPS arrived
362 
363  self.t_icu = self.time_to_seconds(self.hsdat['ICUTIMEISSHOUR'].values,
364  self.hsdat['ICUTIMEISSMINUTE'].values,
365  self.hsdat['ICUTIMEISSSECOND'].values,
366  self.hsdat['ICUTIMEISSSUBSECOND'].values)
367  self.t_icugps = self.hsdat.loc[:,'ICUTIMEGPSSECONDS'].values + 1.e-6 * self.hsdat.loc[:,'ICUTIMEISSSUBSECOND'].values
368 
369 
370  # need to add subseconds? used for Star tracker
371  self.t_issgps = self.hsdat.loc[:,'_ISSGPSTIME'].values + 1.e-6 * self.hsdat.loc[:,'ISSTIMESUBSECOND'].values
372  self.t_iss = self.time_to_seconds(self.hsdat['ISSTIMEHOUR'].values,
373  self.hsdat['ISSTIMEMINUTE'].values,
374  self.hsdat['ISSTIMESECOND'].values,
375  self.hsdat['ISSTIMESUBSECOND'].values)
376  # Need to take care of the Broadcast time - GPS time difference. Even though the
377  # BAD times are marked as GPS, they are not held strictly to GPS. So we
378  # need to take care of the difference. One of the (now required) input
379  # arguments is delta_odrcBTmGPS.
380  # Now Bill references the _ISSGPSSECONDS in his email. This seems to be
381  # exactly the same as the ICUTIMEGPSSECONDS. Now the notes don't say what
382  # the source of the latter is, except when you take the fields that they
383  # claim are from the broadcast ancillary data, they add up to the same
384  # number of seconds.
385  # 2012/12/03 test 0 time offset by commenting
386 
387  self.t_issgps-=delta_odrcBTmGPS # This is the sense Bill uses.
388  self.t_icugps-=delta_odrcBTmGPS # This is the sense Bill uses.
389  self.t_icugps+=self.d_ticugps # 2012/01/17 for testing
390 
391  # Now the time of the first line is the time at the location in the CSV
392  # file + the difference after since the last update?
393  # To get the time start, linearly interpolate ffpps_all into the regime
394  # between the PPSCounters (hwreg above) at the known times (t). Voila! You
395  # have the start time in seconds since the first time in this (small)
396  # HSM_CSV file.
397 
398  # hwstart is fraction of interval
399  #self.hwstart= (self.ffpps_all-self.hwreg_locs_lt[self.just_before])/(self.hwreg_locs_lt[self.just_before + 1] -
400  # self.hwreg_locs_lt[self.just_before])
401  self.hwstart= (self.ffpps_all-self.hwreg[self.idx_lt[-2]])/(self.hwreg[self.idx_lt[-1]] -
402  self.hwreg[self.idx_lt[-2]])
403 
404  # time_start is the start time of the first line wrt to the start of the
405  # "time" file
406  self.time_start = self.hwstart*(self.t_icugps[self.idx_lt[-1]] -
407  self.t_icugps[self.idx_lt[-2]]) + self.t_icugps[self.idx_lt[-2]]
408 
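 # In formula form (with r_k the PPS register readings bracketing ffpps_all
 # and t_k their broadcast GPS times):
 #   hwstart    = (ffpps_all - r_(k-1)) / (r_k - r_(k-1))
 #   time_start = t_(k-1) + hwstart * (t_k - t_(k-1))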
409  # These are the times we want; all are wrt the start time in
410  # the time file.
411  # Addition of 0.860 ms : an email on Aug. 27 by Dan Korwan explains
412  # this. Basically, the trigger happens before the image. The recorded time
413  # (register) is at the start of the pulse, but the image is not obtained
414  # until the end of the pulse, which is 0.860 ms later.
415  # hico_times = time_start + exptimes[i]*indgen(nls[i]) + 0.860d0
416  # hico_times = time_start + exptimes[i]*indgen(nls[i]) + 0.860d-3 ; 2010 Sep 2
417  # hico_times = time_start + exptimes[i]*indgen(nls[i]) + trigger_pulse_width ; 2010 Sep 3
418  # Added 1/2 frame time to better center
419  self.scanrange = np.arange(self.nls,dtype=float)
420  self.testrange = np.multiply(self.scanrange,self.exptimes)
421  self.hico_times = np.add(np.add(self.time_start, self.testrange),(self.trigger_pulse_width + 0.5*self.exptimes))
422  #self.hico_times = self.time_start + self.exptimes* + self.trigger_pulse_width + 0.5*self.exptimes # 2013Jan17
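 # Equivalently, for line i = 0 .. nls-1:
 #   hico_times[i] = time_start + i*exptimes + trigger_pulse_width + exptimes/2
 # i.e. each line time is centered within its exposure and offset by the
 # trigger pulse width described above.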
423  self.locs_goodt=(self.hsdat['USGNC_PS_Pointing_Coarse_Time_Tag'] >= (self.hico_times[0]-10)) \
424  & (self.hsdat['USGNC_PS_Pointing_Coarse_Time_Tag'] <= (self.hico_times[-1]+10))
425  self.idx_goodt = self.locs_goodt[self.locs_goodt == True].index
426 
427  if len(self.idx_goodt) == 0:
428  sys.exit('No valid USGNC Coarse times in range of hico_times\n')
429 
430  #self.u_usgnc = pd.unique(self.hsdat.loc[self.idx_goodt,'USGNC_PS_Pointing_Coarse_Time_Tag']) #.values.ravel())
431  # now only get indexes from unique times
432  self.u_usgnc = self.hsdat.loc[self.idx_goodt,'USGNC_PS_Pointing_Coarse_Time_Tag'].duplicated() #.values.ravel())
433  self.udx_usgnc = self.u_usgnc.index[self.u_usgnc == False]
434  if len(self.udx_usgnc) < 4:
435  sys.exit('Less than 4 unique USGNC times during HICO collect\n')
436 
437  u_usgnc_coarse=self.hsdat.loc[self.udx_usgnc,'USGNC_PS_Pointing_Coarse_Time_Tag'].values
438  u_usgnc_fine=self.hsdat.loc[self.udx_usgnc,'USGNC_PS_PD_Fine_Pointing_Fine_Time_Tag'].values
439  u_usgnc_rx=self.hsdat.loc[self.udx_usgnc,'USGNC_PS_Pointing_Inert_Posn_VectorX'].values
440  u_usgnc_ry=self.hsdat.loc[self.udx_usgnc,'USGNC_PS_Pointing_Inert_Posn_VectorY'].values
441  u_usgnc_rz=self.hsdat.loc[self.udx_usgnc,'USGNC_PS_Pointing_Inert_Posn_VectorZ'].values
442  u_usgnc_vx=self.hsdat.loc[self.udx_usgnc,'USGNC_PS_Pointing_Inert_Vel_VectorX'].values
443  u_usgnc_vy=self.hsdat.loc[self.udx_usgnc,'USGNC_PS_Pointing_Inert_Vel_VectorY'].values
444  u_usgnc_vz=self.hsdat.loc[self.udx_usgnc,'USGNC_PS_Pointing_Inert_Vel_VectorZ'].values
445  u_usgnc_q0=np.array(self.hsdat.loc[self.udx_usgnc,'USGNC_PS_Pointing_Current_Inert_Att_Quatrn_0'].values)
446  self.u_usgnc_q1=np.array(self.hsdat.loc[self.udx_usgnc,'USGNC_PS_Pointing_Current_Inert_Att_Quatrn_1'].values)
447  u_usgnc_q2=np.array(self.hsdat.loc[self.udx_usgnc,'USGNC_PS_Pointing_Current_Inert_Att_Quatrn_2'].values)
448  u_usgnc_q3=np.array(self.hsdat.loc[self.udx_usgnc,'USGNC_PS_Pointing_Current_Inert_Att_Quatrn_3'].values)
449 
450  self.t_issposvelquat=u_usgnc_coarse + u_usgnc_fine/256.
451  self.t_issposvelquat+=self.d_tisspvq
452  f = UnivariateSpline(self.t_issposvelquat, u_usgnc_rx)
453  self.ISSPOSITIONX = f(self.hico_times)
454  f = UnivariateSpline(self.t_issposvelquat, u_usgnc_ry)
455  self.ISSPOSITIONY = f(self.hico_times)
456  f = UnivariateSpline(self.t_issposvelquat, u_usgnc_rz)
457  self.ISSPOSITIONZ = f(self.hico_times)
458  f = UnivariateSpline(self.t_issposvelquat, u_usgnc_vx)
459  self.ISSVELOCITYX = f(self.hico_times)
460  f = UnivariateSpline(self.t_issposvelquat, u_usgnc_vy)
461  self.ISSVELOCITYY = f(self.hico_times)
462  f = UnivariateSpline(self.t_issposvelquat, u_usgnc_vz)
463  self.ISSVELOCITYZ = f(self.hico_times)
464 
465  self.issqt = np.matrix([self.u_usgnc_q1.T,u_usgnc_q2.T,u_usgnc_q3.T,u_usgnc_q0.T]).T
467 
468  for k in np.arange(0,self.issqt_hicotimes.shape[1]):
469  if any(np.isnan(self.issqt_hicotimes[:,k])):
470  sys.exit('returning: NANs detected in interpolated quaternions')
471 
472  print('HSTCLOCKTIME0=',self.hsdat['HSTCLOCKTIME0'])
473  self.hstclocktime = self.hsdat['HSTCLOCKTIME0']*0.050 + self.hsdat['HSTCLOCKTIME1'] * 62.5e-9
474  if all(t==0 for t in self.hstclocktime) or all(t==0 for t in self.hsdat['HSTCLOCKTIME0']):
475  # no star tracker telemetry?
476  nht=np.size(self.hico_times)
477  self.hstqt_status=np.array([2]*nht,dtype=int)
478  self.hstqt_hicotimes=np.zeros((nht,4),dtype='f8')
479  else:
480  # t is the elapsed times wrt to the start time in the
481  # file, at the PPS signal.
482 
483  self.hstattitudetime=self.hsdat['HSTATTITUDETIME0']*0.050 + self.hsdat['HSTATTITUDETIME1'] * 62.5e-9
484 
485  self.hstqt=np.array([self.hsdat['HSTATTITUDEQUATX'],self.hsdat['HSTATTITUDEQUATY'],
486  self.hsdat['HSTATTITUDEQUATZ'],self.hsdat['HSTATTITUDEQUATS']]).T
487  print('shapes=',self.hstattitudetime.shape,self.hstqt.shape,self.hstclocktime.shape)
488 
489  # The interp below takes the "attitude" times to the "clock times",
490  # i.e. interps the quaternions at their measured times to the times of the
491  # PPS.
492 
494  if np.any(np.isnan(self.hstqt_pps[:,3])):
495  bad=np.isnan(self.hstqt_pps)
496  self.hstqt_pps[bad]=0.0
497  print('WARNING: NANs detected in unused star tracker quaternions, part a')
498  print('reference_file = ',csvFileName)
499 
500  # the appropriate times are the ISS times according to Dan
501  # Quaternion interpolation from the PPS times to the hico_times.
502 
504  f = UnivariateSpline(self.hstattitudetime,self.hsdat['HSTATTITUDESTATUSMODE'])
505  ytemp = f(self.hstclocktime)
506  f = UnivariateSpline(self.t_issgps,ytemp)
507  self.hstqt_status = (f(self.hico_times)).astype(int)
508 
509  print('self.hstqt_status=',self.hstqt_status.shape)
510  print('hstqt_hicotimes shape=',self.hstqt_hicotimes.shape)
511  print('hstqt_hicotimes =',self.hstqt_hicotimes)
512  #self.hstqt_hicotimes[0,0] = float('NaN')
513  for k in np.arange(0,self.hstqt_hicotimes.shape[1]):
514  if any(np.isnan(self.hstqt_hicotimes[:,k])):
515  bad=np.isnan(self.hstqt_hicotimes[:,k])
516  self.hstqt_hicotimes[bad,k]=0.0
517  self.hstqt_status[bad]=2 # "test" wherever there is a NAN
518  print('WARNING: NANs detected in unused star tracker quaternions, part b')
519  print('reference_file = ',csvFileName)
520 
521  self.finish_time = datetime.now()
522 
523  def gethsdatrange(self,hsdatf,Jday_start,Jday_end):
524  from astreduc import JDay
525  import numpy as np
526  import sys
527 
528 # year = 100*hsdatf.loc[0,'ISSTIMECENTURY']+hsdatf.loc[0,'ISSTIMEYEAR']
529 # month = hsdatf.loc[0,'ISSTIMEMONTH']
530 # day = hsdatf.loc[0,'ISSTIMEDAY']
531 # hour = hsdatf.loc[0,'ISSTIMEHOUR']
532 # minute = hsdatf.loc[0,'ISSTIMEMINUTE']
533 # second = hsdatf.loc[0,'ISSTIMESECOND']
534 
535  jt0 = 0
536  Jday_now = JDay(100*hsdatf.loc[0,'ISSTIMECENTURY']+hsdatf.loc[0,'ISSTIMEYEAR'],
537  hsdatf.loc[0,'ISSTIMEMONTH'],hsdatf.loc[0,'ISSTIMEDAY'],
538  hsdatf.loc[0,'ISSTIMEHOUR'],hsdatf.loc[0,'ISSTIMEMINUTE'],
539  hsdatf.loc[0,'ISSTIMESECOND'])
540  jtend = len(hsdatf)
541  while (jt0 < jtend) and (Jday_now < Jday_start):
542  jt0+=1
543  Jday_now = JDay(100*hsdatf.loc[jt0,'ISSTIMECENTURY']+hsdatf.loc[jt0,'ISSTIMEYEAR'],
544  hsdatf.loc[jt0,'ISSTIMEMONTH'],hsdatf.loc[jt0,'ISSTIMEDAY'],
545  hsdatf.loc[jt0,'ISSTIMEHOUR'],hsdatf.loc[jt0,'ISSTIMEMINUTE'],
546  hsdatf.loc[jt0,'ISSTIMESECOND'])
547  if jt0 >= jtend:
548  sys.exit('CSV file out of time range')
549 
550  jtE = jt0
551  while (jtE < jtend) and (Jday_now <= Jday_end):
552  jtE+=1
553  Jday_now = JDay(100*hsdatf.loc[jtE,'ISSTIMECENTURY']+hsdatf.loc[jtE,'ISSTIMEYEAR'],
554  hsdatf.loc[jtE,'ISSTIMEMONTH'],hsdatf.loc[jtE,'ISSTIMEDAY'],
555  hsdatf.loc[jtE,'ISSTIMEHOUR'],hsdatf.loc[jtE,'ISSTIMEMINUTE'],
556  hsdatf.loc[jtE,'ISSTIMESECOND'])
557  print('Hr/Min/Sec=',hsdatf.loc[jtE,'ISSTIMEHOUR'],hsdatf.loc[jtE,'ISSTIMEMINUTE'],
558  hsdatf.loc[jtE,'ISSTIMESECOND'])
559  if jtE > jtend:
560  sys.exit('CSV file out of time range')
561 
562 
563  self.jtB = jt0
564  self.jtE = jtE+2
565 
566  return hsdatf[self.jtB:self.jtE].set_index(np.arange(0,self.jtE-self.jtB)).copy()
567 
568  def createCSVInfo(self):
569 
570  import getpass
571  import socket,os,platform
572 
573 
574  csvinfo = {}
575  csvinfo['This file name']=os.path.basename(self.outname)
576  csvinfo['Source CSV file']=self.csvFileName
577  csvinfo['Subset CSV file']='None'
578  csvinfo['Expected Lon/Lat/View angle filename']=os.path.basename(self.anglename)
579  csvinfo['Epoch']='2009 Jan 01 00:00:00 UTC'
580  csvinfo['Requested number of pixels']='{0:4d}'.format(self.n_pixels)
581  csvinfo['Distance Unit']='Feet'
582  csvinfo['Central Body']='Earth'
583  csvinfo['CoordinateSystem']='J2000'
584  csvinfo['Theta (degrees from stowed position)']='{}'.format(self.thetas)
585 
586  csvinfo['Code name'] = code_name
587  csvinfo['Code version'] = code_version
588  csvinfo['Code date'] = code_date
589  csvinfo['Code author'] = code_author
590  csvinfo['Code executed on computer'] = socket.gethostname()
591  csvinfo['Code executed by username'] = getpass.getuser()
592  csvinfo['Code run under Python osfamily'] = os.name
593  csvinfo['Code run under Python os'] = platform.release()
594  csvinfo['Code start time'] = self.begin_time.strftime("%H:%M:%S")
595  csvinfo['Code end time'] = self.finish_time.strftime("%H:%M:%S")
596  csvinfo['Exposure interval (frame time)'] = '{}'.format(self.exptimes)
597  csvinfo['ISS orientation']='{}'.format(self.orientation)
598  csvinfo['Trigger pulse width (s)']='{}'.format(self.trigger_pulse_width)
599  csvinfo['ODRC broadcast time - gps time (s)']='{}'.format(self.delta_odrcBTmGPS)
600  csvinfo['delta_ticugps (s)']='{}'.format(self.d_ticugps)
601  csvinfo['delta_tisspvq (s)']='{}'.format(self.d_tisspvq)
602 
603  self.csvinfo = csvinfo
604 
605  return
606 
607  def writeCSVfile(self):
608  import numpy as np
609 
610  fcsv = open(self.outname,"w")
611 
612  fcsv.write('\n')
613 
614  for key in sorted(self.csvinfo.keys()):
615  fcsv.write('{}, {}\n'.format(key,self.csvinfo[key]))
616 
617  fcsv.write('\n')
618  fcsv.write('SecondsSinceEpoch, ISSPOSX, ISSPOSY, ISSPOSZ, ISSVELX, ISSVELY, ISSVELZ, ISSQX, ISSQY, ISSQZ, ISSQS, STQX, STQY, STQZ, STQS, HST_ATTITUDE_STATUS\n')
619 
620  ofmt='{:20.6f},'*7 +'{:20.8f},'*8 +'{:03d}\n'
621  for iqq in np.arange(0,len(self.hico_times)):
622  fcsv.write(ofmt.format(self.hico_times[iqq]-self.gps_seconds_2009Jan01_00_00_00,
623  self.ISSPOSITIONX[iqq],
624  self.ISSPOSITIONY[iqq],
625  self.ISSPOSITIONZ[iqq],
626  self.ISSVELOCITYX[iqq],
627  self.ISSVELOCITYY[iqq],
628  self.ISSVELOCITYZ[iqq],
629  self.issqt_hicotimes[iqq][0],
630  self.issqt_hicotimes[iqq][1],
631  self.issqt_hicotimes[iqq][2],
632  self.issqt_hicotimes[iqq][3],
633  self.hstqt_hicotimes[iqq][0],
634  self.hstqt_hicotimes[iqq][1],
635  self.hstqt_hicotimes[iqq][2],
636  self.hstqt_hicotimes[iqq][3],
637  self.hstqt_status[iqq]))
638 
639 
640  fcsv.close()
641 
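 # Row layout written above (matching the header line): seven {:20.6f}
 # fields (seconds since epoch, ISS position XYZ, ISS velocity XYZ), eight
 # {:20.8f} fields (ISS quaternion x,y,z,s then star-tracker quaternion
 # x,y,z,s), and one {:03d} attitude-status flag.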
642 
643  @staticmethod
644  def time_to_seconds(hh,nn,ss,subsec):
645 
646 
647  # This is for HICO, CC will be constant.
648  # This unwritten subroutine is called to get times in seconds for the
649  # HSM_CSV file. At most there is one day turnover - that is the ONLY
650  # important turnover for us. This is where HH goes from 23 to 00 since the
651  # observations are only ~30 seconds long. Don't care about cc,yy,mm,dd
652 
653  # hh,nn,ss,subsec are input arrays of the same length.
654  # elapsed is the output array of the same length.
655 
656 
657  hhturn = hh < hh[0]
658  count = hhturn.sum()
659  if count > 0:
660  hh[hhturn]+=24
661 
662  secs_in_day = subsec*1.e-6 + ss + 60.*(nn + hh*60.)
663  return secs_in_day-secs_in_day[0]
664 
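 # Illustrative sketch (hypothetical values): a collect that crosses
 # midnight, e.g. hh = [23, 23, 0, 0], is unwrapped to [23, 23, 24, 24]
 # before converting, so the elapsed seconds stay monotonic:
 #   hh = np.array([23, 23, 0, 0]); nn = np.array([59, 59, 0, 0])
 #   ss = np.array([58, 59, 0, 1]); subsec = np.zeros(4)
 #   Hico.time_to_seconds(hh, nn, ss, subsec)   # -> [0., 1., 2., 3.]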
688 
689  @staticmethod
690  def getTimeStruct(date,time):
691  import numpy as np
692 
693  return {"century":int(date[0]//100),"year":(date[0] % 100),"month":date[1],
694  "day":date[2],"hour":int(time[0]),
695  "minute":int(time[1]),"second":int(time[2])}
696 
697  @staticmethod
698  def timeIncrement(date,time):
699  from astreduc import isLeapYear
700  import numpy as np
701 
702  year = date[0]
703  month = date[1]
704  day = date[2]
705  hour = time[0]
706  minute= time[1]
707  second= time[2]
708 
709  if second >= 60:
710  mmm = np.floor(second/60)
711  second = np.mod(second,60)
712  minute += mmm
713 
714  while second < 0:
715  second += 60
716  minute -= 1
717 
718  if minute >= 60:
719  hhh = np.floor(minute/60)
720  minute = np.mod(minute,60)
721  hour+=hhh
722 
723  while minute < 0:
724  minute += 60
725  hour -= 1
726 
727  if hour >= 24:
728  ddd = np.floor(hour/24)
729  hour = np.mod(hour,24)
730  day+=ddd
731 
732  while hour < 0:
733  hour += 24
734  day -= 1
735 
736  while month > 12:
737  month -= 12
738  year += 1
739 
740  while month < 1:
741  month += 12
742  year -= 1
743 
744  last=np.array([31,31,28+isLeapYear(year),31,30,31,30,31,31,30,31,30,31,31])
745 
746  while day > last[month]:
747  day-=last[month]
748  month+=1
749  if month > 12:
750  month-=12
751  year+=1
752  last=np.array([31,31,28+isLeapYear(year),31,30,31,30,31,31,30,31,30,31,31])
753 
754  while day < 1:
755  day+=last[month-1]
756  month-=1
757  if month < 1:
758  month+=12
759  year-=1
760  last=np.array([31,31,28+isLeapYear(year),31,30,31,30,31,31,30,31,30,31,31])
761 
762  while month > 12:
763  month-=12
764  year+=1
765 
766  while month < 1:
767  month+=12
768  year-=1
769 
770  return np.array([year,month,day]),np.array([hour,minute,second])
771 
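 # Illustrative sketch (hypothetical values, assumes astreduc is importable):
 # subtracting 10 s at midnight rolls the date back one day, e.g.
 #   Hico.timeIncrement(np.array([2013, 3, 9]), np.array([0, 0, -10]))
 #   # -> (array([2013, 3, 8]), array([23, 59, 50]))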
772 
773 def writeHicoTimes(hico):
774  hico.createCSVInfo()
775  print(hico.issqt_hicotimes.shape,hico.hstqt_hicotimes.shape)
776 # print('a={:20.8f},b={:20.8f},c={:20.8f},d={:20.8f},e={:02d}'.format(hico.hstqt_hicotimes[0][0],hico.hstqt_hicotimes[0][1],hico.hstqt_hicotimes[0][2],hico.hstqt_hicotimes[0][3],hico.hstqt_status[0]))
777  hico.writeCSVfile()
778 
779  return
780 
781 def get_odrc_time_offset(filename):
782  import sys
783  import numpy as np
784  for theLine in open(filename,'r') :
785  fields = theLine.split('=')
786  if len(fields) > 1 and 'odrc_time_offset' in fields[0]:
787  return float(fields[1])
788 
789  sys.exit('odrc_time_offset not in file {}'.format(filename))
790 
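# Illustrative sketch (hypothetical header line, not from a real file): the
# function scans 'name = value' lines of the L0 .hdr file and returns the
# odrc_time_offset value, e.g. a line such as
#   odrc_time_offset = 2.120000
# would make get_odrc_time_offset(hdrfile) return 2.12.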
791 if __name__ == '__main__':
792  import argparse
793 
794  parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter, description='''\
795  Generate a HICO geometry file.
796 
797  ''', add_help=True)
798  parser.add_argument('-ifile', nargs=1, type=str, help=' iss*hico.bil input file (Must be a BIL file) ')
799  parser.add_argument('-csvfile', nargs=1, type=str, help=' csv file ')
800  parser.add_argument('-hdr', nargs=1, type=str, help=' header file ')
801  parser.add_argument('-orient', default=('-XVV'),nargs=1, type=str, help=' iss orientation file ')
802  args = parser.parse_args()
803 
804  ifile = ''.join(args.ifile)
805  csvfile = ''.join(args.csvfile)
806  hdrfile = ''.join(args.hdr)
807  orient = ''.join(args.orient)
808 
809  odrc_time_offset = get_odrc_time_offset(hdrfile)
810 
811  newH = Hico(ifile,csvfile,odrc_time_offset,orient)
812 
813  writeHicoTimes(newH)
814 
815 # print(newH.issqt_hicotimes[:,2])
816 # print(newH.start_date)
817 # print(newH.start_time)
818 # print(newH.start_struct)
819 # print(newH.end_struct)
820 # print(newH.L0["start_time"])
821 #
822 # print('Julday=',newH.Jday_start)
823 # print(newH.cexp)
824 # print(newH.ffpps_all)
825 # print(newH.t_icu)
826 # print(newH.hsdat['ICUTIMEISSSECOND'].values)
827 # print(newH.ffpps_all)
828 # print(newH.hwstart)
829 # idx = newH.locs_lt.index[newH.locs_lt == True]
830 # print(idx[-2])
831 # print(newH.time_start)
832 # print(newH.scanrange)
833 # print(newH.testrange)
834 # print(newH.hico_times)
835 # #print(newH.idx_goodt.values)
836 # print(newH.udx_usgnc.values)
837 # print(newH.ISSPOSITIONX)
838 # print(newH.ISSPOSITIONX)
839 # print('u_usgnc_q1=',newH.u_usgnc_q1)
840 # print('u_usgnc_q1=',len(newH.u_usgnc_q1))
841 # print("shape issqt=",newH.issqt.shape)
842 # print(newH.issqt[0,1])
843 # shp=newH.issqt.shape
844 # aout=np.zeros((2,4,47,5,188))
845 # print("shape=",shp,aout.shape)
846 # print(aout[0].shape)
847 # print(len(newH.issqt))
848 # print('hico times=',newH.issqt_hicotimes)
849 # print('hstclocktime=',newH.hstclocktime)
850 # s=newH.hsdat[:10].copy()
851 # print(s)
852 # print(newH.jtB,newH.jtE)