1
2
3
4 """
5 Check the fits headers against CASU files and add those missing with data
6 from the database (use with pyfits >3.1.0).
7
8 Update pix and cats separately, uncomment the outDir and fitsDir
9 accordingly. Get the list of missing keys by running with the -M option
10 and update DataResults and fitsKeyDict.
11 Run a test (-t) on a slice of fileList first. If the code breaks, don't
12 rerun on processed files, find the new start point first.
13 Run a final check on headers.final.*: First copy CASU headers into
14 headers.final.*, then read new headers with -F -H, then get the differences
15 with -F -D.
16
17 @author: E. Sutorius
18 @org: WFAU, IfA, University of Edinburgh
19
20 @newfield contributors: Contributors, Contributors (Alphabetical Order)
21 @contributors: R.S. Collins
22 """
23
24 from collections import defaultdict, namedtuple
25 import dircache
26 import os
27 import re
28 import shutil
29 import time
30
31 from invocations.cu1.cu1Transfer import CASUQueries
32 from wsatools.CLI import CLI
33 from wsatools.DbConnect.DbSession import DbSession, Join
34 from wsatools.File import File
35 import wsatools.FitsUtils as fits
36 from wsatools.Logger import ForLoopMonitor, Logger
37 from wsatools.SystemConstants import SystemConstants
38 import wsatools.Utilities as utils
42 """ Checks FITS files for missing multiframe IDs or WSATimestamps.
43 """
44
45
46
47
48 archive = None
49 database = None
50 fitsKeys = defaultdict()
51 fitsKeys["Multiframe"] = ["multiframeID", "fileTimeStamp"]
52 extractHeaders =False
53 fixFixed =False
54 finalCheck = False
55 onlyBackupFiles = False
56 onlyMissing = False
57 onlyTime = False
58 pixOrCat = None
59 printDiffs = False
60
61 DataResults = defaultdict()
62
63
64 fitskeyDict = {"VSA":{
65 "pix": {
66 "DECZP01": "CurrentAstrometry.refDecShift01",
67 "DECZP12": "CurrentAstrometry.refDecShift12",
68 "DECZP22": "CurrentAstrometry.refDecShift22",
69 "DRIBBLE": "MultiframeDetector.dribble",
70 "ELLIPTIC": "MultiframeDetector.avStellarEll",
71 "EXTINCT": "MultiframeDetector.extinctionExt",
72 "MAGZPT": "MultiframeDetector.photZPCat",
73 "MAGZRR": "MultiframeDetector.photZPErrCat",
74 "NICOMB": "MultiframeDetector.nCombine",
75 "PERCORR": "MultiframeDetector.skyCorrExt",
76 "RAZP01": "CurrentAstrometry.refRaShift01",
77 "RAZP12": "CurrentAstrometry.refRaShift12",
78 "RAZP22": "CurrentAstrometry.refRaShift22",
79 "SEEING": "MultiframeDetector.seeing",
80 "WCSPASS": "CurrentAstrometry.wcsPass"
81 },
82 "cat": {
83 "APCOR7": "MultiframeDetector.aperCor7",
84 "CROWDED": "MultiframeDetector.crowdedFlag",
85 "DECZP01": "CurrentAstrometry.refDecShift01",
86 "DECZP12": "CurrentAstrometry.refDecShift12",
87 "DESTRIPE": "MultiframeDetector.destripe",
88 "DRIBBLE": "MultiframeDetector.dribble",
89 "EBMVMED": "MultiframeDetector.ebmvMed",
90 "ELLIPTIC": "MultiframeDetector.avStellarEll",
91 "EXTINCT": "MultiframeDetector.extinctionCat",
92 "FILTFWHM": "MultiframeDetector.smoothing",
93 "MAGZPT": "MultiframeDetector.photZPCat",
94 "MAGZRR": "MultiframeDetector.photZPErrCat",
95 "MED_PA": "MultiframeDetector.medPa",
96 "MINPIX": "MultiframeDetector.minImageSize",
97 "NBSIZE": "MultiframeDetector.nbsize",
98 "NDITCOR": "MultiframeDetector.nditcor",
99 "NIGHTNUM": "MultiframeDetector.nightZPNum",
100 "NIGHTZPT": "MultiframeDetector.nightZPCat",
101 "NIGHTZRR": "MultiframeDetector.nightZPErrCat",
102 "NUMZPT": "MultiframeDetector.numZPCat",
103 "NYOUT": "MultiframeDetector.ySize",
104 "PERCORR": "MultiframeDetector.skyCorrCat",
105 "RAZP01": "CurrentAstrometry.refRaShift01",
106 "RAZP12": "CurrentAstrometry.refRaShift12",
107 "RCORE": "MultiframeDetector.coreRadius",
108 "SEEING": "MultiframeDetector.seeing",
109 "SKYLEVEL": "MultiframeDetector.skyLevel",
110 "SKYNOISE": "MultiframeDetector.skyNoise",
111 "STDCRMS": "CurrentAstrometry.stdCRms",
112 "STRPRMS": "MultiframeDetector.strprms",
113 "WCSPASS": "CurrentAstrometry.wcsPass"
114 }},
115 "WSA":{
116 "pix": {
117 "CIRMED": "MultiframeDetector.CIRMED",
118 "CIR_BVAR": "MultiframeDetector.CIR_BVAR",
119 "CIR_SCAL": "MultiframeDetector.CIR_SCAL",
120 "CIR_XOFF": "MultiframeDetector.ditherOffsetX",
121 "CIR_YOFF": "MultiframeDetector.ditherOffsetY",
122 "CIR_ZERO": "MultiframeDetector.CIR_ZERO",
123 "CURTNRNG": "MultiframeDetector.decurtainRange",
124 "DECURTN": "MultiframeDetector.decurtFlag",
125 "DECZP02": "CurrentAstrometry.refDecShift02",
126 "ELLIPTIC": "MultiframeDetector.avStellarEll",
127 "EXTINCT": "MultiframeDetector.extinctionExt",
128 "MAGZPT": "MultiframeDetector.photZPCat",
129 "MAGZRR": "MultiframeDetector.photZPErrCat",
130 "NUMBRMS": "CurrentAstrometry.numRms",
131 "RAZP02": "CurrentAstrometry.refRaShift02",
132 "READNOIS": "MultiframeDetector.readNoise",
133 "SEEING": "MultiframeDetector.seeing",
134 "SKYLEVEL": "MultiframeDetector.skyLevel",
135 "SKYNOISE": "MultiframeDetector.skyNoise",
136 "STDCRMS": "CurrentAstrometry.stdCRms",
137 "WCSPASS": "CurrentAstrometry.wcsPass",
138 "XTALK": "MultiframeDetector.xTalkFlag"
139 },
140 "cat": {
141 "CAMPOWER": "MultiframeDetector.camPower" ,
142 "CAMROLE": "MultiframeDetector.camRole" ,
143 "CAPPLICN": "MultiframeDetector.readOutApp" ,
144 "CC_PRES": "Multiframe.cryostPres" ,
145 "CIRMED": "MultiframeDetector.CIRMED" ,
146 "CIR_BVAR": "MultiframeDetector.CIR_BVAR" ,
147 "CIR_SCAL": "MultiframeDetector.CIR_SCAL" ,
148 "CIR_XOFF": "MultiframeDetector.ditherOffsetX" ,
149 "CIR_YOFF": "MultiframeDetector.ditherOffsetY" ,
150 "CIR_ZERO": "MultiframeDetector.CIR_ZERO" ,
151 "CNFINDEX": "MultiframeDetector.configIndex" ,
152 "CURTNRNG": "MultiframeDetector.decurtainRange" ,
153 "DECURTN": "MultiframeDetector.decurtFlag" ,
154 "DET_TEMP": "MultiframeDetector.detectorTemp" ,
155 "EXTINCT": "MultiframeDetector.extinctionCat" ,
156 "FILTFWHM": "MultiframeDetector.smoothing" ,
157 "GAIN": "MultiframeDetector.gain" ,
158 "NIGHTNUM": "MultiframeDetector.nightZPNum" ,
159 "NIGHTZPT": "MultiframeDetector.nightZPCat" ,
160 "NIGHTZRR": "MultiframeDetector.nightZPErrCat" ,
161 "NUMZPT": "MultiframeDetector.numZPCat" ,
162 "NYOUT": "MultiframeDetector.ySize" ,
163 "PCSYSID": "MultiframeDetector.pcSysID" ,
164 "PERCORR": "MultiframeDetector.skyCorrCat" ,
165 "PIXLSIZE": "MultiframeDetector.pixelScale" ,
166 "RDOUT_X1": "MultiframeDetector.startCol" ,
167 "RDOUT_X2": "MultiframeDetector.endCol" ,
168 "RDOUT_Y1": "MultiframeDetector.startRow" ,
169 "RDOUT_Y2": "MultiframeDetector.endRow" ,
170 "READNOIS": "MultiframeDetector.readNoise" ,
171 "READOUT": "MultiframeDetector.camReadOut" ,
172 "RUNID": "MultiframeDetector.runID" ,
173 "SC_TEMP": "Multiframe.scTemp" ,
174 "SDSUID": "MultiframeDetector.sdsuNum" ,
175 "SEEING": "MultiframeDetector.seeing" ,
176 "SKYLEVEL": "MultiframeDetector.skyLevel" ,
177 "SKYNOISE": "MultiframeDetector.skyNoise" ,
178 "STDCRMS": "CurrentAstrometry.stdCRms" ,
179 "WCSPASS": "CurrentAstrometry.wcsPass" ,
180 "XTALK": "MultiframeDetector.xTalkFlag" ,
181 }}}
182
183
184
186 """ Runs the FITS file check.
187 """
188 self.outDir = "headers.%s" % self.pixOrCat.lower()
189 self.fitsDir = "%ss.upd" % self.pixOrCat.lower()
190 if self.finalCheck:
191 self.outDir = "headers.final.%s" % self.pixOrCat.lower()
192 if self.isTrialRun:
193 self.fitsDir = "%s.test" % self.fitsDir
194 utils.ensureDirExist(self.outDir)
195 utils.ensureDirExist(self.fitsDir)
196
197
198 fileSuffix = (".fit" if self.pixOrCat == "pix" else ".fits")
199
200 Logger.addMessage("Getting mfIDs and filenames...")
201 mfidDict = defaultdict()
202 minVersNum = (10 if self.archive.sysc.isVSA() else 169)
203 result = self._getDbData(["PreviousMfDZP.multiframeID"],
204 "versNum >= %d" % minVersNum,
205 pixcat=self.pixOrCat)
206
207 origPathDict = defaultdict(set)
208 for i in result:
209 shortName = os.path.splitext(os.path.split(
210 i[0].rpartition(':')[2])[1])[0]
211 origPathDict[shortName].add(i[0].rpartition(':')[2])
212
213
214 counter = 0
215 for shortName in origPathDict:
216 if len(origPathDict[shortName]) > 1:
217 print "WARNING: %s: %s" % (shortName,
218 ','.join(origPathDict[shortName]))
219 counter += 1
220 if counter > 0:
221 raise SystemExit()
222
223 if self.isTrialRun:
224 mfidDict.update(dict((os.path.join(self.fitsDir, os.path.basename(
225 i[0].rpartition(':')[2])), i[1]) for i in result))
226 else:
227 mfidDict.update(dict((i[0].rpartition(':')[2], i[1])
228 for i in result))
229
230
231 filePathDict = defaultdict()
232 for fileName in mfidDict:
233 shortName = os.path.splitext(os.path.split(fileName)[1])[0]
234 filePathDict[shortName] = fileName
235
236
237 if self.extractHeaders:
238 Logger.addMessage("Getting headers...")
239 self.getHeaders(mfidDict)
240 raise SystemExit()
241
242 if self.isTrialRun:
243 fileList = sorted(filePathDict)[:2]
244 else:
245 fileList = sorted(filePathDict)[:5251]
246
247
248 if self.isTrialRun or not (self.finalCheck or self.printDiffs \
249 or self.fixFixed or self.onlyTime):
250 Logger.addMessage("Copying files into %s..." % self.fitsDir)
251 progress = ForLoopMonitor(fileList)
252 for fileName in fileList:
253 if not os.path.exists(os.path.join(
254 self.fitsDir, os.path.basename(filePathDict[fileName]))):
255 shutil.copy2(list(origPathDict[fileName])[0], self.fitsDir)
256 progress.testForOutput()
257
258 if self.onlyBackupFiles:
259 raise SystemExit()
260
261 if self.onlyTime:
262 timeFileList = fileList[:]
263 fileList = []
264
265
266 keyDict = defaultdict(lambda : defaultdict(lambda : defaultdict(list)))
267 missKeySet = set()
268
269 Logger.addMessage("Reading header infos from %s..." % self.outDir)
270 progress = ForLoopMonitor(fileList)
271 for fileName in fileList:
272 KeySets = defaultdict(set)
273 for x in ["casu", "wfau"]:
274 headFile = File(os.path.join(
275 self.outDir, "%s.%s.head" % (fileName, x)))
276 headFile.ropen()
277 lines = headFile.readlines()
278 headFile.close()
279 baseName, cw = headFile.root.rpartition('.')[::2]
280 lineNum = 0
281 for line in lines:
282 if "Header listing for HDU" in line:
283 hduNum = line.rpartition('#')[2].replace(':', '')
284 lineNum = 0
285 elif len(line) > 0:
286 if line.startswith("HISTORY"):
287 key, val = line.partition("HISTORY")[1:]
288 KeySets[cw].add(key)
289 keyDict[baseName][cw][hduNum].append(
290 [key, val, '', lineNum])
291 else:
292 if line.count('/') > 1 and "/home/" in line:
293 keyval, descr = line.rpartition('/')[::2]
294 else:
295 keyval, descr = line.partition('/')[::2]
296 key, val = keyval.partition('=')[::2]
297 KeySets[cw].add(key)
298 keyDict[baseName][cw][hduNum].append(
299 [key.strip(), val.strip().strip("'"),
300 descr.strip(), lineNum])
301 lineNum += 1
302 del headFile
303 if self.onlyMissing:
304 print "missing WFAU keys:", KeySets["casu"] - KeySets["wfau"]
305 missKeySet.update(KeySets["casu"] - KeySets["wfau"])
306 progress.testForOutput()
307
308 if self.onlyMissing:
309 print "All missing WFAU keys:", sorted(missKeySet)
310 raise SystemExit()
311
312
313 diffDict = defaultdict(lambda : defaultdict(list))
314 diffSet = set()
315
316 Logger.addMessage("Creating diff dict...")
317 progress = ForLoopMonitor(fileList)
318 for fileName in fileList:
319 for hduNum in sorted(keyDict[fileName]["casu"]):
320 wfauKeys = [x[0] for x in keyDict[fileName]["wfau"][hduNum]
321 if not "HISTORY" in x[0]]
322 wfauHist = [x[1].strip()
323 for x in keyDict[fileName]["wfau"][hduNum]
324 if "HISTORY" in x[0]]
325 for entry in keyDict[fileName]["casu"][hduNum]:
326 if entry[0] not in wfauKeys and \
327 entry not in keyDict[fileName]["wfau"][hduNum]:
328 if entry[0] == "HISTORY":
329 if entry[1].strip() not in wfauHist:
330 diffDict[fileName][int(hduNum)].append(
331 entry)
332 else:
333 diffDict[fileName][int(hduNum)].append(entry)
334 diffSet.add(entry[0])
335
336 progress.testForOutput()
337
338 if self.printDiffs:
339 outFile = File("pydiff.out")
340 outFile.wopen()
341 for fileName in diffDict:
342 for hduNum in sorted(diffDict[fileName]):
343 outFile.writetheline("## " + fileName + "[%d]"% hduNum)
344 for x in diffDict[fileName][hduNum]:
345 outFile.writetheline(repr(x))
346 outFile.close()
347 Logger.addMessage("diffDict written to %s" % outFile.name)
348 raise SystemExit()
349
350
351 if not self.onlyTime:
352 queryDict = defaultdict()
353 queryAttr = ["Multiframe.multiframeID", "CurrentAstrometry.extNum",
354 "MultiframeDetector.extNum"] + \
355 [self.fitskeyDict[self.database][self.pixOrCat][x]
356 for x in self.DataResults[self.pixOrCat]._fields[4:]]
357
358 mfidList = [str(mfidDict[filePathDict[x]]) for x in fileList]
359 dbDataDict =defaultdict(lambda : defaultdict())
360
361 result = self._getDbData(
362 queryAttr, " AND ".join([
363 "MultiframeDetector.extNum=CurrentAstrometry.extNum",
364 "Multiframe.multiframeid<0"]),
365 pixcat=self.pixOrCat, resTuple=self.DataResults[self.pixOrCat])
366
367 for entry in result:
368 dbDataDict["default"][0] = entry
369
370 queryAttr = ["PreviousMfDZP.multiframeID",
371 "CurrentAstrometry.extNum",
372 "MultiframeDetector.extNum"] + \
373 [self.fitskeyDict[self.database][self.pixOrCat][x]
374 for x in self.DataResults[self.pixOrCat]._fields[4:]]
375
376 result = self._getDbData(
377 queryAttr, " AND ".join([
378 "versNum >= %d" % minVersNum,
379 "MultiframeDetector.extNum=CurrentAstrometry.extNum",
380 "Multiframe.multiframeid in (%s)" % (','.join(mfidList))]),
381 pixcat=self.pixOrCat, resTuple=self.DataResults[self.pixOrCat])
382
383 for entry in result:
384 dbDataDict[os.path.splitext(os.path.split(entry[0])[1])[0]][
385 entry[2]] = entry
386
387 Logger.addMessage("Fixing headers...")
388 timeStampDict = defaultdict()
389 progress = ForLoopMonitor(fileList)
390 noHistFiles = []
391 if self.onlyTime:
392 fileList = timeFileList[:]
393 for fileName in fileList:
394 Logger.addMessage("Reading %s..." % filePathDict[fileName])
395 hduList = fits.open(filePathDict[fileName], "update")
396
397
398
399 extNum = 0
400 try:
401 timeStampDict[filePathDict[fileName]] = hduList[extNum].header[
402 "%s_TIME"% self.archive.sysc.loadDatabase]
403 if not timeStampDict[filePathDict[fileName]].isdigit():
404 raise KeyError
405 except KeyError:
406 mfid = mfidDict[filePathDict[fileName]]
407 timeStampDict[filePathDict[fileName]] = "%018d" % \
408 self.archive.query("fileTimeStamp", "Multiframe",
409 "multiframeid=%d" % mfid)[0]
410
411 if self.onlyTime:
412 hduNumList = []
413 else:
414 hduNumList = sorted(diffDict[fileName])
415
416
417 for hduNum in hduNumList:
418 hdr = hduList[hduNum-1].header
419 histList = []
420 symbList = []
421 symbMinDef = 1000000
422
423 if self.fixFixed:
424 for attr, value, descriptor, lineNum in diffDict[
425 fileName][hduNum]:
426 if attr in self.fitskeyDict[self.database][
427 self.pixOrCat]:
428 if getattr(dbDataDict[fileName][hduNum],attr) \
429 == getattr(dbDataDict["default"][0], attr):
430 hdr[attr] = (value, descriptor)
431 else:
432 hdr[attr] = (
433 getattr(dbDataDict[fileName][hduNum], attr),
434 descriptor)
435 elif attr == "HISTORY":
436 pass
437 elif "SYMBOL" in attr:
438 pass
439 else:
440 hdr[attr] = (value, descriptor)
441 else:
442 for attr, value, descriptor, lineNum in diffDict[
443 fileName][hduNum]:
444 value = self.formatValue(value)
445 if attr in self.fitskeyDict[self.database][
446 self.pixOrCat]:
447 if getattr(dbDataDict[fileName][hduNum], attr) \
448 == getattr(dbDataDict["default"][0], attr):
449 hdr.append((attr, value, descriptor))
450 else:
451 hdr.append((
452 attr, getattr(dbDataDict[fileName][hduNum],
453 attr), descriptor))
454 elif attr == "HISTORY":
455 histList.append(value)
456 elif "SYMBOL" in attr:
457 symbList.append((attr, value, descriptor))
458 else:
459 hdr.append((attr, value, descriptor))
460
461 if symbList:
462 minNum = int(min(i[0].replace("SYMBOL", '')
463 for i in symbList))
464
465 for i, entry in enumerate(symbList):
466 if minNum - 1 == 0:
467 pos = hdr.index("COMMENT")
468 for x in hdr["COMMENT"]:
469 if "Symbolic translation for GAIA" in x:
470 symbMinDef = hdr.index("COMMENT", pos)
471 pos += 1
472 hdr.insert(symbMinDef+1+i, entry)
473 else:
474 hdr.insert(hdr.index(
475 "SYMBOL%d" % (minNum-1))+1+i, entry)
476
477
478 if histList:
479 histList.reverse()
480 for entry in histList:
481 hdr.add_history(entry, before="HISTORY")
482 hduList.flush(verbose=True)
483
484
485 head = []
486 regex = re.compile(
487 r'\d\d:\d\d:\d\d at \d\d\d\d\d\d\d\d \d\d:\d\d:\d\d')
488 counter = 0
489 delindex = []
490 if "HISTORY" in hdr:
491 for histentry in hdr["HISTORY"]:
492 if regex.match(histentry):
493 delindex.append(counter)
494 counter += 1
495 for i in delindex:
496 del hdr[("HISTORY", delindex[0])]
497 else:
498 noHistFiles.append((fileName,hduNum))
499 hduList.close()
500 progress.testForOutput()
501
502 Logger.addMessage("Resetting file timestamp...")
503 progress = ForLoopMonitor(timeStampDict)
504 for fileName in timeStampDict:
505
506 try:
507 fits.updateFitsStamp(fileName, timeStampDict[fileName])
508 except ValueError:
509 print fileName, timeStampDict[fileName]
510 progress.testForOutput()
511 if noHistFiles:
512 Logger.addMessage("Files without HISTORY: (%s)" % ','.join(
513 noHistFiles))
514
515
516
517 @staticmethod
523
524
525
527 """ get headers from CASU and WFAU and write them into files
528 """
529 headerHereDict = defaultdict(list)
530 headerCasuDict = defaultdict(list)
531 haveCasuDates = False
532 progress = ForLoopMonitor(mfidDict)
533 for fileName in mfidDict:
534 theFile = File(fileName)
535
536 outWfauHeader = os.path.join(
537 self.outDir, theFile.base.replace(theFile.ext, ".wfau.head"))
538 if not os.path.exists(outWfauHeader):
539 os.system('lllisthead %s > %s' % (theFile.name, outWfauHeader))
540
541
542 outCasuHeader = os.path.join(
543 self.outDir, theFile.base.replace(theFile.ext, ".casu.head"))
544 if not os.path.exists(outCasuHeader) and "final" in self.outDir:
545 shutil.copy2(outCasuHeader.replace(".final", ''), outCasuHeader)
546 if not os.path.exists(outCasuHeader):
547 if not haveCasuDates:
548 CASUQueries.sysc = self.archive.sysc
549 casuDateDict = CASUQueries.getCasuDateDirs()
550 haveCasuDates = True
551 if "hpn-ssh: not found" in casuDateDict.keys()[0]:
552 Logger.addMessage(
553 "To get CASU headers run this code on menkaure "
554 "with -H and copy the files over.")
555 raise SystemExit()
556 sshCmd = "ssh wfcam@10.0.1.7"
557 dateStr = theFile.subdir.partition('_v')[0]
558 casuPath = casuDateDict[dateStr]
559 os.system("%s '/home/wfcam/test/listhead %s' > %s" % (
560 sshCmd, os.path.join(casuPath, dateStr, theFile.base),
561 outCasuHeader))
562
563 del theFile
564 progress.testForOutput()
565
566
567
569 """ Read attributes from the database.
570 """
571 tables = list(set(["Multiframe"] + \
572 [x.partition('.')[0] for x in attributes]))
573 theSelect = ("Multiframe.catName" if pixcat == "cat" else
574 "Multiframe.fileName")
575 for attr in attributes:
576 theSelect = ','.join([theSelect, attr])
577
578 if len(tables) > 1:
579 theFrom = Join(tables, ["multiframeID"])
580 else:
581 theFrom = tables
582
583 result = self.archive.query(selectStr=theSelect,
584 fromStr=theFrom,
585 whereStr=theWhere,
586 ResultsTuple=resTuple)
587 return result
588
589
590
591 if __name__ == '__main__':
592
593 CLI.progArgs += [
594 CLI.Argument("pixorcat", "pix",
595 isValOK=lambda x: x.lower() in ['pix','cat'])
596 ]
597 CLI.progOpts += [
598 CLI.Option("B", "backup", "only copy FITS files into backup dir"),
599 CLI.Option("D", "printdiff", "only print diffs, don't update files"),
600 CLI.Option("F", "finalcheck", "check updated headers for completeness"),
601 CLI.Option("K", "fitskeys", "check these FITS keys",
602 "LIST", ','.join(CheckFitsHeaders.fitsKeys)),
603 CLI.Option("H", "getheaders", "get header info from WFAU and CASU"),
604 CLI.Option("M", "missing", "only get a list of missing keys"),
605 CLI.Option("T", "onlytime", "only reset the file timestamps"),
606 CLI.Option("Z", "fixfixed", "fix fixed files")]
607
608 cli = CLI(CheckFitsHeaders, "$Revision: 9344 $")
609 task = CheckFitsHeaders()
610 task.archive = DbSession(cli=cli)
611 task.database = task.archive.sysc.loadDatabase
612 task.isTrialRun = cli.getOpt("test")
613 task.printDiffs = cli.getOpt("printdiff")
614 task.extractHeaders = cli.getOpt("getheaders")
615 task.onlyMissing = cli.getOpt("missing")
616 task.onlyBackupFiles = cli.getOpt("backup")
617 task.fixFixed = cli.getOpt("fixfixed")
618 task.finalCheck = cli.getOpt("finalcheck")
619 task.pixOrCat = cli.getArg("pixorcat")
620 task.onlyTime = cli.getOpt("onlytime")
621 for fkey in cli.getOpt("fitskeys").split(','):
622 table, key = fkey.partition('.')[::2]
623 task.fitsKeys[table]
624
625 if task.archive.sysc.isVSA():
626 DataResultsPix = namedtuple( "DataResultsPix", "fileName multiframeID CAextNum MDextNum DECZP01 DECZP12 DECZP22 RAZP01 RAZP12 RAZP22 WCSPASS ELLIPTIC DRIBBLE EXTINCT NICOMB MAGZPT MAGZRR SEEING PERCORR")
627
628 DataResultsCat = namedtuple( "DataResultsCat", "fileName multiframeID CAextNum MDextNum APCOR7 CROWDED DECZP01 DECZP12 DESTRIPE DRIBBLE EBMVMED ELLIPTIC EXTINCT FILTFWHM MAGZPT MAGZRR MED_PA MINPIX NBSIZE NDITCOR NIGHTNUM NIGHTZPT NIGHTZRR NUMZPT NYOUT PERCORR RAZP01 RAZP12 RCORE SEEING SKYLEVEL SKYNOISE STDCRMS STRPRMS WCSPASS")
629 elif task.archive.sysc.isWSA():
630 DataResultsPix = namedtuple( "DataResultsPix", "fileName multiframeID CAextNum MDextNum CIRMED CIR_BVAR CIR_SCAL CIR_XOFF CIR_YOFF CIR_ZERO CURTNRNG DECURTN DECZP02 ELLIPTIC EXTINCT MAGZPT MAGZRR NUMBRMS RAZP02 READNOIS SEEING SKYLEVEL SKYNOISE STDCRMS WCSPASS XTALK")
631 DataResultsCat = namedtuple( "DataResultsCat", "fileName multiframeID CAextNum MDextNum CAMPOWER CAMROLE CAPPLICN CC_PRES CIRMED CIR_BVAR CIR_SCAL CIR_XOFF CIR_YOFF CIR_ZERO CNFINDEX CURTNRNG DECURTN DET_TEMP EXTINCT FILTFWHM GAIN NIGHTNUM NIGHTZPT NIGHTZRR NUMZPT NYOUT PCSYSID PERCORR PIXLSIZE RDOUT_X1 RDOUT_X2 RDOUT_Y1 RDOUT_Y2 READNOIS READOUT RUNID SC_TEMP SDSUID SEEING SKYLEVEL SKYNOISE STDCRMS WCSPASS XTALK")
632 else:
633 print "Wrong database: %s" % task.database
634 raise SystemExit()
635 task.DataResults = {"pix": DataResultsPix, "cat": DataResultsCat}
636
637 task.run()
638