"""
   Invoke CU8. Updates the photometric zero-point values in FITS files and in
   the database.

   If the database update fails for any reason, once the problem is fixed
   re-run the script from the last catalogue that was not updated, using the
   -f/--from option. If any FITS files fail to update, an error message is
   logged. Fix these files, and either recalibrate them by hand (inserting the
   correct HISTORY card for the current recalibration revision), or, if many
   still need recalibration, run the script again in -t/--test mode, which
   only updates FITS files that need updating and not the database (use the
   -f/--from option too to save time). FITS files that have already been
   updated will not be re-updated, as they will contain a HISTORY card noting
   that the file has been updated for the present recalibration version.

   To apply the same set of recalibration values to files that have been
   freshly transferred from CASU, go through the same procedure but add the
   -r/--rerun option with the name of the transfer log. Only the
   re-transferred files will be updated, and the previous recalibration
   version number will be applied to them.

   @todo: CU8 *still* has issues with interrupts and not closing down
          properly; it may be sensible to give up on the multi-threading
          idea...
   @todo: Order FITS updates in disk order?
   @todo: Reduce code replication, break up into functions further, document,
          improve calib table parser.
   @todo: Tidy up calibrateZeroPoint.
   @todo: Separate out FITS writing code etc. into a FitsUtils.Writer class,
          for CU6 etc.
   @todo: Implement global recalibration solution.

   @author: N.J.G. Cross
   @org:    WFAU, IfA, University of Edinburgh

   @newfield contributors: Contributors, Contributors (Alphabetical Order)
   @contributors: R.S. Collins

   @requires: PyFITS
"""

from __future__ import division, print_function

from   collections import defaultdict
import math
import numpy
from   operator    import itemgetter
import os
from   threading   import Thread
import time

import wsatools.Calibration             as calib
from   wsatools.CLI                     import CLI
import wsatools.CSV                     as csv
import wsatools.DbConnect.CommonQueries as queries
from   wsatools.DbConnect.CuSession     import CuSession
from   wsatools.DbConnect.DbSession     import Join, SelectSQL
import wsatools.DbConnect.DbConstants   as dbc
import wsatools.DbConnect.Schema        as schema
import wsatools.FitsUtils               as fits
from   wsatools.Logger                  import Logger, ForLoopMonitor
from   wsatools.SystemConstants         import DepCodes
import wsatools.Utilities               as utils
import wsatools.DataFactory             as df


class Cu8(CuSession):
    """
    Updates the photometric zero-point values in FITS files and in the
    database, from the values provided in the CASU-supplied calibration table
    file if specified with the -m/--manual option; otherwise a global solution
    to all the data is applied (not yet implemented).

    """
  77       
  78       
  79   
  80      cuNum = 8 
  81      calibKeys = [('MAGZPT', 'Photometric ZP (mags) for default extinction'), 
  82                   ('MAGZRR', 'Photometric ZP error (mags)'), 
  83                   ('NUMZPT', 'Number of standards used'), 
  84                   ('NIGHTZPT', 'Average photometric ZP (mags) for night'), 
  85                   ('NIGHTZRR', 'Photometric ZP sigma for night (mags)'), 
  86                   ('NIGHTNUM', 'Number of ZP used in band for night')] 
  87      maxDeltaZP = 0.05 
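    """ Deprecate an intermediate frame extension if its recalibrated
        zero-point differs from the existing catalogue value by more than
        this (mags).
    """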
  88       
  89      zpComment = "Photometric ZP (mags) for default extinction" 
  90      zpErrComment = "Photometric ZP error (mags)" 
    numZPComment = "Number of standards used"
  92      nightZPComment = "Average photometric ZP (mags) for night" 
  93      nightZPErrComment = "Photometric ZP sigma for night (mags)" 
  94      nightNumComment = "Number of ZP used in band for night" 
  95   
  96      """ List of MultiframeDetector attributes to update together with their 
  97          calibration FITS keywords and descriptions. Order determines the 
  98          sequence that the old values are listed in new HISTORY card. 
  99      """ 
 100      highDeltaZP = 0.5 
 101      """ Warn if change in zero-point is greater than this value. 
 102      """ 
 103      minimumDeltaZP = 5.0e-4 
 104      """ Only recalibrate detection table if change in zero-point is greater 
 105          than this value. 
 106      """ 
 107      misplacedKeys = ['MAGZPT', 'MAGZRR'] 
 108      """ List of keys that should be removed from primary header. 
 109      """ 
 110      paddingLine = "WFAU added block padding" 
 111      """ Comment line to pad out excess space in FITS headers. 
 112      """ 
 113      testDir = 'cu8_test_' + os.getenv('USER') 
 114      """ Name of testing directory to store FITS file copies. 
 115      """ 
 116       
 117       
 118       
 119   
 120      copyFits = False 
 121      """ Update copies of the FITS files instead of originals? 
 122      """ 
 123      dateRange = CuSession.sysc.obsCal.dateRange() 
 124      """ Include data observed between these nights only when doing the 
 125          automatic calibration of multi-epoch data. 
 126      """ 
 127      fromMf = None 
 128      """ Continue recalibrations from this multiframe ID and catalogue file 
 129          onwards in the list (format tuple(int, str)). If no catalogue given, 
 130          then not FITS updates will occur. If mfID=0 then all multiframes in the 
 131          list are updated. 
 132      """ 
 133      releaseNum = None 
 134      """ Current release number of products to be automatically calibrated. 
 135      """ 
 136      reqProdRange = None 
 137      """ Only apply the automatic calibration of multi-epoch data to this 
 138          product ID range. 
 139      """ 
 140      tablePathName = '' 
 141      """ Path to the file containing the table of new calibration values. 
 142      """ 
 143       
 144       
 145   
 146      _fitsThread = None 
 147      """ A separate thread to run the FITS file updater. 
 148      """ 
 149      _mfdAttrKeys = None 
 150      """ List of MultiframeDetector attributes to update, and corresponding FITS 
 151          keys. Created by parsing the schema. 
 152      """ 
 153      _newVersNum = 0 
 154      """ Current recalibration version number. 
 155      """ 
 156      _numErrors = 0 
 157      """ Counter for number of run-time errors raised. 
 158      """ 
 159      _numWarnings = 0 
 160      """ Counter for number of run-time warnings raised. 
 161      """ 
 162      _prevMDZPattrs = '' 
 163      """ Comma-separated string of MultiframeDetector attributes to copy into 
 164          the PreviousMDZP table. Created by parsing the schema for PreviousMDZP, 
 165          and setting versNum to the current version. 
 166      """ 
 167      _recalSchema = None 
 168      """ Dictionary of lists of columns for recalibration attributes referenced 
 169          by the detection table for every programme that is to be recalibrated. 
 170      """ 
 171      _timeStamp = '' 
 172      """ Time stamp to place into HISTORY card, denoting time of FITS update. 
 173      """ 
 174       
    def _onRun(self):
 177          """ Run CU8 
 178          """ 
 179          if self.programmeID: 
 180               
 181              if not self._calibrateZeroPoint(): 
 182                  return 
 183   
 184              self.programmeID = set([self.programmeID]) 
 185   
 186          Logger.addMessage("Determining current recalibration revision...") 
 187          self._determineVersionNumber() 
 188          Logger.addMessage("...this will be revision %s" % self._newVersNum) 
 189          Logger.addMessage("Parsing schemas...") 
 190          self._parseSchemas() 
 191   
 192          Logger.addMessage("Retrieving multiframeIDs from database...") 
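        # Build look-up tables from the archive: catalogue file basename ->
        # [multiframeID, image path, catalogue path], and multiframeID ->
        # set of programme IDs that the frame belongs to.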
 193          mfDetails = dict() 
 194           
 195          mfProgs = defaultdict(set) 
 196           
 197          for mfID, progID, imgPathName, catPathName in self.archive.query( 
 198            selectStr="M.multiframeID, programmeID, fileName, catName", 
 199              fromStr="Multiframe AS M, ProgrammeFrame AS P", 
 200             whereStr="M.multiframeID=P.multiframeID AND M.multiframeID>0 AND " 
 201                      "deprecated<128 AND catName NOT LIKE '%empty%' " 
 202                      "ORDER BY programmeID ASC"): 
 203               
 204              mfDetails[os.path.basename(catPathName.split(':')[-1])] = \ 
 205                [mfID, imgPathName.split(':')[-1], catPathName.split(':')[-1]] 
 206               
 207              mfProgs[mfID].add(progID) 
 208   
 209           
 210           
 211           
 212           
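        # Match each calibration-table entry to its archived multiframe;
        # entries whose files are not found in the archive are logged as
        # broken and skipped.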
 213          fitsCalibList = [] 
 214          for catFileName, extNums, newZpValues \ 
 215           in (self._parseCalibTableFile(self.tablePathName) 
 216               if self.tablePathName else []): 
 217              try: 
 218                  fitsCalibList.append( 
 219                    mfDetails[catFileName] + [extNums, newZpValues]) 
 220              except KeyError: 
 221                  self._numWarnings += 1 
 222                  self.logBrokenFile(catFileName, 
 223                                     self.sysc.filesNotIngestedPath()) 
 224   
 225          if self._numWarnings: 
 226              Logger.addMessage("<Warning> %s files not found in archive. Please" 
 227                " inspect %s for complete list." % (self._numWarnings, 
 228                self.sysc.filesNotIngestedPath())) 
 229   
 230              self._numWarnings = 1 
 231   
        Logger.addMessage("Sorting only the database calibration list into "
                          "programmeID order...")
 234           
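        # Database updates are applied in a fixed programme sequence (UDS
        # first ... CAL last); the FITS update list keeps the original
        # calibration-table order.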
 235          dbCalibList = sorted(fitsCalibList, key=itemgetter(0)) 
 236          progSeq = [self.programme.getProgIDfromName('uds'), 
 237                     self.programme.getProgIDfromName('uds_sv'), 
 238                     self.programme.getProgIDfromName('dxs'), 
 239                     self.programme.getProgIDfromName('dxs_sv'), 
 240                     self.programme.getProgIDfromName('gcs'), 
 241                     self.programme.getProgIDfromName('gcs_sv'), 
 242                     self.programme.getProgIDfromName('las'), 
 243                     self.programme.getProgIDfromName('las_sv'), 
 244                     self.programme.getProgIDfromName('gps'), 
 245                     self.programme.getProgIDfromName('gps_sv'), 
 246                     self.programme.getProgIDfromName('pts'), 
 247                     self.programme.getProgIDfromName('cal')] 
 248   
 249          dbCalibList = utils.arbSort(dbCalibList, progSeq, 
 250            key=lambda x: max(mfProgs[x[0]]), isFullKeySet=False) 
 251   
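        # Resume support (-f/--from): skip multiframes and catalogues that
        # were already processed in a previous, interrupted run.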
 252          if self.fromMf:   
 253               
 254              if self.fromMf[0]: 
 255                  try: 
 256                      idx = map(itemgetter(0), dbCalibList).index(self.fromMf[0]) 
 257                  except ValueError: 
 258                      self._numErrors += 1 
 259                      raise Cu8.CuError("Cannot find the given starting " 
 260                          "multiframe ID %s in the list of recalibrations!" 
 261                          % self.fromMf[0]) 
 262                  else: 
 263                      dbCalibList = dbCalibList[idx:] 
 264   
 265               
 266              if len(self.fromMf) == 1: 
 267                  fitsCalibList = [] 
 268              else:  
 269                  firstIdx = 0 
 270                  for index, (mfID, _i, cat, _n, _z) in enumerate(fitsCalibList): 
 271                      if cat.endswith(self.fromMf[1]): 
 272                          firstIdx = index 
 273                          break 
 274                  if firstIdx: 
 275                      fitsCalibList = fitsCalibList[firstIdx:] 
 276                  else: 
 277                      self._numErrors += 1 
 278                      raise Cu8.CuError( 
 279                        "Cannot find the given starting catalogue %s in the list" 
 280                        " of recalibrations!" % self.fromMf[1]) 
 281   
 282          if not dbCalibList and not fitsCalibList: 
 283              raise Cu8.CuError("Nothing to be done.") 
 284   
 285          Logger.addMessage("Updating recalibration revision database details") 
 286          self._updatePhotCalVers() 
 287   
 288           
 289           
 290           
 291           
 292           
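        # The FITS headers are updated in a separate worker thread, while the
        # database updates proceed in the main thread below.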
 293          if fitsCalibList: 
 294              self._fitsThread = Thread(target=self._updateFiles, 
 295                                          args=(fitsCalibList,)) 
 296              Logger.addMessage("Starting FITS update thread") 
 297              self._fitsThread.start() 
 298   
 299          Logger.addMessage("Starting DB update thread") 
 300          Logger.addMessage("DB: %s multiframes will be updated." 
 301                            % len(dbCalibList)) 
 302   
 303          progress = ForLoopMonitor(dbCalibList, threadID="DB") 
 304          for mfID, imgPathName, catPathName, extNums, zpValues in dbCalibList: 
 305              frameType = fits.determineFrameType(imgPathName) 
 306              prodType = 'mosaic' if 'mosaic' in frameType else None 
 307              if not prodType: 
 308                  prodType = 'tile' if 'tile' in frameType else 'stack' 
 309              progIDs = mfProgs[mfID] - set([dbc.intDefault()]) 
 310   
 311               
 312               
 313              Logger.addMessage("DB: Updating multiframeID %s of programme(s) %s" 
 314                % (mfID, ', '.join(self.programme.getAcronym(progID).upper() 
 315                                   for progID in progIDs)), echoOff=True) 
 316   
 317               
 318              tables = set(self.programme.getPhotometryTable(programmeID=progID) 
 319                           for progID in progIDs) 
 320   
            tables = tables.union(
                self.programme.getSynopticSourceTable(programmeID=progID)
                for progID in progIDs
                if self.programme.getSynopticSourceTable(programmeID=progID)
                       != dbc.charDefault()
                   and prodType == self.programme.getAttr("epochFrameType",
                                                          programmeID=progID))
 325              try: 
 326                  self._updateArchive(mfID, extNums, tables, zpValues) 
 327                   
 328                  self.archive.commitTransaction() 
 329                   
 330                  self.programmeID.update(progIDs) 
 331              except: 
 332                  self._numErrors += 1 
 333                  Logger.addMessage("DB: <ERROR> Updating database for " 
 334                      "multiframeID %s, programmeID(s) %s, catalogue file %s" 
 335                      % (mfID, csv.join(progIDs), catPathName)) 
 336                  raise 
 337   
 338              progress.testForOutput() 
 339   
 340           
 341          if self._fitsThread: 
 342              self._fitsThread.join() 
 343              del self._fitsThread   
 344   
 345          if self._numWarnings > 0 or self._numErrors > 0: 
 346              Logger.addMessage( 
 347                "<Info> Number of Warnings: %s, Number of Errors: %s" % 
 348                (self._numWarnings, self._numErrors)) 
 349   
 350           
 351          numRecals = self.archive.queryNumRows("PreviousMFDZP", 
 352            "versNum=%s" % self._newVersNum, distinctAttr="multiframeID") 
 353          Logger.addMessage("<Info> %s multiframes have been recalibrated in " 
 354                            "revision %s" % (numRecals, self._newVersNum)) 
  355   
 356       
 357   
 368   
 369       
    def _calibrateZeroPoint(self):
 372          """ 
 373          Calculates new zero-point values that can be used by CU8 to recalibrate 
 374          data in the database. Compares the photometry in multi-epoch products 
 375          to single epoch observations to find the zero-point offsets for the 
 376          single epoch frames. 
 377   
 378          @return: True, if new calibrations need to be applied. 
 379          @rtype:  bool 
 380   
 381          """ 
 382          Logger.addMessage("Calibrating single epoch observations of " 
 383                            "multi-epoch products...") 
 384   
 385          isCasuExtractor = self.programme.getAttr("extractTool") == "CASU" 
 386          detTables = (self.programme.getDetectionTable(), 
 387                       self.programme.getAstrometryTable(), 
 388                       self.programme.getPhotometryTable()) 
 389   
 390          detJoin = Join(detTables, ["multiframeID", "extNum", "seqNum"]) 
 391          mfdJoin = Join(detTables + ("MultiframeDetector",), 
 392                         ["multiframeID", "extNum"]) 
 393   
 394   
 395   
 396           
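        # Choose the photometric aperture: aperMag3 for CASU-extracted or VSA
        # data, otherwise aperMag7 with an aperture correction applied.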
 397          aperture = (3 if isCasuExtractor or self.sysc.isVSA() else 7) 
 398          correction = ("+aperCor%s" % aperture if not (isCasuExtractor or self.sysc.isVSA()) else '') 
 399   
 400          predicates = ("aperMag%dErr BETWEEN 0 AND %s AND class=-1 AND " 
 401                        "ppErrBits=0 AND " % (aperture, 5 * self.sysc.phtErrLimit) 
 402                        + detTables[0] + ".multiframeID IN (%s)") 
 403   
 404          mfdPred = ("%s.seqNum=%s.seqNum AND %s.seqNum=%s.seqNum AND " 
 405                     % (detTables[0], detTables[1], detTables[0], detTables[2]) 
 406                     + "MultiframeDetector.%s AND %s" 
 407                     % (DepCodes.selectNonDeprecated, predicates)) 
 408   
 409           
 410           
 411          intProdType = self.programme.getAttr("epochFrameType") 
 412          srcProdType = self.programme.getAttr("sourceProdType") 
 413          reqProducts = \ 
 414            self.reqProdRange or queries.getRequiredProductIDs(self.programme) 
 415   
 416          Logger.addMessage("Selecting deep products...") 
 417   
 418           
 419          frameTypeSel = (queries.getFrameSelection(srcProdType) 
 420                          + " AND frameType like '%deep%'") 
 421   
 422          finalDeepStacks = self.archive.query( 
 423            selectStr="productID, Multiframe.multiframeID, catName", 
 424              fromStr=Join(["Multiframe", "ProgrammeFrame"], ["multiframeID"]), 
 425             whereStr="deprecated=0 AND " + frameTypeSel + " AND programmeID=%s" 
 426                      " AND productID BETWEEN %s AND %s AND releaseNum=%s" 
 427                      % (self.programmeID, reqProducts[0], reqProducts[-1], 
 428                         self.releaseNum)) 
 429   
 430          if not finalDeepStacks: 
 431              Logger.addMessage("<Info> No multi-epoch products to calibrate") 
 432              return False 
 433   
 434           
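        # Sub-sample the deep detections (via a seqNum modulus) so that
        # roughly factNo objects per detector are used in the match.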
 435          dmfIDs = ','.join(map(str, map(itemgetter(1), finalDeepStacks))) 
 436          detector = "%s.multiframeID, %s.extNum" % detTables[:2] 
 437          factNo = (20000 if srcProdType == 'mosaic' or srcProdType == 'tile' else 
 438                    5000) 
 439   
 440          deepPhotCounts = self.archive.queryNumRows(detJoin, predicates % dmfIDs, 
 441                                                     groupBy=detector) 
 442   
 443          if not deepPhotCounts: 
 444              raise Cu8.CuError("No deep detections found. " 
 445                                "Check that CU4 ran correctly") 
 446   
 447          medianCount = numpy.median([det.count for det in deepPhotCounts]) 
 448          fact = math.floor(medianCount / factNo) if medianCount > factNo else 1 
 449          predicates2 = predicates + " AND (%s.seqNum %% %s)=0" 
 450          deepPhotCounts = self.archive.queryNumRows(detJoin, 
 451            predicates2 % (dmfIDs, detTables[0], fact), groupBy=detector) 
 452   
 453          Logger.addMessage("Getting deep photometry...") 
 454          deepStackPhotom = numpy.array(self.archive.query( 
 455            selectStr=detector + ", ra, dec, aperMag%d, aperMag%dErr" 
 456                      % (aperture, aperture), 
 457              fromStr=detJoin, 
 458             whereStr=predicates2 % (dmfIDs, detTables[0], fact) 
 459                      + " ORDER BY %s, dec OPTION (MAXDOP 1)" % detector)) 
 460   
 461          isMosaic = intProdType == 'stack' and (srcProdType == 'mosaic' 
 462                                               or srcProdType == 'tile') 
 463   
 464          Logger.addMessage("...will calibrate for %s products..." 
 465                            % len(finalDeepStacks)) 
 466   
 467          problemExtensions = [] 
 468          calibrationLines = [] 
 469          progress = ForLoopMonitor(finalDeepStacks) 
 470          for productID, dmfID, deepStackCatName in finalDeepStacks: 
 471              Logger.addMessage( 
 472                "Checking photometry against deep stack %s and cat %s" 
 473                % (dmfID, deepStackCatName), alwaysLog=False) 
 474   
 475              deepExtList = self.archive.query("extNum", "MultiframeDetector", 
 476                whereStr="multiframeID=%s AND deprecated=0" % dmfID, 
 477                 orderBy="extNum") 
 478   
 479               
 480              deepPhotom = calib.getIntermedPhotometry(dmfID, deepPhotCounts, 
 481                deepStackPhotom, deepExtList) 
 482   
 483              if not deepPhotom \ 
 484                or max([deepPhotom[key].size for key in deepPhotom]) == 0: 
 485                  raise Cu8.CuError("Deep frame, mfID=%s has no detections. " 
 486                                    "Check that CU4 ran correctly" % dmfID) 
 487   
 488              Logger.addMessage("Recalibrating intermediate stacks in " 
 489                                "productID=%d" % productID, alwaysLog=False) 
 490   
 491              if srcProdType == 'tile' and intProdType == 'stack': 
 492                   
 493                  intProductIDs = self.archive.query("intProductID", "ProductLinks", 
 494                      "programmeID=%s and combiProdType='%s' and intProdType='%s' " 
 495                      "and combiProductID=%s" % (self.programmeID, srcProdType, 
 496                                                intProdType, productID)) 
 497   
 498                  intStacks = sorted(queries.getIntStacks(self.archive, 
 499                     self.programmeID, intProductIDs, intProdType, self.dateRange, 
 500                     intFrameType=intProdType)) 
 501              else: 
 502                  intStacks = sorted(queries.getIntStacks(self.archive, 
 503                     self.programmeID, [productID], srcProdType, self.dateRange, 
 504                     intFrameType=intProdType)) 
 505   
 506              if intStacks: 
 507                  mfIDs = ','.join(str(mfID) for mfID in intStacks) 
 508                   
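                # For tiles, fetch the pawprint zero-point history so that the
                # constituent pawprint calibrations can be adjusted in step.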
 509                  if intProdType == 'tile': 
 510                      pawPrintZPDict = getZPHistory(self.archive, mfIDs) 
 511   
 512                  intStkZPs = self.archive.query( 
 513                    selectStr="multiframeID, extNum, photZPCat, photZPErrCat, " 
 514                              "numZPCat, nightZPCat, nightZPErrCat, nightZPNum", 
 515                      fromStr="MultiframeDetector", 
 516                     whereStr="multiframeID IN (%s) OPTION (MAXDOP 1)" % mfIDs) 
 517   
 518                  intPhotCounts = self.archive.queryNumRows(mfdJoin, 
 519                    mfdPred % mfIDs, groupBy=detector) 
 520   
                factNo = 20000 if intProdType == 'tile' else 5000
                medianIntCount = \
                    numpy.median([det.count for det in intPhotCounts])
                fact = (math.floor(medianIntCount / factNo)
                        if medianIntCount > factNo else 1)
 524   
 525                  mfdPred2 = mfdPred + " AND (%s.seqNum %% %s)=0" 
 526                  intPhotCounts = self.archive.queryNumRows(mfdJoin, 
 527                    mfdPred2 % (mfIDs, detTables[0], fact), groupBy=detector) 
 528   
 529                  intermediateStackPhotometry = numpy.array(self.archive.query( 
 530                    selectStr=detector + ", ra, dec, (aperMag%d-photZPCat%s), " 
 531                              "aperMag%dErr" % (aperture, correction, aperture), 
 532                      fromStr=mfdJoin, 
 533                     whereStr=mfdPred2 % (mfIDs, detTables[0], fact) 
 534                              + " ORDER BY %s OPTION (MAXDOP 1)" % detector)) 
 535   
 536              Logger.addMessage("Got intermediate frame photometry", 
 537                                alwaysLog=False) 
 538   
 539              for mfID in intStacks: 
 540                  catName = self.archive.query("catName", "Multiframe", 
 541                    whereStr="multiframeID=%s" % mfID, 
 542                    firstOnly=True).split(':')[-1] 
 543   
 544                  Logger.addMessage( 
 545                    "Calibrating int stack %s and catalogue %s" % (mfID, catName), 
 546                    alwaysLog=False) 
 547   
 548                  extNumList = [eNum for frameID, eNum, _number in intPhotCounts 
 549                                      if frameID == mfID] 
 550   
 551                   
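                # Compare the intermediate-stack photometry with the deep
                # stack to derive new zero-points for each extension.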
 552                  intPhotDict = calib.getIntermedPhotometry(mfID, intPhotCounts, 
 553                    intermediateStackPhotometry, extNumList) 
 554   
 555                  newZPDict = calib.checkAstrometryPhotometry( 
 556                    [(dmfID, deepPhotom)], sorted(intPhotDict.items()), 
 557                    isMosaic=isMosaic, 
 558                    compDict={dmfID: deepStackCatName}, isRecalMode=True) 
 559   
 560                  oldZPInfo = self._oldZPDict(intStkZPs, mfID) 
 561                  Logger.addMessage("Old ZP dict %s, new ZP dict %s for mfID %s" 
 562                    % (oldZPInfo, newZPDict, mfID), alwaysLog=False) 
 563   
 564                  for extNum in extNumList: 
 565                      if abs(newZPDict[extNum][0] - 
 566                             oldZPInfo[extNum]['MAGZPT']) > Cu8.maxDeltaZP: 
 567   
 568                          problemExtensions.append("%d, %d, %6.4f\n" 
 569                            % (mfID, extNum, newZPDict[extNum][0] 
 570                               - oldZPInfo[extNum]['MAGZPT'])) 
 571   
 572                          curDet = \ 
 573                            "multiframeID=%s AND extNum=%s" % (mfID, extNum) 
 574   
 575                          Logger.addMessage("<Info> New ZP for %s is: %.4f, %.4f" 
 576                            " after matching to deep frame %s. The old zeropoint" 
 577                            " is %.4f. This frame will be deprecated." % (curDet, 
 578                            newZPDict[extNum][0], newZPDict[extNum][1], dmfID, 
 579                            oldZPInfo[extNum]['MAGZPT'])) 
 580   
 581                           
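                        # The zero-point shift is too large to trust:
                        # deprecate this detector frame and its detections.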
 582                          self.archive.update("MultiframeDetector", 
 583                            [("deprecated", DepCodes.poorPhotom)], where=curDet) 
 584                          self.archive.update(self.programme.getDetectionTable(), 
 585                            [("deprecated", DepCodes.propDepDet)], where=curDet) 
 586                      else: 
 587                          history = "Recalibrated ZP using deep stack %s at %s" \ 
 588                            % (os.path.join( 
 589                              os.path.basename(os.path.dirname(deepStackCatName)), 
 590                              os.path.basename(deepStackCatName)), 
 591                               utils.makeDateTime()) 
 592   
 593                          calibrationLines += [ 
 594                            "%s #%d\n" % (os.path.basename(catName), (extNum - 1)), 
 595                            "MAGZPT  = %5.4f / %s\n" % 
 596                              (newZPDict[extNum][0], Cu8.zpComment), 
 597                            "MAGZRR  = %5.4f / %s\n" % 
 598                              (newZPDict[extNum][1], Cu8.zpErrComment), 
 599                            "NUMZPT  = %d / %s\n" % 
 600                              (oldZPInfo[extNum]['NUMZPT'], Cu8.numZPComment), 
 601                            "NIGHTZPT  = %5.4f / %s\n" % 
 602                              (oldZPInfo[extNum]['NIGHTZPT'], Cu8.nightZPComment), 
 603                            "NIGHTZRR  = %5.4f / %s\n" % 
 604                              (oldZPInfo[extNum]['NIGHTZRR'], Cu8.nightZPErrComment), 
 605                            "NIGHTNUM  = %d / %s\n" % 
 606                              (oldZPInfo[extNum]['NIGHTNUM'], Cu8.nightNumComment), 
 607                            "HISTORY %s\n" % (history)] 
 608                          if intProdType == 'tile': 
 609                              deltaZP = newZPDict[extNum][0] - oldZPInfo[extNum]['MAGZPT'] 
 610                              calibrationLines += getPawPrintCalLines( 
 611                                  pawPrintZPDict[mfID], deltaZP, history) 
 612   
 613                   
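                # If every detector of this frame is now deprecated, propagate
                # the deprecation up to the Multiframe entry.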
 614                  if not self.archive.queryEntriesExist("MultiframeDetector", 
 615                    where="multiframeID=%s AND %s" 
 616                          % (mfID, DepCodes.selectNonDeprecated)): 
 617   
 618                      self.archive.update("Multiframe", 
 619                        [("deprecated", DepCodes.propDepFra)], 
 620                        where="multiframeID=%s AND %s" 
 621                              % (mfID, DepCodes.selectNonDeprecated)) 
 622   
 623              progress.testForOutput() 
 624   
 625          if not calibrationLines: 
 626              Logger.addMessage("No new calibrations required") 
 627              return False 
 628   
 629          Logger.addMessage("Writing new calibrations to file " 
 630                            + self.tablePathName) 
 631   
 632          file(self.tablePathName, "w").writelines(calibrationLines) 
 633          return True 
  634   
 635       
    def _determineVersionNumber(self):
 638          """ Looks in table PhotCalVers for the previous recalibration version 
 639              number and increments. If this is a re-run or we are just updating 
 640              FITS files the previous version number is used. 
 641          """ 
 642          self._newVersNum = self._getNextID("versNum", "PhotCalVers") 
 643          if self.archive.isTrialRun or (self.fromMf and self.fromMf[0] != 0): 
 644              self._newVersNum -= 1 
  645   
 646       
    def _oldZPDict(self, intStkZPs, multiframeID):
        """ Returns a dictionary of the existing zero-point header values for
            each extension of the given multiframe, keyed by extension number.
        """
 651          headerZPDict = {} 
 652          for mfID, eNum, photZP, photZPErr, numZP, nightZP, nightZPErr, \ 
 653            nightNum in intStkZPs: 
 654              if mfID == multiframeID: 
 655                  headerZPDict[eNum] = {"MAGZPT": photZP, "MAGZRR": photZPErr, 
 656                                        "NUMZPT": numZP, "NIGHTZPT": nightZP, 
 657                                        "NIGHTZRR": nightZPErr, 
 658                                        "NIGHTNUM": nightNum} 
 659   
 660          return headerZPDict 
  661   
 662       
    def _parseSchemas(self):
 665          """ Parses WSA_MultiframeSchema.sql to determine MultiframeDetector 
 666              and PreviousMFDZP table attributes. Also, the programme schemas 
 667              for every programme to be recalibrated to determine the photometry 
 668              attributes to be recalibrated. 
 669   
 670              @warning: This function must be called after the new version number 
 671                        is set. 
 672          """ 
 673          mfdSchema, pzpSchema = schema.parseTables("WSA_MultiframeSchema.sql", 
 674            ["MultiframeDetector", "PreviousMFDZP"]) 
 675   
 676          keys = [key for key, _desc in Cu8.calibKeys] 
 677          self._mfdAttrKeys = [(attr.name, attr.fitsKeyword) 
 678            for attr in mfdSchema.columns if attr.fitsKeyword in keys] 
 679   
 680          self._prevMDZPattrs = utils.joinNested(pzpSchema.columns)\ 
 681                                .replace("versNum", "'%s'" % self._newVersNum) 
 682   
 683          self._recalSchema = {} 
 684          for progID in self.programme.getProgIDList(): 
 685              self.programme.setCurRow(programmeID=progID) 
 686              detTable = self.programme.getPhotometryTable() 
 687              if detTable not in self._recalSchema: 
 688                  script = self.programme.getSchemaScript() 
 689                  try: 
 690                      detTableSchema = schema.parseTables(script, [detTable])[0] 
 691                  except IOError as error: 
 692                      if script != dbc.charDefault(): 
 693                          Logger.addExceptionWarning(error) 
 694                      script = None 
 695                  if not script: 
 696                       
 697                      Logger.addMessage("<Warning> Cannot find schema for " 
 698                        "programme %s. Will use typical schema (prog=101) for " 
 699                        "detection table attributes." % progID) 
 700                      self.programme.setCurRow(programmeID=101) 
 701                      table = self.programme.getPhotometryTable() 
 702                      script = self.programme.getSchemaScript() 
 703                      detTableSchema = schema.parseTables(script, [table])[0] 
 704   
 705                  self._recalSchema[detTable] = [attr.name 
 706                    for attr in detTableSchema.columns 
 707                    if ('Mag' in attr.name and 'err' not in attr.name.lower() and 
 708                        attr.name != 'deltaMag')] 
 709               
 710              synSrcTable = self.programme.getSynopticSourceTable() 
 711              if synSrcTable != dbc.charDefault() and synSrcTable not in self._recalSchema: 
 712                  script = self.programme.getSchemaScript() 
 713                  try: 
 714                      synSrcSchema = schema.parseTables(script, [synSrcTable])[0] 
 715                  except IOError as error: 
 716                      if script != dbc.charDefault(): 
 717                          Logger.addExceptionWarning(error) 
 718                      script = None 
 719                  if script: 
 720                      self._recalSchema[synSrcTable] = [attr.name 
 721                        for attr in synSrcSchema.columns 
 722                         if ('Mag' in attr.name and 'err' not in attr.name.lower()) 
 723                         or ('Pnt' in attr.name and 'err' not in attr.name.lower())] 
  724   
 725   
 726   
 727       
 728   
 732   
 733   
    def _updatePhotCalVers(self):
 736          """ Updates end date of last recalibration version entry in table 
 737              PhotCalVers and inserts details for new recalibration version. An 
 738              additional entry is added for re-runs, with an alternative start 
 739              date. This is to record the complete set of cuEventID for a 
 740              recalibration version number. 
 741          """ 
 742          utDate = utils.makeMssqlTimeStamp() 
 743          self.archive.updateEntries("PhotCalVers", [("endDate", repr(utDate))], 
 744              [("versNum", self._newVersNum - 1)]) 
 745          self.archive.insertData("PhotCalVers", [self._newVersNum, 
 746            self.cuEventID, utDate, str(self.sysc.obsCal.lateDefaultDateTime)], 
 747            enforcePKC=True) 
  748   
 749       
    def _parseCalibTableFile(self, tablePathName):
 752          """ 
 753          Convert calibration table file to a dictionary of useful entities. 
 754   
        @todo: This should be a self-contained function, abolishing the need
               for curInfo and the helper function, which would optimise
               parsing.
 757   
 758          @param tablePathName: Path to the file containing the table of new 
 759                                calibration values. 
 760          @type  tablePathName: str 
 761   
        @return: A list of triples: FITS file name, list of extension numbers,
                 and dictionary of new FITS header keyword/value pairs (keyed
                 KEYWORD_extIndex).
        @rtype:  list(tuple(str, list(int), dict))
 765   
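        Example of the table format this parser expects (illustrative entry;
        actual file names and values come from the CASU table or from
        _calibrateZeroPoint):

          <catalogueFileName>.fits #1
          MAGZPT  = 23.0512 / Photometric ZP (mags) for default extinction
          MAGZRR  = 0.0213 / Photometric ZP error (mags)
          NUMZPT  = 45 / Number of standards used
          HISTORY Recalibrated ZP using deep stack <path> at <timestamp>
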
 766          """ 
 767          tableInfo = [] 
 768          curFileName = '' 
 769          curInfo = [] 
 770          Logger.addMessage("Parsing calibration table in: %s" % tablePathName) 
 771          for line in file(tablePathName): 
 772              columns = line.split() 
 773              if columns[0].endswith('.fits'): 
 774                  fileName = columns[0] 
 775                  if not curFileName:   
 776                      curFileName = fileName 
 777                   
 778                  if fileName != curFileName: 
 779                      tableInfo.append(self._getDict(curFileName, curInfo)) 
 780                      curFileName = fileName 
 781                      curInfo = [] 
 782               
 783              curInfo.append(line) 
 784   
 785           
 786          if curFileName: 
 787              tableInfo.append(self._getDict(curFileName, curInfo)) 
 788   
 789          return tableInfo 
  790   
 791       
 792   
    def _getDict(self, fitsName, infoArray):
        """ Produces, for one FITS file, the list of extension numbers and the
            dictionary of new header values (keyed KEYWORD_extIndex) parsed
            from its calibration table lines.
        """
 796          nLines = len(infoArray) 
 797          extNos = [] 
 798          for ii in range(nLines): 
 799              wrds = infoArray[ii].split() 
 800              if wrds[0] == fitsName: 
 801                  extNos.append((int(wrds[1][1:]), ii)) 
 802          nExt = len(extNos) 
 803          keys = [key for key, _desc in Cu8.calibKeys] + ['HISTORY'] 
 804          extnInfo = [] 
 805          extList = [(det + 1) for det, _lineNo in extNos] 
 806          for extn in range(nExt): 
 807              minRwNo = extNos[extn][1] 
 808              if extn < (nExt - 1): 
 809                  maxRwNo = extNos[extn + 1][1] 
 810              else: 
 811                  maxRwNo = nLines 
 812              nRows = maxRwNo - minRwNo 
 813              for row in range(nRows): 
 814                  rwNo = row + minRwNo 
 815                  for key in keys: 
 816                      tmp = self._getAttr(key, infoArray, rwNo) 
                    if tmp is not None:
 818                          extnInfo.append((key + '_' + str(extNos[extn][0]), tmp)) 
 819          return fitsName, extList, dict(extnInfo) 
  820   
 821       
 822   
    def _addHistory(self, historyPhrase, header, numSpareKeys):
  824          """ 
        Updates the FITS header to consolidate previous CASU HISTORY cards,
        and to add a new HISTORY card with the supplied details. If the number
        of header keys is reduced such that fewer header blocks are required,
        then additional padding lines are inserted to maintain the number of
        header blocks, so that the file update only rewrites the header rather
        than the entire FITS file.
 831   
 832          @param historyPhrase: New HISTORY card. 
 833          @type  historyPhrase: str 
 834          @param header:        FITS file header to update. 
 835          @type  header:        pyfits.Header 
 836          @param numSpareKeys:  Number of spare keywords available in current 
 837                                header block. 
 838          @type  numSpareKeys:  int 
 839   
 840          """ 
 841           
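        # First reclaim header space by compressing the existing CASU HISTORY
        # cards.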
 842          numSpareKeys += self._shrinkHistory(header) 
 843   
 844          while True: 
 845              if len(historyPhrase) > fits.maxHdrEntryLen(): 
 846                   
 847                  breakIndex = 1 + \ 
 848                      historyPhrase.rfind(' ', 0, fits.maxHdrEntryLen()) 
 849                  if not breakIndex \ 
 850                     or historyPhrase[fits.maxHdrEntryLen()] == ' ': 
 851                      breakIndex = fits.maxHdrEntryLen() 
 852                  header.add_history(historyPhrase[:breakIndex]) 
 853                  numSpareKeys -= 1 
 854              else: 
 855                  header.add_history(historyPhrase) 
 856                  numSpareKeys -= 1 
 857                  break 
 858              historyPhrase = '(cont.) ' + historyPhrase[breakIndex:].lstrip() 
 859   
 860           
 861           
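        # If whole header blocks have been freed up, pad them with COMMENT
        # cards so that the header keeps its original size on disk.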
 862          if numSpareKeys > fits.maxHdrBlockKeys(): 
 863              for _key in range(numSpareKeys - fits.maxHdrBlockKeys()): 
 864                  header.add_comment(Cu8.paddingLine) 
  865   
 866       
    def _deprecateKeywords(self, pHeader):
 869          """ 
 870          Remove misplaced keys from the primary header and update HISTORY card. 
 871   
 872          @param pHeader: FITS file primary header. 
 873          @type  pHeader: pyfits.Header 
 874   
 875          """ 
 876           
 877          while True: 
 878              if pHeader[-1] == Cu8.paddingLine: 
 879                  del pHeader[-1] 
 880              else: 
 881                  break 
 882           
 883          numSpareKeys = (fits.maxHdrBlockKeys() - len(pHeader.items()) 
 884                         % fits.maxHdrBlockKeys()) 
 885          history = [] 
 886          for key in Cu8.misplacedKeys: 
 887              oldValue = pHeader.get(key) 
 888              if oldValue is not None: 
 889                  history.append('%s=%s' % (key, oldValue)) 
 890                  del pHeader[key] 
 891          numSpareKeys += len(history) 
 892   
 893          if history:   
 894              historyPhrase = Cu8.__name__ + ' r%s at %s deprecated: ' % \ 
 895                (self._newVersNum, self._timeStamp) + ' '.join(history) 
 896   
 897              self._addHistory(historyPhrase, pHeader, numSpareKeys) 
 898   
 899              if self.copyFits: 
 900                  Logger.addMessage("FITS: 0) " + historyPhrase, alwaysLog=False) 
  901   
 902       
 903   
    def _getAttr(self, kywrd, infoArray, row):
        """ Extracts the value of the given keyword from a calibration table
            line, cast to the appropriate type (float, int for counts, or the
            raw string for HISTORY entries).
        """
  905          if infoArray[row][:len(kywrd)] == kywrd: 
 906              if kywrd != 'HISTORY' and 'NUM' not in kywrd: 
 907                  return float(infoArray[row].split('=')[1].split()[0]) 
 908              elif 'NUM' in kywrd: 
 909                  return int(infoArray[row].split('=')[1].split()[0]) 
 910              else: 
 911                  return infoArray[row][7:-1] 
  912   
 913       
    def _requiresUpdate(self, header):
 916          """ 
 917          Tests whether a FITS header has already been updated with the latest 
 918          recalibration values. 
 919   
        @param header: FITS extension header to check.
        @type  header: pyfits.Header

        @return: True if an update is required.
        @rtype:  bool
 925   
 926          """ 
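        # HISTORY cards written by this CU take the form
        # "Cu8 r<versNum> ... at <timestamp>"; if one already records the
        # current recalibration revision, no update is needed.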
 927          for key, value in header.items(): 
 928              if key == 'HISTORY' and value.startswith(Cu8.__name__): 
 929                  revNum = value.split()[1].replace('r', '') 
 930                  if revNum.isdigit() and int(revNum) == self._newVersNum: 
 931                      return False 
 932          return True 
  933   
 934       
 935   
    def _shrinkHistory(self, header):
  937          """ 
 938          Consolidates previous CASU HISTORY cards into a smaller amount of 
 939          header space, to help prevent the creation of new header blocks. 
 940   
 941          @param header: FITS file header to update. 
 942          @type  header: pyfits.Header 
 943   
 944          @return: Number of header keys freed up. 
 945          @rtype:  int 
 946   
 947          """ 
 948           
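        # Identify CASU HISTORY cards that can be merged (a timestamp card
        # followed by a processing-description card) or shortened ('$Id:'
        # cards), append the condensed versions, and mark the originals for
        # deletion.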
 949          casuHistoryIndices = [] 
 950          prevLine = '' 
 951          numAdded = 0 
 952           
 953          for index, (key, value) in enumerate(header.items()): 
 954              if key == 'HISTORY': 
 955                   
 956                  if prevLine and prevLine.split()[0].isdigit(): 
 957                      casuHistoryIndices += [index - 1, index] 
 958                      combinedLine = '%s at %s' % (' '.join(value.split()[1:5]), 
 959                                                   prevLine) 
 960                       
 961                       
 962                      if len(combinedLine) > fits.maxHdrEntryLen(): 
 963                          breakIndex = 1 + \ 
 964                              combinedLine.rfind(' ', 0, fits.maxHdrEntryLen()) 
 965                          if not breakIndex \ 
 966                             or combinedLine[fits.maxHdrEntryLen()] == ' ': 
 967                              breakIndex = fits.maxHdrEntryLen() 
 968                          header.add_history(combinedLine[:breakIndex]) 
 969                          numAdded += 1 
 970                          combinedLine = '(cont.) ' + combinedLine[breakIndex:].lstrip() 
 971                      header.add_history(combinedLine) 
 972                      numAdded += 1 
 973                   
 974                  elif value.strip().startswith('$'): 
 975                      casuHistoryIndices += [index] 
 976                      try:  
 977                          header.add_history(' '.join(value.split()[1:6])) 
 978                      except:  
 979                          header.add_history( 
 980                            value.replace('$', '').replace('Id:', '').strip()) 
 981                      numAdded += 1 
 982                  prevLine = value 
 983   
 984           
 985          numDeleted = 0 
 986          for index in casuHistoryIndices: 
 987              del header[index - numDeleted]   
 988              numDeleted += 1 
 989   
 990          return numDeleted - numAdded 
  991   
 992       
    def _updateArchive(self, mfID, extNums, tables, zpValues):
 995          """ 
 996          Updates the database multiframe entry for the new zero-point values, 
 997          copying the old values into the PreviousMFDZP table. Then applies 
 998          zero-point change to detections from that multiframe as a photometric 
 999          magnitude recalibration. 
1000   
1001          @param mfID:     Multiframe ID to update. 
1002          @type  mfID:     int 
1003          @param extNums:  List of extensions to be updated for this multiframe. 
1004          @type  extNums:  list(int) 
1005          @param tables:   Set of detection tables containing detections from 
1006                           this multiframe. 
1007          @type  tables:   set(str) 
1008          @param zpValues: The new zero-point values referenced by FITS keywords. 
1009          @type  zpValues: dict(str:float) 
1010   
1011          """ 
1012          filterTable = df.Table("Filter", self.archive) 
1013   
1014           
1015          for extNum in extNums: 
1016              curExt = "multiframeID=%s AND extNum=%s" % (mfID, extNum) 
1017   
1018               
1019              oldZP, filterID = self.archive.query("photZPCat,filterID", 
1020                                         "MultiframeDetector", 
1021                curExt, firstOnly=True, default=dbc.realDefault()) 
1022              newZP = zpValues.get("MAGZPT_%s" % (extNum - 1), dbc.realDefault()) 
1023   
1024              filterTable.setCurRow(filterID=filterID) 
1025              shortName = filterTable.getAttr("shortName") 
1026               
1027               
1028               
1029              if newZP > 0 and oldZP > 0: 
1030                  deltaZP = newZP - oldZP 
1031              else: 
1032                  deltaZP = 0 
1033                  self._numWarnings += 1 
1034                  Logger.addMessage("DB: <Warning> Negative zero-point in %s.\n" 
1035                    "Original zero-point = %s\nNew zero-point = %s" % 
1036                    (curExt, oldZP, newZP)) 
1037   
1038              if abs(deltaZP) > Cu8.highDeltaZP: 
1039                  self._numWarnings += 1 
1040                  Logger.addMessage("DB: <Warning> Large change in " 
1041                    "zero-point of %s magnitude for %s" % (deltaZP, curExt)) 
1042   
1043               
1044              newValues = [] 
1045              for attr, keyword in self._mfdAttrKeys: 
1046                  try: 
1047                      newValues.append( 
1048                        (attr, str(zpValues[keyword + "_%s" % (extNum - 1)]))) 
1049                  except KeyError: 
1050                      pass   
1051   
1052              if not newValues: 
1053                  raise RuntimeError("Request to update %s, but no zero-point " 
1054                    "values were supplied! In this case, such a request should " 
1055                    "not have been made and indicates a software design error." 
1056                    % curExt) 
1057   
1058               
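            # Archive the old zero-point values in PreviousMFDZP before
            # overwriting them, so the recalibration can be traced later.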
1059              self.archive.copyIntoTable("PreviousMFDZP", "MultiframeDetector", 
1060                self._prevMDZPattrs, where=curExt) 
1061   
1062               
1063              self.archive.update("MultiframeDetector", newValues, curExt) 
1064   
1065               
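            # Only recalibrate the detection photometry if the zero-point
            # change is significant; SynopticSource tables need band-specific
            # handling of magnitude and colour columns.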
1066              if abs(deltaZP) > Cu8.minimumDeltaZP: 
1067                  for photTable in tables: 
1068                       
1069                       
1070                       
1071                      if photTable.endswith('SynopticSource'): 
1072                          self._updateSynopticSource(photTable, deltaZP, 
1073                                                     shortName, mfID, extNum) 
1074   
1075                      else: 
1076                          attrUpdates = [(attr, '%s+%s' % (attr, deltaZP)) 
1077                                         for attr in self._recalSchema[photTable]] 
1078   
1079                          self.archive.update(photTable, attrUpdates, curExt) 
 1080   
1081       
    def _updateSynopticSource(self, photTable, deltaZP, shortName, mfID,
                              extNum):
        """ Applies the zero-point change to the recalibrated magnitude and
            colour columns of the given SynopticSource table, for sources
            detected on this frame extension in the given filter.
        """
1086          attrUpdates = [(attr, '%s+%s' % (attr, deltaZP)) 
1087            for attr in self._recalSchema[photTable] 
1088             if shortName.lower() in attr and 'pnt' not in attr.lower()] 
1089   
1090          mergeLog = photTable.replace("Source", "MergeLog") + " AS l" 
1091          whereStr = "l.synFrameSetID=%s.synFrameSetID AND " 
1092          whereStr += "l.%smfID=%s AND l.%seNum=%s AND %sseqNum>0" 
1093          whereStr %= (photTable, shortName, mfID, shortName, extNum, shortName) 
1094   
1095          self.archive.update(photTable, attrUpdates, whereStr, mergeLog) 
1096   
1097           
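        # Colour (Pnt) columns involve this band twice: add the shift where
        # this band is the first magnitude in the colour, and subtract it
        # where it is the second.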
1098          colUpdates = [[(attr, '%s+%s' % (attr, deltaZP)) 
1099            for attr in self._recalSchema[photTable] 
1100             if 'pnt' in attr.lower() and shortName.lower() + 'm' in attr]] 
1101   
1102          deltaZP = ("-%s" % deltaZP if deltaZP >= 0 else "+%s" % abs(deltaZP)) 
1103          colUpdates.append([(attr, attr + deltaZP) 
1104            for attr in self._recalSchema[photTable] 
1105             if 'pnt' in attr.lower() and 'm' + shortName.lower() in attr]) 
1106   
1107          for attrUpdates in colUpdates: 
1108              if attrUpdates: 
1109                  self.archive.update(photTable, attrUpdates, whereStr, mergeLog) 
 1110   
1111       
    def _updateFiles(self, calibList):
        """
        Updates the FITS image and catalogue files with the new zero-point
        values. Designed to be run in a separate thread from the database
        updates; halts early if the database update has failed.

        @param calibList: List of recalibrations to apply, one entry per
                          multiframe: (mfID, imgPathName, catPathName,
                          extNums, zpValues).
        @type  calibList: list(tuple)

        """
1117          Logger.addMessage("FITS: %s files will be updated." % len(calibList)) 
1118          progress = ForLoopMonitor(calibList, threadID="FITS") 
1119   
1120          for _mfID, imgPathName, catPathName, _extNums, zpValues in calibList: 
1121   
1122              if self._success == False:   
1123                  Logger.addMessage("Halting FITS thread early. Please restart " 
1124                                    "from file: " + catPathName) 
1125                  break 
1126   
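            # When updating copies, duplicate the image and catalogue into the
            # test output directory and operate on the copies instead of the
            # originals.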
1127              if self.copyFits:  
1128                  testPath = self.sysc.testOutputPath(Cu8.testDir) 
1129                  if not os.path.exists(testPath): 
1130                      os.mkdir(testPath) 
1131   
1132                  newImgPathName = \ 
1133                      os.path.join(testPath, os.path.basename(imgPathName)) 
1134                  if not os.path.exists(newImgPathName): 
1135                      os.system('cp -auvfx %s %s' 
1136                                  % (imgPathName, newImgPathName)) 
1137                  imgPathName = newImgPathName 
1138   
1139                  newCatPathName = \ 
1140                      os.path.join(testPath, os.path.basename(catPathName)) 
1141                  if not os.path.exists(newCatPathName): 
1142                      os.system('cp -auvfx %s %s' 
1143                                  % (catPathName, newCatPathName)) 
1144                  catPathName = newCatPathName 
1145   
1146              self._timeStamp = utils.makeTimeStamp() 
1147   
1148               
1149              try: 
1150                  self._updateCatalogueFile(catPathName, zpValues) 
1151              except Exception as error: 
1152                  self._numErrors += 1 
1153                  eType, eMessage = Logger.extractExceptionDetails(error) 
1154                  Logger.addMessage("FITS: <ERROR> Could not update file %s " 
1155                    "error message: %s: %s" % (catPathName, eType, eMessage)) 
1156                  self.logBrokenFile(catPathName) 
1157   
1158               
1159              try: 
1160                  self._updateImageFile(imgPathName, zpValues) 
1161              except Exception as error: 
1162                  self._numErrors += 1 
1163                  eType, eMessage = Logger.extractExceptionDetails(error) 
1164                  Logger.addMessage("FITS: <ERROR> Could not update file %s " 
1165                    "error message: %s: %s" % (imgPathName, eType, eMessage)) 
1166                  self.logBrokenFile(imgPathName) 
1167   
1168              progress.testForOutput() 
 1169   
1170       
    def _updateCatalogueFile(self, catPathName, zpValues):
        """
        Updates the extension headers of a FITS catalogue file with the new
        zero-point values, restoring the file's original modification time
        afterwards.

        @param catPathName: Full path to the FITS catalogue file.
        @type  catPathName: str
        @param zpValues:    The new zero-point values referenced by FITS
                            keywords.
        @type  zpValues:    dict(str:float)

        """
1176           
1177          origModTime = os.stat(catPathName).st_mtime 
1178          catFile = fits.open(catPathName, 'update') 
1179          try: 
1180               
1181              if self._requiresUpdate(catFile[1].header): 
1182                  for extNum, hdu in enumerate(catFile): 
1183                       
1184                       
                    if extNum != 0:
1186                          self._updateKeywords(hdu.header, extNum, zpValues) 
1187                           
1188                          fits.fixExponentBug(hdu.header) 
1189   
1190                  if self.copyFits and Logger.isVerbose: 
1191                       
1192                      catStart = time.time() 
1193   
1194                  catFile.flush() 
1195   
1196                   
1197                   
1198                  Logger.addMessage("FITS: Updated " + catPathName, echoOff=True) 
1199                  if self.copyFits and Logger.isVerbose: 
1200                      Logger.addMessage("FITS: Closing Catalogue: %s s" % 
1201                                       (time.time() - catStart), alwaysLog=False) 
1202              else: 
1203                  Logger.addMessage("FITS: No update required for " 
1204                                    + catPathName, echoOff=True) 
1205          finally: 
1206              catFile.close() 
1207              os.utime(catPathName, (time.time(), origModTime)) 
 1208   
1209       
    def _updateImageFile(self, imgPathName, zpValues):
        """
        Updates the headers of a FITS image file with the new zero-point
        values, removing misplaced zero-point keywords from the primary
        header, and restoring the file's original modification time
        afterwards.

        @param imgPathName: Full path to the FITS image file.
        @type  imgPathName: str
        @param zpValues:    The new zero-point values referenced by FITS
                            keywords.
        @type  zpValues:    dict(str:float)

        """
1215           
1216          origModTime = os.stat(imgPathName).st_mtime 
1217          imgFile = fits.open(imgPathName, 'update') 
1218          try: 
1219               
1220              if self._requiresUpdate(imgFile[1].header): 
1221                   
1222                  pHeader = imgFile[0].header 
1223                  self._deprecateKeywords(pHeader) 
1224                   
1225                  fits.fixExponentBug(pHeader) 
1226   
1227                   
1228                  for extNum, hdu in enumerate(imgFile): 
1229                       
                    if extNum != 0:
1231                          self._updateKeywords(hdu.header, extNum, zpValues) 
1232   
1233                  if self.copyFits and Logger.isVerbose: 
1234                       
1235                      imgStart = time.time() 
1236   
1237                  imgFile.flush() 
1238   
1239                   
1240                   
1241                  Logger.addMessage("FITS: Updated " + imgPathName, echoOff=True) 
1242                  if self.copyFits and Logger.isVerbose: 
1243                      Logger.addMessage("FITS: Closing Image: %s s" % 
1244                                       (time.time() - imgStart), alwaysLog=False) 
1245              else: 
1246                  Logger.addMessage("FITS: No update required for " 
1247                                    + imgPathName, echoOff=True) 
1248          finally: 
1249              imgFile.close() 
1250              os.utime(imgPathName, (time.time(), origModTime)) 
 1251   
1252       
    def _updateKeywords(self, header, extNum, zpValues):
        """
        Update keywords to the latest recalibration values and append a
        HISTORY card recording the change.

        @param header: FITS file header to update.
        @type  header: pyfits.Header
        @param extNum: Extension number of given header.
        @type  extNum: int
        @param zpValues: New keyword values for this recalibration, keyed by
                         keyword and extension number.
        @type  zpValues: dict

        """
1265           
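        # Remove any trailing padding lines from the end of the header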
        while header[-1] == Cu8.paddingLine:
            del header[-1]
1271           
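        # Count the spare keyword slots left in the final header block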
1272          numSpareKeys = (fits.maxHdrBlockKeys() - len(header.items()) 
1273                          % fits.maxHdrBlockKeys()) 
1274   
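        # Compose the HISTORY phrase: CU name, new version number, any
        # CASU-supplied HISTORY identifier, and a time stamp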
1275          historyPhrase = Cu8.__name__ + ' r%s ' % self._newVersNum 
1276          casuHistory = zpValues.get('HISTORY_%s' % extNum, '') 
1277          if casuHistory: 
1278              try:  
1279                  casuHistory = ' '.join(casuHistory.split()[1:3]) 
            except Exception:
1281                  casuHistory = \ 
1282                    casuHistory.replace('$', '').replace('Id:', '').strip() 
1283              historyPhrase += 'via %s ' % casuHistory 
1284          historyPhrase += 'at %s' % self._timeStamp 
1285   
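        # Write the new values, recording the old ones for the HISTORY card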
1286          history = [] 
1287          for key, desc in Cu8.calibKeys: 
1288               
1289              newValue = zpValues.get('%s_%s' % (key, extNum)) 
1290              if newValue is not None: 
1291                   
1292                   
                if key not in header:
1294                      numSpareKeys -= 1 
1295                   
1296                  history.append('%s=%s' % (key, header.get(key, 'None'))) 
1297   
1298                  header.update(key, newValue, desc) 
1299   
1300          if history: 
1301              historyPhrase += ' old values: ' + ' '.join(history) 
1302          self._addHistory(historyPhrase, header, numSpareKeys) 
1303   
1304          if self.copyFits and history: 
1305              Logger.addMessage("FITS: %s) " % extNum + historyPhrase, 
1306                                alwaysLog=False) 
  1307   
1308   
1309   
def getZPHistory(db, mfIDs):
    """ Check whether tile ZP updates have been propagated to the pawprints,
    using versNum. If not, calculate the difference in ZP so far.
    """
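    # Zero-points recorded for each tile by previous relative recalibrations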
1314      tilePrev = db.query( 
1315          selectStr="multiframeID,pmz.versNum,photZPCat", 
1316          fromStr="PreviousMfdZP as pmz, PhotCalVers as pcv," 
1317            "ArchiveCurationHistory as ach", 
1318          whereStr="multiframeID in (%s) and pmz.versNum=pcv.versNum and " 
1319            "pcv.cuEventID=ach.cuEventID and ach.comment like " 
1320            "'%%Relative recalibration%%'" % mfIDs) 
1321      tilePrevDict = defaultdict(list) 
1322      for key, val, phZP in tilePrev: 
1323          tilePrevDict[key].append((val, phZP)) 
1324       
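    # Pawprint stacks that were combined into the requested tiles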
1325      inpStacks = SelectSQL("multiframeID", "Provenance", 
1326                          "combiframeID in (%s)" % mfIDs) 
1327       
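    # Current zero-point data for every non-deprecated pawprint extension,
    # together with the current ZP of the tile it feeds into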
1328      mfExts = db.query( 
1329          selectStr="d.multiframeID,d.extNum,dt.photZPCat,combiFrameID," 
1330            "d.photZPCat,d.photZPErrCat,d.numZPCat,d.nightZPCat,d.nightZPErrCat," 
1331            "d.nightZPNum, catName", 
1332          fromStr="MultiframeDetector as d,Provenance as p," 
1333            "MultiframeDetector as dt,Multiframe as m", 
1334          whereStr="d.multiframeID=p.multiframeID and p.combiframeID in " 
1335            "(%s) and d.%s and p.combiframeID=dt.multiframeID and " 
1336            "m.multiframeID=d.multiframeID" 
1337            % (mfIDs, DepCodes.selectNonDeprecated)) 
1338       
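    # Latest relative-recalibration version and latest other recalibration
    # version already applied to each pawprint extension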
1339      lastRecalPaw = db.query( 
1340          selectStr="multiframeID,extNum,max(pmz.versNum)", 
1341          fromStr="PreviousMfdZP as pmz, PhotCalVers as pcv," 
1342            "ArchiveCurationHistory as ach", 
1343          whereStr="multiframeID in (%s) and pmz.versNum=pcv.versNum and " 
1344            "pcv.cuEventID=ach.cuEventID and ach.comment like " 
1345            "'%%Relative recalibration%%' group by multiframeID,extNum" 
1346          % inpStacks) 
1347      lastNonRecalPaw = db.query( 
1348          selectStr="multiframeID,extNum,max(pmz.versNum)", 
1349          fromStr="PreviousMfdZP as pmz, PhotCalVers as pcv," 
1350            "ArchiveCurationHistory as ach", 
1351          whereStr="multiframeID in (%s) and pmz.versNum=pcv.versNum and " 
1352            "pcv.cuEventID=ach.cuEventID and ach.comment not like " 
1353            "'%%Relative recalibration%%' group by multiframeID,extNum" 
1354          % inpStacks) 
1355   
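    # For each tile, collect its pawprint ZP data together with the tile ZP
    # change that has yet to be propagated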
1356      deltaZPDict = defaultdict(list) 
1357   
1358      for mfID, extN, curTileZP, tileMfID, curZP, curZPErr, curZPNum, \ 
1359         nightZP, nightZPErr, nightNum, catName in mfExts: 
1360          vers = [maxVN for mf, ex, maxVN in lastRecalPaw 
1361                  if mf == mfID and ex == extN] 
1362          othRecalVers = [maxVN for mf, ex, maxVN in lastNonRecalPaw 
1363                         if mf == mfID and ex == extN] 
1364           
1365          recalVers = vers[0] if vers else 0 
1366          othVers = othRecalVers[0] if othRecalVers else 0 
1367           
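        # Tile recalibration versions newer than the last one propagated to
        # this pawprint extension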
1368          unPropVers = [versNo for versNo, _ZP in tilePrevDict[tileMfID] 
1369                        if versNo > recalVers] 
1370          earlyUnPropVers = min(unPropVers) if unPropVers else None 
1371   
1372          if unPropVers and earlyUnPropVers < othVers: 
1373               
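            # An external recalibration happened in between tile updates;
            # this case is not handled yet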
1374              raise Cu8.CuError("External recalibration in between update " 
1375                                "of pawprints - need to finish off code") 
1376          elif unPropVers: 
1377              deltaZP = curTileZP - [ZP for versNo, ZP in tilePrevDict[tileMfID] 
1378                                     if versNo == earlyUnPropVers][0] 
1379          else: 
1380              deltaZP = 0 
1381          deltaZPDict[tileMfID].append((mfID, extN, deltaZP, curZP, curZPErr, 
1382                                   curZPNum, nightZP, nightZPErr, nightNum, 
1383                                   catName)) 
1384      return deltaZPDict 
 1385   
1386   
1387   
    """ Produce calibration table lines for the given list of pawprint
    zero-point values, applying the given ZP offset and HISTORY entry.
    """
1391      calibrationLines = [] 
1392   
1393      for _mfID, extNum, dZP, zp, zpErr, zpN, nZP, nZPErr, nZPNum, catName \ 
1394          in pawPrintZPList: 
1395          newZP = zp + dZP + deltaZP 
1396          calibrationLines += [ 
1397              "%s #%d\n" % (os.path.basename(catName), (extNum - 1)), 
1398              "MAGZPT  = %5.4f / %s\n" % (newZP, Cu8.zpComment), 
1399              "MAGZRR  = %5.4f / %s\n" % (zpErr, Cu8.zpErrComment), 
1400              "NUMZPT  = %d / %s\n" % (zpN, Cu8.numZPComment), 
1401              "NIGHTZPT  = %5.4f / %s\n" % (nZP, Cu8.nightZPComment), 
1402              "NIGHTZRR  = %5.4f / %s\n" % (nZPErr, Cu8.nightZPErrComment), 
1403              "NIGHTNUM  = %d / %s\n" % (nZPNum, Cu8.nightNumComment), 
1404              "HISTORY %s\n" % (history)] 
1405      return calibrationLines 
 1406   
1407   
1408   
1409   
1410   
1411  if __name__ == '__main__': 
1412       
1413      CLI.progOpts += [ 
      CLI.Option('f', 'from', "continues recalibration from this multiframeID "
                 "and catalogue file onwards; if no file is given then no "
                 "FITS updates are made; if MFID=0 then all multiframes in "
                 "the list are updated "
                 "(example: --from='478529, w200YMMDD_XXXXX_st_cat.fits')",
                 'MFID, FILE', isValOK=lambda x: csv.values(x)[0].isdigit()),
1419        CLI.Option('m', 'manual', 
1420                   'manually apply values in this CASU-supplied file', 'FILE', 
1421                   isValOK=CLI.assertFileExists), 
      CLI.Option('n', 'no_update',
                 "don't update the original FITS files, only the copies"),
1424        CLI.Option('v', 'verbose', 'display FITS update time details')] 
1425   
1426      cli = CLI(Cu8.__name__, "$Revision: 10034 $", Cu8.__doc__) 
1427      Logger.isVerbose = cli.getOpt('verbose') 
1428      Logger.addMessage(cli.getProgDetails()) 
1429   
1430      cu = Cu8(cli=cli) 
1431      cu.copyFits = cli.getOpt('no_update') 
1432      if cli.getOpt('manual'): 
1433          cu.tablePathName = cli.getOpt('manual') 
1434      if cli.getOpt('from'): 
1435          fromMf = list(csv.values(cli.getOpt('from'))) 
1436          fromMf[0] = int(fromMf[0]) 
1437          cu.fromMf = tuple(fromMf) 
1438   
1439      cu.run() 
1440      del cu    
1441   