Package invocations :: Package cu7 :: Module cu7
[hide private]

Source Code for Module invocations.cu7.cu7

  1  #! /usr/bin/env python 
  2  #------------------------------------------------------------------------------ 
  3  #$Id: cu7.py 8615 2011-08-19 15:48:22Z RossCollins $ 
  4  """ 
  5     Invoke CU7. Create/update merged source catalogue for a given programme. 
  6   
  7     @author: N.C. Hambly 
  8     @org:    WFAU, IfA, University of Edinburgh 
  9   
 10     @newfield contributors: Contributors, Contributors (Alphabetical Order) 
 11     @contributors: R.S. Collins, E. Sutorius 
 12  """ 
 13  #------------------------------------------------------------------------------ 
 14  from __future__      import division, print_function 
 15  from future_builtins import map, zip 
 16   
 17  import math 
 18  from   operator    import attrgetter, itemgetter 
 19  import os 
 20   
 21  import wsatools.Astrometry              as astro 
 22  from   wsatools.CLI                 import CLI 
 23  import wsatools.CSV                     as csv 
 24  import wsatools.DbConnect.CommonQueries as queries 
 25  import wsatools.DbConnect.DbConstants   as dbc 
 26  from   wsatools.DbConnect.DbSession import odbc, Ingester 
 27  import wsatools.DbConnect.Schema        as schema 
 28  from   wsatools.Logger              import Logger 
 29  from   wsatools.SourceMerger        import SourceMerger 
 30  from   wsatools.SystemConstants     import DepCodes 
 31  #------------------------------------------------------------------------------ 
 32   
class Cu7(SourceMerger):
    """ Create/update merged source catalogue for a given programme.
    """
    #--------------------------------------------------------------------------
    # Define class constants (access as Cu7.varName)

    cuNum = 7
    epochTolerance = {dbc.ukidssLasProgrammeID(): 550,
                      dbc.ukidssGcsProgrammeID(): 550,
                      dbc.ukidssGpsProgrammeID(): 730}
    """ Minimum duration in days between separate epochs for each programme.
        Normally this would be two years for UKIDSS surveys.
    """
    # Private class parameters
    _matchExtensions = True
    """ Option to match extension numbers when doing the frame association into
        frame sets. Presently, all surveys can use this additional feature to
        avoid incorrect frame association in the presence of highly non-uniform
        tiling resulting from guide star limited tile placement in the SDT.
    """
    #--------------------------------------------------------------------------
    # Define public member variable default values (access as obj.varName)
    # These need to be set from command-line options (see script entry point).

    createNewTable = False
    """ Destroy the existing source table and start from scratch?
    """
    enableIngest = True
    """ Ingest results into database?
    """
    isMultiEpoch = True
    """ Consider multiple epochs when associating frame sets?
    """
    onlyMerge = False
    """ Only update the source table with newly merged sources without
        reseaming?
    """
    onlyReseam = False
    """ Only reseam sources?
    """
    prepareOnly = False
    """ Only prepare the frame sets and update merge log, without source
        merging?
    """
    skipQualityCheck = False
    """ Override quality bit flag check?
    """
    #--------------------------------------------------------------------------
    # Private member variables

    _frameData = None
    """ A complete set of frame data for the current programme as a dictionary
        referenced by a (multiframeID, extNum) pair.
    """
    #--------------------------------------------------------------------------
    def _onRun(self):
        """ Create/update merged source catalogue.

            Workflow: decide multi-epoch mode, optionally recreate or clean
            the source table, build the list of new/updated frame sets, merge
            sources (external C/C++ code), ingest the results and reseam.
        """
        # Multi-epoch association only applies to programmes with an entry in
        # Cu7.epochTolerance
        self.isMultiEpoch = \
            self.isMultiEpoch and self.programmeID in Cu7.epochTolerance

        Logger.addMessage("Multiple epoch frame set association is switched "
                          + ("ON" if self.isMultiEpoch else "OFF"))

        super(Cu7, self)._onRun()

        # If the merge log is empty then we will automatically completely
        # reseam the source table. This allows us to mark the frame sets as old
        # incrementally whilst seaming is going on.
        self.doCompleteReseam = self.doCompleteReseam or self.createNewTable \
            or not self.archive.queryEntriesExist(self._mergeLog)

        if self.onlyReseam:
            self.reseamSources()
            return

        # Prepare the ingester up front so schema mismatches fail early,
        # before any destructive table operations below
        if self.enableIngest:
            ingSchema = [self._progSchema[self._mergeLog]]
            if not self.prepareOnly:
                ingSchema.append(self._progSchema[self._sourceTable])
            try:
                ingester = Ingester(self.archive, ingSchema, self.shareFileID,
                                    skipSchemaCheck=self.createNewTable)
            except schema.MismatchError as error:
                raise Cu7.CuError(error)

        if not self.prepareOnly:
            Logger.addMessage(
                "Checking that the quality bit flags are all set")
            if not self.skipQualityCheck and \
               not self._isQualityFlaggingComplete():
                raise Cu7.CuError("Detection quality bit flags have not "
                                  "been updated for this programme!")

        if self.createNewTable:
            Logger.addMessage("Destroying & recreating %s and emptying %s" %
                              (self._sourceTable, self._mergeLog))
            self.archive.dropTable(self._progSchema[self._sourceTable],
                                   self._progSchema.values())
            self.archive.delete(self._mergeLog)
            if self.programme.isDeep():
                self.archive.delete(self.programme.getSynopticBestMatchTable())
            self.archive.createTable(self._progSchema[self._sourceTable],
                                     self._progSchema.values())
        else:
            # Mark frame sets deprecated if they include a frame that was
            # deprecated on the last run of the frame ingestion procedure or if
            # they were merged by a previous version of this code.
            Logger.addMessage("Updating frame sets for deprecated frames ...")
            Logger.addMessage("... number of frame sets deprecated: %s" %
                              self.deprecateOldFrameSets())

            # Remove any merged sources from the source table if they are based
            # on frame sets that have now been deprecated:
            Logger.addMessage("Removing sources based on deprecated frame "
                              "sets...")
            Logger.addMessage("... number of sources removed: %s" %
                              self._removeDeprecatedSources(self._sourceTable))

        #### Create list of new frame sets to be merged ####

        # Create a new list of frame sets to be merged in this run (at the same
        # time, make a list of existing, but incomplete, frame sets that will
        # have new frames added to them):
        newFrameSets = self._makeNewFrameSets()

        if not newFrameSets:
            msg = "No new source merging required for this programme."
            # See if there is any reseaming to do
            if self.enableIngest and not self.prepareOnly and \
               not self.onlyMerge and self.archive.queryEntriesExist(
                    self._mergeLog, where="newFrameSet=%s" % dbc.yes()):
                Logger.addMessage("<Info> " + msg)
                self.reseamSources()
                return
            else:
                raise Cu7.CuError(msg)

        if not self.prepareOnly:
            # Outgest and merge detections with C/C++ code, creating binary
            # source table for ingest
            sourceTablePathName, mergeLogPathName, isComplete = \
                self.mergeSources(newFrameSets,
                    matchRadius=3600 * self.programme.getAttr('pairingCriterion'))
        else:
            # Write the updated merge log of new (plus updated) frame sets (new
            # frame sets flagged using the merge log attribute "new") to a CSV
            # file
            mergeLogPathName = \
                os.path.join(self.sysc.tempWorkPath(), 'mergeLog.csv')
            csv.File(mergeLogPathName, 'w+').writelines(set(newFrameSets))

        # Ingest merge log, source table and then reseam merged sources.
        if self.enableIngest:
            try:
                ingester.ingestTable(self._mergeLog, mergeLogPathName,
                                     isCsv=True)
            except odbc.DatabaseError as error:
                if 'STREAM' in str(error):
                    Logger.addMessage(
                        "<ERROR> %s ingest failed. " % self._mergeLog +
                        "Possibly because of a discrepancy between the schema "
                        "and the entries in RequiredFilters.")
                raise

            if not self.prepareOnly:
                try:
                    sourcesIngested = ingester.ingestTable(self._sourceTable,
                        sourceTablePathName, self._idxInfo, isOrdered=True)
                except Exception as error:
                    # Source ingest failed: roll back this event's merge log
                    # rows so the merge can be safely re-run
                    Logger.addExceptionMessage(error)
                    Logger.addMessage("Attempting to rollback merge log...")
                    Logger.addMessage("%s rows removed from table %s" %
                        (self.archive.delete(self._mergeLog,
                         whereStr="cuEventID=%s" % self.cuEventID),
                         self._mergeLog))
                    raise

                if not isComplete:
                    raise Cu7.CuError("Source merging incomplete. Please "
                        "re-run in append mode with the same date range.")

                # Reseam the merged source table based on the data available
                # ... so far
                if not self.onlyMerge and sourcesIngested:
                    self.reseamSources()

                elif not self.onlyMerge and not self.prepareOnly:
                    Logger.addMessage("<Warning> %s contains no merged sources, "
                        "so no reseaming can be performed." % self._sourceTable)
223 224 #-------------------------------------------------------------------------- 225
226 - def _createFrameSet(self, nextFrameSetID, matchingFrames, availableFrames):
227 """ 228 Creates a new frame set with the given frameSetID out of the given list 229 of frames in the same pointing. Also, updates availableFrames as 230 required. 231 232 @param nextFrameSetID: Unique ID to assign to the new frame set. 233 @type nextFrameSetID: int 234 @param matchingFrames: Candidate frames of the same position to form 235 the frame set from. 236 @type matchingFrames: list(Frame) 237 @param availableFrames: A complete list of frames currently available 238 for merging. 239 @type availableFrames: list(Frame) 240 241 @return: The new frame set. 242 @rtype: SourceMerger.FrameSet 243 244 """ 245 masterFrame = matchingFrames[0] 246 frameCentre = (masterFrame.centralRA, masterFrame.centralDec) 247 248 # Form the frames in the matching list into a new frame set: 249 newFrameSet = [nextFrameSetID, self.cuEventID] + list(masterFrame[:6]) 250 251 for filterID, numEpochs in self._filterPasses: 252 # Reduce list to just those frames with the same passband 253 frames = [frame for frame in matchingFrames 254 if frame.filterID == filterID] 255 frames.sort(key=attrgetter("mjdObs")) 256 257 for passNum in range(1, numEpochs+1): 258 bestFrame = self._pickBestFrame(frames, passNum, numEpochs, 259 frameCentre) 260 if bestFrame: 261 newFrameSet += [bestFrame.multiframeID, bestFrame.extNum] 262 newFrameSet += [dbc.yes(), dbc.no()] 263 if bestFrame != masterFrame: 264 # Master frame is already removed from available frames 265 # @@NOTE: If this line fails examine algorithm closely. 266 availableFrames.remove(bestFrame) 267 else: 268 # set frame identifiers for this pass to defaults 269 newFrameSet += [dbc.intDefault()] 270 newFrameSet += 3*[dbc.tinyIntDefault()] 271 272 # Fill in the final frame set attributes: 273 newFrameSet += [Cu7.swVersion, dbc.yes(), dbc.no()] 274 275 return self.FrameSet(*newFrameSet)
276 277 #-------------------------------------------------------------------------- 278
    def _getAvailableFrames(self, frameSets):
        """
        Extract from the programme's associated image frame table all the
        frames that should have catalogues and are available for merging. If
        there exist any deep frames for a given filter then only deep frames
        will be selected for that filter. Any frames that are part of existing
        non-deprecated frame-sets will be removed.

        Also caches all queried frame metadata in self._frameData, keyed on
        (multiframeID, extNum).

        @param frameSets: List of existing non-deprecated frame sets.
        @type frameSets: list(FrameSet)

        @return: Frame metadata for mergeable frames.
        @rtype: list(Frame)

        """
        # Column list: astrometry attributes from CurrentAstrometry (D) plus
        # observation attributes from Multiframe (F)
        columns = "D.%s, F.%s" % (
            ", D.".join(['centralRA', 'centralDec', 'cx', 'cy', 'cz', 'htmID',
                         'multiframeID', 'extNum']),
            ", F.".join(['filterID', 'mjdObs', 'filename', 'confID', 'raBase',
                         'decBase']))

        tables = "CurrentAstrometry AS D, MultiframeDetector AS MFD, "\
                 "Multiframe AS F, ProgrammeFrame AS P"

        # Join conditions plus date range and non-deprecated stack selection
        selection = ("D.multiframeID=MFD.multiframeID AND D.extNum=MFD.extNum"
                     " AND D.multiframeID=P.multiframeID"
                     " AND D.multiframeID=F.multiframeID"
                     " AND utdate BETWEEN '%s' AND '%s'" % self.dateRange
                     + " AND F.%s AND MFD.%s" % ((DepCodes.selectNonDeprecated,)*2)
                     + " AND frameType LIKE '%stack'")

        if not self.programme.isAutomated():
            # UKIDSS shallow surveys - manual setup
            frames = self.archive.query(columns, tables,
                whereStr=selection + " AND programmeID=%s" % self.programmeID,
                orderBy="centralDec")
        else:
            # Automatically set up programme with productIDs assigned; only
            # use products from the most recent release
            curRelNum = \
                self.archive.queryAttrMax("releaseNum", "ProgrammeFrame",
                    where="productID>0 AND programmeID=%s" % self.programmeID)

            if not curRelNum:
                raise Cu7.CuError("This programme has no products assigned in "
                                  "ProgrammeFrame.")

            prodType = self.programme.getAttr("sourceProdType")
            if prodType != 'stack':
                # e.g. tiles/mosaics; overrides the '%stack' frameType filter
                selection += " AND frameType LIKE '%%%s%%'" % prodType

            # Use fieldID in Required<Product>
            frames = self.archive.query(columns + ", fieldID",
                fromStr=tables + ", Required%s AS R" % prodType,
                whereStr=selection + " AND R.productID=P.productID"
                    + " AND P.productID>0 AND R.programmeID=P.programmeID"
                    " AND P.programmeID=%s AND releaseNum=%s"
                    % (self.programmeID, curRelNum),
                orderBy="fieldID")

        # Cache complete frame metadata for later look-up by (mfID, extNum)
        self._frameData = dict(((frame.multiframeID, frame.extNum), frame)
                               for frame in frames)

        # Collect the (mfID, extNum) pairs already merged into existing sets
        mergedFrames = set()
        for frameSet in frameSets:
            mergedFrames.update((frameSet[index], frameSet[index+1])
                                for index in self._mfidIndices)

        return [frame for frame in frames
                if (frame.multiframeID, frame.extNum) not in mergedFrames]
348 349 #-------------------------------------------------------------------------- 350
    def _makeNewFrameSets(self):
        """
        Create lists of frame sets that are new and/or are to be updated.
        Checks for updates to existing frame sets, deleting stack items as they
        are checked off or merged in. Creates new frame sets as appropriate
        based on available frames, deleting stack items as they are merged in.
        Reports a warning for all current frames that have not been merged in
        (for whatever reason); also logs successful integration of frames into
        frame sets.

        Note that since the original versions of this algorithm, new features
        have been incorporated to give an option for matching detector frame
        extension numbers in addition to the usual proximity match.

        Side effects: updates self._frameData (via _getAvailableFrames) and
        removes records for updated frame sets from the source table and
        merge log.

        @return: List of new frame sets.
        @rtype: list(SourceMerger.FrameSet)

        """
        #### Checking for newly available frames to update existing frame sets

        # Find next frame set ID before existing frame sets are deleted.
        nextFrameSetID = \
            self._getNextID(self._frameSetIDName, self._mergeLog, addProgID=True)

        frameSets = self._getExistingFrameSets()

        Logger.addMessage("Creating list of available frames...")
        availableFrames = self._getAvailableFrames(frameSets)
        Logger.addMessage("...number of frames available for this "
                          "programme: %s" % len(availableFrames))

        # Indices for tuple defining frame in availableFrames list
        extensionIdx = (availableFrames[0]._fields.index('extNum')
                        if availableFrames else None)

        raIndex = (availableFrames[0]._fields.index('centralRA')
                   if availableFrames else None)

        Logger.addMessage("Creating list of updated frame sets...")
        updatedFrameSets = []
        for frameSet in frameSets:
            if not self.programme.isAutomated():
                # Match frame positions within tolerance - UKIDSS shallow only
                frameSetTol = self.programme.getAttr('frameSetTolerance')

                # work out what extension number is to be matched if required:
                extNum = (self._modalExtNum(frameSet)
                          if self._matchExtensions else None)

                matchingFrames = astro.matchFrames(availableFrames, raIndex,
                    (frameSet.ra, frameSet.dec), frameSetTol, extNum,
                    extensionIdx)
            else:
                # Match frames using product fieldIDs
                fieldID = \
                    queries.getFieldID(self.archive, frameSet.ID, self.programme)

                matchingFrames = [frame for frame in availableFrames
                                  if frame.fieldID == fieldID]

            # NB: _updateFrameSet removes any consumed frames from
            # availableFrames in place
            updatedFrameSet = \
                self._updateFrameSet(frameSet, matchingFrames, availableFrames)

            if updatedFrameSet:
                updatedFrameSets.append(updatedFrameSet)

        Logger.addMessage(
            "...%s frame sets will be updated" % len(updatedFrameSets))

        #### Clean source table and merge log of records to be updated ####

        if updatedFrameSets:
            # Add an index on frameset ID to speed this process up
            for index in self._idxInfo[self._sourceTable]:
                if index.name.endswith("CU7"):
                    self.archive.addIndex(index)

            # With index it is very quick to do 1000s of SQL delete statements
            Logger.addMessage(
                "Removing existing sources that are about to be remerged in "
                "updated frame sets, with new frames...")
            Logger.addMessage("...number of sources removed: %s" %
                sum(self.archive.delete(self._sourceTable,
                    "%s=%s" % (self._frameSetIDName, frameSet.ID))
                    for frameSet in updatedFrameSets))

            Logger.addMessage(
                "Temporarily removing merge log records containing frame sets "
                "to be updated...")
            Logger.addMessage("...number of rows temporarily removed: %s" %
                sum(self.archive.delete(self._mergeLog,
                    "%s=%s" % (self._frameSetIDName, frameSet.ID))
                    for frameSet in updatedFrameSets))

        #### Determining new frame sets ####

        Logger.addMessage("Creating list of frame sets to be merged...")

        # Initialise with update frame set list
        newFrameSets = updatedFrameSets

        # Consume remaining available frames, forming a new frame set around
        # each in turn
        while availableFrames:
            masterFrame = availableFrames.pop()
            if not self.programme.isAutomated():
                # Match frame positions within tolerance - UKIDSS shallow only
                frameSetTol = self.programme.getAttr('frameSetTolerance')
                frameCentre = (masterFrame.centralRA, masterFrame.centralDec)
                extNum = (masterFrame.extNum
                          if self._matchExtensions else None)

                matchingFrames = [masterFrame] + astro.matchFrames(
                    availableFrames, raIndex, frameCentre, frameSetTol, extNum,
                    extensionIdx)
            else:
                # Match frames using product fieldIDs
                matchingFrames = [masterFrame]
                matchingFrames += [frame for frame in availableFrames
                                   if frame.fieldID == masterFrame.fieldID]

            newFrameSets.append(self._createFrameSet(nextFrameSetID,
                                                     matchingFrames,
                                                     availableFrames))

            nextFrameSetID += 1

        Logger.addMessage(
            "Total number of frame sets to be merged: %s" % len(newFrameSets))

        # GPS only: reassign K-band epochs to best match the H/J epoch dates
        if self.programmeID == self.sysc.scienceProgs.get("GPS"):
            dateOfFrame = dict((frame.multiframeID, frame.mjdObs)
                               for frame in self._frameData.itervalues())

            reallocateEpochs(newFrameSets, dateOfFrame)

        return newFrameSets
485 486 #-------------------------------------------------------------------------- 487
488 - def _modalExtNum(self, frameSet):
489 """ 490 @return: The most common extension number in the frame set. 491 @rtype: int 492 493 @param frameSet: Merge log record of a frame set 494 @type frameSet: list(SourceMerge.FrameSet) 495 496 """ 497 if not frameSet: 498 return dbc.tinyIntDefault() 499 500 # @@TODO: Replace with utils.Bag class 501 extCounts = {} 502 for mfidIdx in self._mfidIndices: 503 extNum = frameSet[mfidIdx+1] 504 if extNum != dbc.tinyIntDefault(): 505 try: 506 extCounts[extNum] += 1 507 except KeyError: 508 extCounts[extNum] = 1 509 510 return max(extCounts.items(), key=itemgetter(1))[0]
511 512 #-------------------------------------------------------------------------- 513
514 - def _pickBestFrame(self, frames, passNum, numEpochs, centrePos):
515 """ 516 Returns the most suitable frame, given a list of frames of the same 517 colour. The frame that is closest to the supplied centre position is 518 returned if only one epoch of observation. Otherwise, for multi-epoch 519 observations, the most recently observed frame is returned. 520 521 @param frames: List of potential frames, in chronological order. 522 @type frames: list(Frame) 523 @param passNum: The pass number (1st, 2nd, ...). 524 @type passNum: int 525 @param numEpochs: The number of passes for this colour. 526 @type numEpochs: int 527 @param centrePos: Frame set centre position in degrees (RA, Dec). 528 @type centrePos: tuple(float, float) 529 530 @return: Frame of most suitable epoch for multiple epoch filters else 531 closest frame to supplied RA and Dec. If there isn't a 532 suitable frame then None is returned. 533 @rtype: Frame 534 535 """ 536 if not frames or not self.isMultiEpoch and passNum > 1: 537 return 538 539 # For single epoch filters pick the nearest frame to the centre 540 if not self.isMultiEpoch or numEpochs is 1: 541 # Calculate separation of each frame from specified central RA/Dec 542 centrePos = tuple(map(math.radians, centrePos)) 543 fPs = (map(math.radians, (frame.centralRA, frame.centralDec)) 544 for frame in frames) 545 546 seps = (astro.angularSep(centrePos, framePos) for framePos in fPs) 547 decSeps = ([frame, sep] for frame, sep in zip(frames, seps)) 548 549 return min(decSeps, key=itemgetter(1))[0] 550 551 # For multiple epoch filters select frames in time order 552 # First test there are enough observations to supply required epoch 553 # i.e. 
assumes that complete set of frames for every epoch supplied 554 if len(frames) >= passNum: 555 # Pick the most recent frame for the last epoch otherwise pick 556 # from the earliest frame onwards for each epoch (to handle the 557 # case of there being more frames than passes) 558 frame = (frames[-1] if passNum == numEpochs else frames[passNum-1]) 559 560 if passNum is 1: 561 # Always pick the earliest frame for the first epoch 562 return frame 563 564 elif numEpochs > 2 and Cu7.epochTolerance.get(self.programmeID): 565 # @@FIXME: For two epochs we always compare the earliest frame 566 # with the most recent frame to ensure the minimum epoch- 567 # separation criterion is met. However, with more epochs the 568 # existing algorithm assumes that the "middle epochs" are 569 # always sequential from the first - which may not be true for 570 # when a minimum epoch-separation criterion is in place. Would 571 # need to compare all time deltas to determine epoch allocation 572 raise Cu7.CuError("Epoch tolerances are not presently " 573 "supported for more than 2 epochs.") 574 else: 575 # Only valid as a separate epoch if enough time has passed 576 # since the previously selected epoch 577 epochSep = frame.mjdObs - frames[passNum-2].mjdObs 578 if epochSep > Cu7.epochTolerance.get(self.programmeID, 0): 579 return frame
580 581 #-------------------------------------------------------------------------- 582
    def _updateFrameSet(self, frameSet, matchingFrames, availableFrames):
        """
        Determines whether the given frame set should be updated with the
        given frames. If so, the updated frame set is returned.

        Frames consumed by the update are removed from availableFrames in
        place.

        @param frameSet: Frame set to update.
        @type frameSet: SourceMerger.FrameSet
        @param matchingFrames: Candidate frames of the same position to update
                               the frame set with.
        @type matchingFrames: list(Frame)
        @param availableFrames: A complete list of frames currently available
                                for merging.
        @type availableFrames: list(Frame)

        @return: The updated frame set, or None.
        @rtype: SourceMerger.FrameSet

        """
        if not matchingFrames:
            return

        newData = None
        # Start at the first per-filter multiframeID column after the frame
        # set header columns
        mfidIdx = self._numHeaderCol
        for filterID, numEpochs in self._filterPasses:
            # Reduce list to just those frames from the same filter
            frames = [frame for frame in matchingFrames
                      if frame.filterID == filterID]
            frames.sort(key=attrgetter("mjdObs"))

            lastFrame = None
            for passNum in range(1, numEpochs+1):
                # (multiframeID, extNum) currently recorded for this pass
                existingFrameID = frameSet[mfidIdx:mfidIdx+2]
                if existingFrameID[0] != dbc.intDefault():
                    # Don't replace existing frames, but need to keep list of
                    # earlier epoch frames already in the existing frame set
                    # for the pickBestFrame algorithm to work
                    lastFrame = self._frameData[existingFrameID]
                else:
                    if passNum > 1 and lastFrame:
                        # Safest to always re-sort than to assume prepend is OK
                        frames.append(lastFrame)
                        frames.sort(key=attrgetter("mjdObs"))
                        lastFrame = None

                    bestFrame = self._pickBestFrame(frames, passNum, numEpochs,
                                                    (frameSet.ra, frameSet.dec))

                    if bestFrame:
                        # @@NOTE: If this line fails examine algorithm closely.
                        availableFrames.remove(bestFrame)
                        newData = [bestFrame.multiframeID, bestFrame.extNum,
                                   dbc.yes(), dbc.no()]
                        columns = \
                            frameSet._fields[mfidIdx:mfidIdx+len(newData)]
                        frameSet = \
                            frameSet._replace(**dict(zip(columns, newData)))

                # Advance to the next pass's block of columns
                mfidIdx += self._numFilterCol

        if newData:
            # Flag this frame set as new for reseaming and update the cuEventID
            return frameSet._replace(newFrameSet=dbc.yes(),
                                     cuEventID=self.cuEventID)
646 647 #------------------------------------------------------------------------------ 648
def reallocateEpochs(frameSets, dateOfFrame):
    """
    Reallocates first and second epochs of a given filter to minimise the
    time difference between the dates of the other filters and that of the
    first epoch of the filter.

    @param frameSets: List of merge log frame sets to alter (modified in
                      place).
    @type frameSets: list(SourceMerger.FrameSet)
    @param dateOfFrame: Dictionary of Julian dates for every multiframe in the
                        given list of frame sets, referenced by multiframeID.
    @type dateOfFrame: dict(int: float)

    @note: Currently hard-wired for the GPS passband requirements only.

    """
    Logger.addMessage("Reallocating second-epoch frames...")

    # First decide which frame sets need their K_1/K_2 assignments swapped
    toSwap = []
    for pos, fSet in enumerate(frameSets):
        if fSet.k_2mfID == dbc.intDefault():
            # No second K epoch present - nothing to reallocate
            continue

        numHJ = 2 - [fSet.hmfID, fSet.jmfID].count(dbc.intDefault())
        if numHJ > 0:
            meanDate = (dateOfFrame.get(fSet.hmfID, 0) +
                        dateOfFrame.get(fSet.jmfID, 0)) / numHJ
            # K_1 should be the K frame observed closest to the average of
            # the H and J dates
            if abs(dateOfFrame[fSet.k_1mfID] - meanDate) > \
               abs(dateOfFrame[fSet.k_2mfID] - meanDate):
                toSwap.append(pos)

        elif dateOfFrame[fSet.k_1mfID] < dateOfFrame[fSet.k_2mfID]:
            # No H or J dates to compare: make the most recent K frame K_1
            toSwap.append(pos)

    if toSwap:
        # Determine schema positions of all passband-dependent columns
        fields = frameSets[toSwap[0]]._fields
        k_1Idx = fields.index('k_1mfID')
        k_2Idx = fields.index('k_2mfID')
        endIdx = 2 * k_2Idx - k_1Idx
        bandFields = fields[k_1Idx:endIdx]

        for pos in toSwap:
            # Swap all passband-dependent column data between K_1 and K_2
            current = frameSets[pos]
            swapped = current[k_2Idx:endIdx] + current[k_1Idx:k_2Idx]
            frameSets[pos] = \
                current._replace(**dict(zip(bandFields, swapped)))

    Logger.addMessage("...%s frames reallocated." % len(toSwap))
#------------------------------------------------------------------------------
# Entry point for script.

# Allow module to be imported as well as executed from the command line
if __name__ == '__main__':

    # Define additional command-line interface options for Cu7
    CLI.progArgs += [
        CLI.Argument("begin_date", "05A", isValOK=CLI.isDateOK),
        CLI.Argument("end_date", "05A", isValOK=CLI.isDateOK)]

    CLI.progOpts += [
        CLI.Option('f', 'force',
                   "override completion check of detection quality flagging"),
        CLI.Option('i', 'ignore_epochs',
                   "ignore multiple epochs for frame set association"),
        CLI.Option('m', 'mergeonly',
                   "update source table with newly merged sources, "
                   "without reseam"),
        CLI.Option('n', 'new', "recreate the source table from scratch"),
        CLI.Option('p', 'prepare', "prepare frame sets for mergelog only"),
        CLI.Option('r', 'reseam',
                   "don't do any source merging, just reseam the sources"),
        CLI.Option('x', 'full_seam',
                   "reseam the entire existing source table from scratch"),
        CLI.Option('z', 'full_seam_cont',
                   "continue reseaming from scratch following a crash")]

    cli = CLI(Cu7, "$Revision: 8615 $")
    Logger.isVerbose = cli.getOpt('verbose')
    Logger.addMessage(cli.getProgDetails())

    # Sanity-check mutually exclusive / dependent option combinations before
    # constructing the CU
    if cli.getOpt('mergeonly') and cli.getOpt('reseam'):
        Logger.addMessage("<Warning> -m/--mergeonly and -r/--reseam options "
                          "are mutually exclusive. Ignoring both...")

    if cli.getOpt('new'):
        if cli.getOpt('reseam'):
            raise SystemExit("Error: Option -r/--reseam only is not valid if "
                "option -n/--new is given to create a new source table!")

        # Destructive operation - require explicit user confirmation
        if not CLI.isConfirmed("The -n/--new option will destroy all data "
                               "presently in this programme's source table"):
            raise SystemExit

    if cli.getOpt('full_seam_cont'):
        if not cli.getOpt('reseam'):
            raise SystemExit("Error: If specifying the -z/--full_seam_cont "
                "option, then the -r/--reseam option should also be chosen.")

        elif cli.getOpt('full_seam'):
            raise SystemExit("Warning: Both -z/--full_seam_cont and "
                "-x/--full_seam are specified. Please choose one only.")

        elif not CLI.isConfirmed("You have specified the -z/--full_seam_cont "
            "option, this should only be used to continue reseaming a source "
            "table from *scratch* following a crash"):

            raise SystemExit

    cu = Cu7(cli.getArg('programmeID'), cli=cli)
    try:
        # Convert the semester-style begin/end date arguments to a date range
        cu.dateRange = cu.sysc.obsCal.dateRange(cli.getArg("begin_date"),
                                                cli.getArg("end_date"))
    except Exception as error:
        eType = "Invalid Option"
        Logger.addExceptionMessage(error, eType)
        raise SystemExit(eType + ": see log " + cu._log.pathName)

    # Transfer command-line options onto the CU object and run
    cu.continueCompleteReseam = cli.getOpt('full_seam_cont')
    cu.createNewTable = cli.getOpt('new')
    cu.doCompleteReseam = cli.getOpt('full_seam')
    cu.enableIngest = cu.enableIngest and not cli.getOpt('test')
    cu.isMultiEpoch = not cli.getOpt('ignore_epochs')
    if not (cli.getOpt('mergeonly') and cli.getOpt('reseam')):
        cu.onlyMerge = cli.getOpt('mergeonly')
        cu.onlyReseam = cli.getOpt('reseam')

    cu.prepareOnly = cli.getOpt('prepare')
    cu.skipQualityCheck = cli.getOpt('force')
    cu.run()

#------------------------------------------------------------------------------