Package invocations :: Package cu6 :: Module cu6
[hide private]

Source Code for Module invocations.cu6.cu6

   1  #! /usr/bin/env python 
   2  #------------------------------------------------------------------------------ 
   3  #$Id: cu6.py 10053 2013-09-06 17:26:19Z RossCollins $ 
   4  """ 
   5     Invoke CU6. Create/update synoptic source catalogues. 
   6   
   7     @author: N.J.G. Cross 
   8     @org:    WFAU, IfA, University of Edinburgh 
   9   
  10     @newfield contributors: Contributors, Contributors (Alphabetical Order) 
  11     @contributors: R.S. Collins 
  12   
  13     @todo: The CU6 class or at least the class it uses should be polymorphic 
  14            between the correlated and non-correlated synoptic cases 
  15     @todo: spatial/time variations - fit function f(x,y) to each intermediate 
  16     @todo: Recalibration changes. 
  17     @todo: Best match - just nearest object. If motion fitted then come back and 
  18            fix up BM later. Only a few objects. This way can add in new MFs to 
  19            BM without reprocessing whole table. 
  20  """ 
  21  #------------------------------------------------------------------------------ 
  22  from __future__      import division, print_function 
  23  from future_builtins import map, zip 
  24   
  25  from   collections          import Counter, defaultdict, namedtuple 
  26  from   itertools            import groupby 
  27  import math 
  28  import numpy 
  29  from   operator             import attrgetter, itemgetter 
  30  from   Scientific.Functions import LeastSquares as LSQ 
  31   
  32  import wsatools.Astrometry              as astro 
  33  from   wsatools.CLI                 import CLI 
  34  import wsatools.DbConnect.CommonQueries as queries 
  35  from   wsatools.DbConnect.CuSession import CuSession 
  36  import wsatools.DbConnect.DbConstants   as dbc 
  37  from   wsatools.DbConnect.DbSession import Ingester, Join, SelectSQL 
  38  import wsatools.DbConnect.Schema        as schema 
  39  from   wsatools.Logger              import ForLoopMonitor, Logger 
  40  import wsatools.Statistics              as stats 
  41  from   wsatools.SystemConstants     import DepCodes 
  42  import wsatools.Utilities               as utils 
  43  from   wsatools.ProgrammeBuilder    import SynopticSetup 
  44  #------------------------------------------------------------------------------ 
  45   
class Cu6SynopticSetup(SynopticSetup):
    """ Details of the synoptic database table setup relevant to CU6 only.
    """
    #--------------------------------------------------------------------------

    def getFromStr(self, isParts=False):
        """ Returns the synoptic tables to query for a SQL FROM statement.

            The source table ("s") and the synoptic best-match table ("x")
            are always included.  Unless isParts is set, a third table is
            appended: the synoptic source table ("ss") when the variability
            table is correlated, otherwise the detection table ("d").
        """
        tableList = ["%s AS s" % self.programme.getSourceTable(),
                     "%s AS x" % self.programme.getSynopticBestMatchTable()]

        if not isParts:
            if self.isVarTableCorrel():
                tableList.append(
                    "%s AS ss" % self.programme.getSynopticSourceTable())
            else:
                tableList.append(
                    "%s AS d" % self.programme.getDetectionTable())

        return ", ".join(tableList)

    #--------------------------------------------------------------------------

    def getWhereStr(self, frameSetID, isParts=False):
        """ Returns the SQL WHERE clause necessary to select on given
            frameSetID.

            The join between tables "ss"/"d" and "x" is only prefixed when
            isParts is not set, matching the FROM list built by getFromStr().
        """
        clause = "s.sourceID=x.sourceID AND s.frameSetID=%s" % frameSetID

        if not isParts:
            if self.isVarTableCorrel():
                clause = ("x.synFrameSetID=ss.synFrameSetID AND "
                          "x.synSeqNum=ss.synSeqNum AND " + clause)
            else:
                clause = ("x.multiframeID=d.multiframeID AND x.extNum=d.extNum"
                          " AND x.seqNum=d.seqNum AND " + clause)

        return clause
88 89 #------------------------------------------------------------------------------ 90
class Cu6(CuSession):
    """ Create/update synoptic source catalogues for a given database release.
    """
    #--------------------------------------------------------------------------
    # Define class constants (access as Cu6.varName)

    intRmsLimit = 3.
    """ ? (not referenced in the visible portion of this module --
        presumably an RMS clipping limit; TODO confirm)
    """
    maxDisp = (0.2 * 4.848e-6)
    """ Maximum astrometric displacement: compared against the |xi|/|eta|
        offsets in _calcAstrometry(). Presumably 0.2 arcsec expressed in
        radians (1 arcsec ~ 4.848e-6 rad) -- TODO confirm.
    """
    maxDist = 0.008
    """ Maximum distance in arcmin (used as the x.distanceMins cut when
        pairing sources with neighbour-table objects).
    """
    memFactAstrom = 0.07
    """ ? (presumably the fraction of total memory allowed for astrometric
        data; not referenced in the visible portion -- TODO confirm)
    """
    memFactBestMatch = 0.05
    """ Proportion of total memory to be taken up by sourceData for max
        efficiency.
    """
    memFracNoise = 0.07
    """ Proportion of total memory to be taken up by objData for max efficiency
    """
    memFracVariability = 0.06  # 0.1
    """ Proportion of total memory to be taken up by objData for max efficiency
    """
    minCount = 3
    """ Minimum number of matches to be used in statistical analysis
    """
    minNGoodObsVar = 5
    """ ? (presumably the minimum number of good observations required for
        variability statistics -- TODO confirm)
    """
    numObjsBin = 100
    """ ? (presumably the number of objects per magnitude bin in the noise
        model -- TODO confirm)
    """
    probVarLimit = 0.9
    """ ? (presumably the probability threshold for flagging a source as
        variable -- TODO confirm)
    """
    # @todo different chunksize for different parts -- use density to
    #       determine?
    frameSetChunkSize = 5  # @TODO: THIS IS A HACK
    """ number of framesets in a chunk to be processed
    """
    # Class parameters - should not be altered
    cuNum = 6                # Overrides CuSession default
    _autoCommit = True       # Overrides CuSession default
    _reqCuEventID = True     # Overrides CuSession default

    # Per-(frameSet, filter, part) processing context passed to the
    # statistics-gathering methods.
    ChunkInfo = namedtuple('ChunkInfo',
        'frameSetID filterID firstSID part chunkSizeSID shtName')

    # Simple inclusive numeric interval.
    Range = namedtuple("Range", 'min max')

    #------------------------------------------------------------------------
    # Define public member variable default values (access as obj.varName)
    # these need to be set from command-line options

    bestAperRange = Range(1, 6)  #: ? (presumably range of aperture numbers
                                 #: searched for the best aperture)
    calcVarOnly = False  #: ? (presumably: skip the best-match stage and only
                         #: recalculate variability -- TODO confirm)
    createNewTable = False  #: Recreate all variability tables from scratch.
    dateRange = CuSession.sysc.obsCal.dateRange()  #: ? (date range of epochs
                                                   #: to process)
    frameSetList = None  #: ? (optional explicit list of frame sets)
    keepHalfSpaces = False  #: Keep half-spaces when recreating tables.
    maxProdID = None  #: Maximum product ID.
    minProdID = None  #: Minimum product ID.
    motionModel = 'wgtstatic'  #: ? (motion-model name stored in
                               #: VarFrameSetInfo rows -- TODO confirm values)
    oneFrameSet = False  #: ? (process only the last frame set -- see usage)

    #--------------------------------------------------------------------------
    # Define private member variable default values (access prohibited)

    _dict1sML = None
    """ Dictionary of one-second magnitude limits referenced by filter ID.
    """
    _goodBitflags = None  #: :type _goodBitflags: list
    """ List of good quality bit-flags for variability calculation.
    """
    _initSFID = None
    """ Initial synoptic frameSetID.
    """
    _passbands = None
    """ List of filter passbands for the current programme as a tuple of filter
        ID and short name.
    """
    _srcTableIdx = ''
    """ Hint for index on the source table if it exists.
    """
    _synColours = None
    """ List of synoptic colours for the current programme as a tuple of
        filterID1, filterID2, shortname1, shortname2.
    """
    _synPassbands = None
    """ List of filter passbands that are synoptic for the current programme as
        a tuple of filter ID and short name.
    """
    _synSetup = None
    """ Cu6SynopticSetup details for current programme.
    """
    _tableIndices = None
    """ A dictionary of schema.Index object lists referenced by table name.
    """
    _initialPhtParam = [0.005, 1.0e-8, 1.0e-15]
    """ A list of initial parameters for the photometric fitting function
    """
    _initialAstParam = [0.005, 1.0e-8, 1.0e-15]
    """ A list of initial parameters for the astrometric fitting function
    """
    #--------------------------------------------------------------------------
    def _onRun(self):
        """ Run CU6.

            Pipeline for one programme: (1) optionally build the best-match
            table(s); (2) derive an expected magnitude limit per filter;
            (3) fit the per-frame-set noise model (BestAper table and
            VarFrameSetInfo); (4) compute per-source variability statistics
            (Variability table).  Each table is written to a CSV share file
            and bulk ingested chunk-by-chunk.
        """
        # @TODO: appending - primary / secondary sources?
        # Synoptic tables are only defined for deep surveys.
        if not self.programme.isDeep():
            Logger.addMessage("<Warning> This is not a deep survey. "
                "Will not create synoptic tables.")
            return

        self.availMemory = self.sysc.one_kilobyte * utils.getSystemMemoryKb()
        Logger.addMessage("Querying database for programme details...")
        self._setProgDetails()

        Logger.addMessage("Parsing schema...")
        tableSchema = schema.parseTables(self.programme.getSchemaScript())
        self._prepareTables(tableSchema)
        try:
            ingester = Ingester(self.archive, tableSchema, self.shareFileID)
        except schema.MismatchError as error:
            raise Cu6.CuError(error)

        # Frame sets to (re)process, keyed by destination table name.
        frameSetsForTable = self._getFrameSetDict()

        #### BestMatch Table(s) #### @TODO: This should be a separate method

        # @TODO: Should this choice be database driven?
        # @TODO: Replace IN with INNER JOIN?
        # NOTE(review): the nesting of the following statements under this
        # "if" is inferred from the calcVarOnly semantics -- confirm against
        # the original indentation.
        if not self.calcVarOnly:
            self._queryBadEpochSet()

            Logger.addMessage("Removing deprecated frames from the "
                "AstrometricInfo table...")
            Logger.addMessage("...%s rows deleted." %
                self.archive.delete(self.programme.getAstroInfoTable(),
                "multiframeID IN (%s)" % SelectSQL("multiframeID", "Multiframe",
                DepCodes.selectDeprecated)))

            if not self.keepHalfSpaces or not self.createNewTable:
                Logger.addMessage("Calculating half-spaces...")
                self._calcHalfSpaces()

            for bmTable in self._synSetup.bestMatchTables:
                Logger.addMessage("Calculating %s table" % bmTable)
                self._bestObjCalculator(ingester, bmTable, frameSetsForTable)

        # @TODO: Create an Orphan table
        #self._createOrphanTable()

        # @TODO: only VarFramesetInfo ...
        # @TODO: REPLACE IN with INNER JOIN?.
        frameSets = frameSetsForTable[self.programme.getVarFrameSetInfoTable()]
        if self.oneFrameSet:
            frameSets = frameSets[-1:]

        # Remove stale rows for the frame sets we are about to recompute.
        if frameSets:
            Logger.addMessage("Deleting old frame sets from %s..."
                % self.programme.getVarFrameSetInfoTable())

            self.archive.delete(self.programme.getVarFrameSetInfoTable(),
                whereStr="frameSetID IN (%s)" % ", ".join(map(str, frameSets)))

            self.archive.delete(self.programme.getBestAperTable(),
                whereStr="frameSetID IN (%s)" % ", ".join(map(str, frameSets)))

        # variability statistics.
        # meanJMag,meanJMagRms,meanJMagMad,numberJObs,nMissingJObs
        # iws (if more than one band), variableClass ##,mainPeriod,nPeriods
        # Select good sources from SourceXDetectionBestMatch
        # for each source find set of stats.
        # do on a frameSet basis to reduce memory overheads??

        # now calculate photometric variability.
        Logger.addMessage("Calculating photometric variability...")
        # Expected magnitude-limit per filter, derived from the modal total
        # exposure time for that filter.
        self.expLimDict = {}
        for filterID, _shtName in self._passbands:
            # @TODO: Use the new queryNumRows() groupBy argument
            # @FIXME: Do this by frameset and use totExpTime -- OSA

            # Total exposure time expression; the VSA stores exposure
            # components in MultiframeEsoKeys, and tile frames get double
            # the exposure (two pawprint passes -- presumably; TODO confirm).
            columns = "njitter*nustep*expTime"
            tables = ["Multiframe", "ProgrammeFrame"]
            if self.sysc.isVSA():
                tables.append("MultiframeEsoKeys")
                columns = columns.replace("expTime", "detNdit*detDit")
            if self.programme.getAttr("epochFrameType") == 'tile':
                columns = "2*" + columns

            frameTypeSel = queries.getFrameSelection(
                self.programme.getAttr("epochFrameType"), noDeeps=True,
                selType='%stack')

            # Histogram of total exposure times for this filter.
            totExpTimeList = self.archive.query(
                selectStr=columns + ", COUNT(*)",
                fromStr=Join(tables, ["multiframeID"]),
                whereStr=frameTypeSel + " AND programmeID=%s AND filterID=%s"
                    % (self.programmeID, filterID),
                groupBy=columns,
                orderBy=columns)

            if totExpTimeList:
                # Pick the largest exposure time that still accounts for
                # more than 10% of frames (list is ordered by exposure time,
                # so later qualifying entries overwrite earlier ones).
                totObs = sum(nObs for etime, nObs in totExpTimeList)
                totExpTime = totExpTimeList[0][0]
                for etime, nObs in totExpTimeList:
                    if nObs > 0.1 * totObs:
                        totExpTime = etime

                expML = self.varFilCols.expML.getDefaultValue()
                if totExpTime > 0:
                    # Scale the 1-second magnitude limit to the total
                    # exposure time: +1.25*log10(t) mag.
                    expML = self._dict1sML[filterID]
                    expML += 1.25 * math.log10(totExpTime)

                self.expLimDict[filterID] = expML
            else:
                self.expLimDict[filterID] = \
                    self.varFilCols.expML.getDefaultValue()

        #### BestAper Table #### @TODO: This should be a separate method

        Logger.addMessage("Calculating noise model for %s frame sets..."
            % len(frameSets))

        bestAperTable = self.programme.getBestAperTable()
        outputFile = self.archive.sharePath("%s_%s.csv"
            % (bestAperTable, self.shareFileID))

        # Per-frame-set fit results, filled in by _calcBestAperTable().
        self.bestAperDict = {}
        self.phtFit = defaultdict(dict)
        self.phtNDof = defaultdict(dict)
        self.astFit = defaultdict(dict)
        self.astNDof = defaultdict(dict)
        self.magMxPht = defaultdict(dict)
        self.magMxAst = defaultdict(dict)
        chunkedFrameSets = \
            list(utils.splitList(frameSets, Cu6.frameSetChunkSize))

        # Process and ingest one chunk of frame sets at a time to bound
        # memory use.
        progress = ForLoopMonitor(chunkedFrameSets)
        for frameSetChunks in chunkedFrameSets:
            progress.preMessage("Preparing chunk")
            self._calcBestAperTable(frameSetChunks, outputFile)
            ingester.ingestTable(bestAperTable, outputFile, isCsv=True)

        Logger.addMessage("Calculating default noise models where not enough "
            "data", alwaysLog=False)

        # Do not insert, but update with defaults...
        # @TODO: make sure this is all frameSets in current table
        allFrameSets = \
            self.archive.query("frameSetID", self.programme.getMergeLogTable())

        frameSets = self._getFinalFrameSets(allFrameSets)
        self._fillInDefaultNoise(frameSets)

        #### Variability Table #### @TODO: This should be a separate method

        varTable = self.programme.getVariabilityTable()
        frameSets = frameSetsForTable[varTable]
        if self.oneFrameSet:
            frameSets = frameSets[-1:]

        Logger.addMessage("Deleting old frame sets from %s..." % varTable)
        if frameSets:
            whereStr = "frameSetID IN (%s)" % ", ".join(map(str, frameSets))
            self.archive.delete(varTable, whereStr)

        # @@TODO: Make this a binary file for efficiency.
        outputFile = \
            self.archive.sharePath("%s_%s.csv" % (varTable, self.shareFileID))

        Logger.addMessage("Variability table will be written to %s" % outputFile)
        Logger.addMessage("Calculating variability statistics for %s frame "
            "sets..." % len(frameSets))

        chunkedFrameSets = \
            list(utils.splitList(frameSets, Cu6.frameSetChunkSize))

        progress = ForLoopMonitor(chunkedFrameSets)
        for frameSetChunks in chunkedFrameSets:
            progress.preMessage("Preparing chunk")
            self._calcVariabilityTable(frameSetChunks, outputFile)
            ingester.ingestTable(varTable, outputFile, isCsv=True)
381 382 #-------------------------------------------------------------------------- 383
384 - def _setProgDetails(self):
385 """ Sets class member variables for the current programme details. 386 """ 387 self._initSFID = self.programmeID * 65536L * 65536L 388 self._setGoodQualityBitflags() 389 self._synSetup = Cu6SynopticSetup(self.programme) 390 self._passbands = self.archive.query( 391 selectStr="Filter.filterID, shortname, isSynoptic", 392 fromStr=Join(["Filter", "RequiredFilters"], "filterID"), 393 whereStr="programmeID=%s" % self.programmeID) 394 395 self._synPassbands = [(fID, name) 396 for fID, name, isSynoptic in self._passbands if isSynoptic] 397 398 self._passbands = [(fID, name) 399 for fID, name, isSynoptic in self._passbands] 400 401 self._dict1sML = \ 402 dict(self.archive.query("filterID, oneSecMLVg", "Filter")) 403 404 if self._synSetup.isVarTableCorrel(): 405 self._synColours = [] 406 for ((filterID1, band1), (filterID2, band2)) \ 407 in zip(self._synPassbands, self._synPassbands[1:]): 408 if filterID2 <= 5: 409 self._synColours.append( 410 (filterID1, filterID2, band1, band2)) 411 412 minProd, maxProd = self.archive.query("MIN(productID), MAX(productID)", 413 "Required%s" % self.programme.getAttr("sourceProdType").title(), 414 "programmeID=%s" % self.programmeID, firstOnly=True) 415 416 self.minProdID = max(self.minProdID or minProd, minProd) 417 self.maxProdID = min(self.maxProdID or maxProd, maxProd)
418 419 #-------------------------------------------------------------------------- 420
    def _prepareTables(self, tableSchema):
        """ Checks ingest db tables exist and creates new ones if necessary.

            Parses the individual schemas for the astrometric-info,
            best-match, variability-frame-set-info, best-aperture and
            variability tables, then creates each one.  With createNewTable
            set the tables are overwritten (except the astrometric-info
            table when keepHalfSpaces is also set).

            @param tableSchema: Full parsed schema for the programme's
                                schema script.
            @raise Cu6.CuError: If a required table is missing from the
                                schema script.
        """
        try:
            astroInfoSchema, bestMatchSchema, varInfoSchema, bestAperSchema, \
                varSchema = schema.parseTables(self.programme.getSchemaScript(),
                [self.programme.getAstroInfoTable(),
                 self.programme.getSynopticBestMatchTable(),
                 self.programme.getVarFrameSetInfoTable(),
                 self.programme.getBestAperTable(),
                 self.programme.getVariabilityTable()])

            # Any additional best-match tables beyond the primary one.
            addBmTable = [bmTable for bmTable in self._synSetup.bestMatchTables
                if bmTable != self.programme.getSynopticBestMatchTable()]

            if addBmTable:
                addBestMatchSchema = \
                    schema.parseTables(self.programme.getSchemaScript(),
                    addBmTable)[0]

        except schema.Table.NotFoundError as error:
            raise Cu6.CuError(error)

        self._splitVarSchema(varInfoSchema, varSchema)
        # Non-survey programmes use a separate index script.
        self._tableIndices = schema.parseIndices(self.sysc.nsIndexScript
            if self.programme.isNonSurvey() else self.sysc.indexScript)

        Logger.addMessage("Creating new empty database tables..."
            if self.createNewTable else "Ensuring database tables exist...")

        # Keep the astrometric-info table (with its half-spaces) when
        # keepHalfSpaces is set, even if everything else is recreated.
        self.archive.createTable(astroInfoSchema, tableSchema,
            overWrite=self.createNewTable and not self.keepHalfSpaces)

        self.archive.createTable(bestMatchSchema, tableSchema,
            overWrite=self.createNewTable)

        if addBmTable:
            self.archive.createTable(addBestMatchSchema, tableSchema,
                overWrite=self.createNewTable)

        self.archive.createTable(varInfoSchema, tableSchema,
            overWrite=self.createNewTable)

        self.archive.createTable(bestAperSchema, tableSchema,
            overWrite=self.createNewTable)

        self.archive.createTable(varSchema, tableSchema,
            overWrite=self.createNewTable)
469 470 #-------------------------------------------------------------------------- 471
    def _calcBestAperTable(self, frameSetChunks, outputFile):
        """ Calculates noise model for the given frame sets and saves the data
            to the given CSV file for bulk ingest into the progBestAper table,
            as well as updating the progVarFrameSetInfo table.

            @param frameSetChunks: Sequence of frameSetIDs to process.
            @param outputFile:     Path of the CSV share file to write.
        """
        # One VarFrameSetInfo row per frame set, accumulated then ingested
        # in a single pass at the end.
        varFSRowData = []
        for frameSetID in frameSetChunks:
            Logger.addMessage("Calculating noise model for frameSetID=%s"
                % frameSetID, alwaysLog=False)

            # This ensures the same execution plan is always followed.
            self.archive.freeProcCache()

            # Do by parts here
            nParts = self._findNPartsVar(frameSetID, isInitStats=True)
            if not nParts:
                Logger.addMessage("<Info> No good epochs for frame set %s. "
                    "Skipping..." % frameSetID)
                continue
            else:
                Logger.addMessage("Splitting frame set processing into %s "
                    "parts" % nParts, alwaysLog=False)

            firstSID, rangeSID = self._querySourceIdRange(frameSetID)
            chunkSizeSID = rangeSID // nParts

            # First calculate noise model for frame set in each band.
            # NOTE: meanRmsList/meanAstromList are member variables filled
            # per part by _calcInitialStats() and consumed by
            # _calcNoiseModel().
            self.meanRmsList = {}
            self.meanAstromList = {}
            rowData = [frameSetID, self.cuEventID, self.motionModel]
            for filterID, shtName in self._synPassbands:
                self.shtName = shtName
                self.meanRmsList[filterID] = []
                self.meanAstromList[filterID] = []
                for part in range(nParts):
                    Logger.addMessage("Part %s of %s..." % (part + 1, nParts),
                        alwaysLog=False)

                    chunkInfo = Cu6.ChunkInfo(*[frameSetID, filterID, firstSID,
                        part, chunkSizeSID, shtName])

                    # Calculate initial statistics.
                    self._calcInitialStats(chunkInfo)

                Logger.addMessage("...collected data...", alwaysLog=False)

                # Calculate noise model
                Logger.addMessage("...calculating noise model...",
                    alwaysLog=False)

                # Fit the astrometric and photometric noise models; results
                # land in self.astFit/astNDof/phtFit/phtNDof keyed by
                # frameSetID then filterID.
                self._calcNoiseModel(frameSetID, filterID)
                rowData.extend(self.astFit[frameSetID][filterID][0])
                rowData.extend((self.astFit[frameSetID][filterID][1],
                    self.astNDof[frameSetID][filterID], 0))
                rowData.extend(self.phtFit[frameSetID][filterID][0])
                rowData.extend((self.phtFit[frameSetID][filterID][1],
                    self.phtNDof[frameSetID][filterID], 0))
                rowData.append(self.expLimDict[filterID])

            varFSRowData.append(rowData)
            self._writeToBestAperFile(outputFile, frameSetID)

        self._updateVarFrameSetInfo(varFSRowData)
535 536 #-------------------------------------------------------------------------- 537
    def _calcHalfSpaces(self):
        """ Calculates half-spaces and updates database with new values.

            Determines the set of intermediate multiframes that should have
            half-space entries and, if any are missing from the
            astrometric-info table, triggers _updateHalfSpaces().
        """
        reqProducts = queries.getRequiredProductIDs(self.programme,
            self.minProdID, self.maxProdID)

        productType = self.programme.getAttr("sourceProdType")
        # When epoch frames differ from the source product type (e.g. tiles
        # built from pawprints -- presumably; TODO confirm), follow the
        # product links down to the epoch-level products.
        if self.programme.getAttr("epochFrameType") \
                != self.programme.getAttr("sourceProdType"):

            # @TODO: Replace IN with INNER JOIN?
            prodLinks = self.archive.query("intProductID", "ProductLinks",
                whereStr="combiProdType='%s' AND intProdType='%s' and "
                "combiProductID IN (%s) AND programmeID=%s"
                % (self.programme.getAttr("sourceProdType"),
                   self.programme.getAttr("epochFrameType"),
                   ','.join(map(str, reqProducts)),
                   self.programmeID))

            if prodLinks:
                reqProducts = prodLinks
                productType = self.programme.getAttr("epochFrameType")

        intFrameType = self.programme.getAttr("epochFrameType")

        # check AstroInfo table.
        intMFIDs = queries.getIntStacks(self.archive, self.programmeID,
            reqProducts, productType, self.dateRange, allNuStep=True,
            intFrameType=intFrameType)

        frameTypes = queries.getFrameSelection(intFrameType, noDeeps=True)

        # Multiframes that already have complete, non-deprecated entries.
        astInfoMFIDs = self.archive.query(
            selectStr="DISTINCT Multiframe.multiframeID",
            fromStr=Join([self.programme.getAstroInfoTable(), "Multiframe"],
                "multiframeID"),
            whereStr=frameTypes + " AND isComplete=1 AND deprecated=0")

        # Recompute only when some required multiframes have no entry yet.
        if intMFIDs - set(astInfoMFIDs):
            self._updateHalfSpaces(frameTypes)
578 579 #-------------------------------------------------------------------------- 580
581 - def _updateVarFrameSetInfo(self, varFSRowData):
582 """ Ingests given data into the VarFrameSetInfo table. 583 """ 584 varFrameSetInfo = self.programme.getVarFrameSetInfoTable() 585 Logger.addMessage("Updating %s and %s table with %s frame sets " 586 % (varFrameSetInfo, self.programme.getBestAperTable(), 587 len(varFSRowData))) 588 589 for rowData in varFSRowData: 590 self.archive.insertData(varFrameSetInfo, rowData)
591 592 #-------------------------------------------------------------------------- 593
594 - def _calcVariabilityTable(self, frameSetChunks, outputFile):
595 """ Calculates variability statistics for the given frame sets and 596 saves the data to the given CSV file for bulk ingest into the 597 progVariability table. 598 """ 599 outFile = file(outputFile, "w") 600 for frameSetID in frameSetChunks: 601 self.archive.freeProcCache() 602 603 # Do by parts here 604 nParts = self._findNPartsVar(frameSetID, isInitStats=False) 605 firstSID, rangeSID = self._querySourceIdRange(frameSetID) 606 chunkSizeSID = rangeSID // nParts 607 Logger.addMessage("Calculating variability stats for frameSetID=%s" 608 % frameSetID, alwaysLog=False) 609 610 self.sourceStatsDict = {} 611 self.bestAperDict = self._queryBestAperTable(frameSetID) 612 for part in range(nParts): 613 Logger.addMessage("Part %s of %s..." % (part + 1, nParts), 614 alwaysLog=False) 615 616 # TODO: Group by count like CU19 617 chunkSQL = ("sourceID BETWEEN %s AND %s" 618 % (firstSID + part * chunkSizeSID, 619 firstSID + (part + 1) * chunkSizeSID - 1)) 620 621 # Calculate astrometry 622 astroOfSource = self._calcAstrometry(frameSetID, chunkSQL) 623 624 # Calculate correlated stats if need be. 625 correlOfSource = (self._calcCorrelatedStats(chunkSQL) 626 if self._synSetup.isVarTableCorrel() else None) 627 628 # Now calculate photometric stats for each filter 629 Logger.addMessage("...calculating photometric statistics...", 630 alwaysLog=False) 631 632 # best total exposure time is lowest (largest errors), 633 # but exclude any which make up less than 10% of total. 634 635 for filterID, shtName in self._synPassbands: 636 self.fltID = filterID 637 self.shtName = shtName 638 Logger.addMessage("...for filter %s" % shtName, 639 alwaysLog=False) 640 641 chunkInfo = Cu6.ChunkInfo(*[frameSetID, filterID, firstSID, 642 part, chunkSizeSID, shtName]) 643 644 self.sourceStatsDict[filterID] = {} 645 self._calcFullStats(chunkInfo) 646 647 # Calculate class 648 uniqueSources = self._queryUniqueSources(chunkSQL) 649 self._calcVariableClass(uniqueSources) 650 651 # Now write out. 
652 self._writeToVariability(outFile, frameSetID, uniqueSources, 653 astroOfSource, correlOfSource)
654 655 #-------------------------------------------------------------------------- 656
    def _writeToVariability(self, outFile, frameSetID, uniqueSources,
                            astroOfSource, correlOfSource):
        """ Writes CSV file to ingest into the Variability table.

            For each source one CSV line is emitted: basic columns,
            astrometric columns, photometric columns per synoptic passband,
            optional correlation columns (correlated mode only) and the
            variability class.  Missing data are replaced by each column's
            schema default.

            @param outFile:        Open writable file object for the CSV.
            @param frameSetID:     Frame set being written.
            @param uniqueSources:  Sources to write, in output order.
            @param astroOfSource:  Astrometric stats keyed by sourceID.
            @param correlOfSource: Correlated stats keyed by sourceID, or
                                   None when not in correlated mode.
        """
        astDefValues = [dflt.getDefaultValue() for dflt in self.astCols]
        if self._synSetup.isVarTableCorrel():
            # One default per synoptic colour pair.
            photDefValues = [self.corCols.iWS.getDefaultValue()]
            photDefValues *= len(self._synColours)

        # Basic + astrometry
        for sourceID in uniqueSources:
            row = (sourceID, self.cuEventID, frameSetID)
            row += tuple(astroOfSource.get(sourceID, astDefValues))
            fmtStr = getWriteFormat(self.bscCols,
                colList=['sourceID', 'cuEventID', 'frameSetID'], noBegComma=True)
            fmtStr += getWriteFormat(self.astCols)
            outFile.write(fmtStr % row)

            # photometric cols
            # @@TODO: Can't we just do a ','.join(map(str,
            #         or csv.join ?? In fact, can't we use CSV module?
            for filterID, _shtName in self._synPassbands:
                try:
                    row = tuple(self.sourceStatsDict[filterID][sourceID])
                except KeyError:  # No objects in this filterID
                    row = [col.getDefaultValue() for col in self.phtCols]

                fmtStr = getWriteFormat(self.phtCols)
                outFile.write(fmtStr % row)

            if self._synSetup.isVarTableCorrel():
                correlList = correlOfSource.get(sourceID, photDefValues)
                fmtStr = getWriteFormat(self.corCols)
                # @@TODO: Can't we just loop through correlList??
                for cor, _col in zip(correlList, self._synColours):
                    outFile.write(fmtStr % cor)

            # @@TODO - add in astrometric and correlated stats into
            #          classification
            # NOTE(review): varClassDict.get() returns None when sourceID is
            # missing, which would break the %-format -- presumably every
            # source in uniqueSources has an entry; TODO confirm.
            fmtStr = getWriteFormat(self.bscCols, ['variableClass'])
            outFile.write(fmtStr % self.varClassDict.get(sourceID))
            outFile.write("\n")
699 700 #-------------------------------------------------------------------------- 701
702 - def _bestObjCalculator(self, ingester, bmTable, frameSetsForTable):
703 """ 704 Method to calculate the most likely object in each frame to correspond 705 to the source. 706 707 @todo: Sort out definitions of member variables. 708 709 """ 710 # Fit motion in all pass-bands together!!!! 711 # 712 # Make sure detection table has default rows for each 713 # mfID,eNum combination 714 bmAttr = self._synSetup.getBMTableAttr(bmTable) 715 bmatchCsvFile = self.archive.sharePath("%s_%s.csv" % 716 (bmTable, self.shareFileID)) 717 718 Logger.addMessage("Writing table to %s" % bmatchCsvFile) 719 720 # add indices 721 indexName, indexTable = bmAttr.secObjID, bmAttr.secName 722 for idx in self._tableIndices[indexTable]: 723 if idx.name.endswith(indexName): 724 if self.archive.addIndex(idx): 725 Logger.addMessage("Index created.") 726 727 # and source table index 728 for idx in self._tableIndices[self.programme.getSourceTable()]: 729 if idx.name.endswith("CU7"): 730 self._srcTableIdx = " WITH (INDEX (%s))" % idx 731 if self.archive.addIndex(idx): 732 Logger.addMessage("Index created.") 733 734 frameSets = frameSetsForTable[bmTable] 735 if self.oneFrameSet: 736 frameSets = frameSets[-1:] 737 738 frameSets = self._prepareFrameSets(frameSets, bmAttr) 739 740 # Do all astrometric variance later 741 Logger.addMessage( 742 "Finding best matches for %s frame sets..." % len(frameSets)) 743 744 chunkedFrameSets = \ 745 list(utils.splitList(frameSets, Cu6.frameSetChunkSize)) 746 747 progress = ForLoopMonitor(chunkedFrameSets) 748 for frameSetChunks in chunkedFrameSets: 749 progress.preMessage("Preparing chunk") 750 for frameSetID in frameSetChunks: 751 # Get frames - synFrameSet list? 752 # multiframe list but need links from synFrameSet to multiframe 753 allFrames = self._queryPossFrames(frameSetID, bmAttr) 754 Logger.addMessage("Finding best matches for frameSetID=%s " 755 "(%s frames)" % (frameSetID, len(allFrames)), 756 alwaysLog=False) 757 758 if not allFrames: 759 Logger.addMessage("<WARNING> Frame set has no frames!" 
760 " frameSetID=%s" % frameSetID) 761 continue 762 763 detTables = self.programme.getMonthlyDetectionTables( 764 self.dateRange, excludeDeeps=True, 765 mfIDs=map(attrgetter("multiframeID"), allFrames)) 766 767 # @TODO: Do not include big tables in queryNParts. 768 neighbourTable = bmTable.replace("BestMatch", "") 769 tempTable = schema.Table() 770 tempTable.name = neighbourTable + "Temp" 771 fromStrNPrt = "%s AS s, %s AS x" \ 772 % (self.programme.getSourceTable(), neighbourTable) 773 774 whereStrNPrt = ("s.sourceID=x.masterObjID AND s.frameSetID=%d" 775 " AND x.distanceMins<%s" % (frameSetID, Cu6.maxDist) + 776 " AND (s.priOrSec=0 OR s.priOrSec=s.frameSetID)") 777 778 if bmAttr.isCorrel: 779 selectStr = ("(s.sourceID-s.frameSetID) AS sID," 780 "ss.synFrameSetID,ss.synSeqNum,ss.synopticID as objID," 781 "s.ra,s.dec,s.cx,s.cy,s.cz,x.distanceMins") 782 783 fromStr = fromStrNPrt + ", %s AS ss" \ 784 % self.programme.getSynopticSourceTable() 785 786 whereStr = whereStrNPrt + " AND x.slaveObjID=ss.synopticID" 787 788 else: 789 selectSidStr = "(sourceID - frameSetID) AS sID" 790 selectFrameStr = "r.multiframeID, extNum, seqNum, objID" 791 selectSrcStr = "ra, dec, cx, cy, cz, distanceMins" 792 tempTable.columns = \ 793 ', '.join([selectSidStr, selectSrcStr, "slaveObjID"]) 794 795 selectStr = \ 796 ', '.join([selectSidStr, selectFrameStr, selectSrcStr]) 797 798 selectByMonthStr = \ 799 ', '.join(["sID", selectFrameStr, selectSrcStr]) 800 801 fromStr = tempTable.name + " AS X" 802 whereStr = "x.slaveObjID=r.objID" \ 803 " AND r.%s" % DepCodes.selectNonDeprecated 804 805 if len(detTables) is 1 and 'Raw20' not in detTables[0]: 806 fromStr += ", Multiframe AS mf" 807 whereStr += " AND mf.multiframeID=r.multiframeID" \ 808 " AND mf.frameType NOT LIKE '%deep%'" 809 810 nParts = \ 811 self._queryNumParts(selectStr, fromStrNPrt, whereStrNPrt) 812 813 if not nParts: 814 Logger.addMessage("<Info> No good epochs for frameSet %s. " 815 "Skipping..." 
% frameSetID) 816 else: 817 Logger.addMessage("Splitting frame set processing into %s " 818 "parts" % nParts, alwaysLog=False) 819 820 firstSID, rangeSID = self._querySourceIdRange(frameSetID) 821 chunkSizeSID = rangeSID // nParts 822 823 for part in range(nParts): 824 Logger.addMessage("Part %s of %s..." 825 % (part + 1, nParts), alwaysLog=False) 826 827 sourceSel = " AND sourceID BETWEEN %s AND %s" \ 828 % (firstSID + part * chunkSizeSID, 829 firstSID + (part + 1) * chunkSizeSID - 1) 830 831 Logger.addMessage("Getting sources...", alwaysLog=False) 832 if bmAttr.isCorrel: 833 sourceData = self._querySources(selectStr, fromStr, 834 whereStr + sourceSel, orderBy="s.sourceID, " 835 "ss.synFrameSetID, x.distanceMins") 836 837 else: 838 # @TODO: Look into helpful primary keys, indices 839 Logger.addMessage("Creating %s..." % tempTable, 840 alwaysLog=False) 841 842 # Ensure it doesn't exist 843 self.archive.dropTable(tempTable, []) 844 845 numObjs = self.archive.copyTable(fromStrNPrt, 846 tempTable, tempTable.columns, 847 where=whereStrNPrt + sourceSel) 848 849 Logger.addMessage("Inserted %s rows into %s" 850 % (numObjs, tempTable), alwaysLog=False) 851 852 try: 853 sourceData = [] 854 for detTable, mfIdRange in detTables: 855 sourceData += self._querySources( 856 selectByMonthStr, 857 fromStr + ", %s AS r" % detTable, 858 whereStr + mfIdRange.replace( 859 "[multiframeID]", "r.multiframeID")) 860 861 finally: 862 self.archive.dropTable(tempTable, []) 863 864 sourceData.sort(key=attrgetter("distanceMins")) 865 sourceData.sort(key=attrgetter("multiframeID")) 866 sourceData.sort(key=attrgetter("sID")) 867 868 Logger.addMessage("Matching...", alwaysLog=False) 869 self._findBestMatches(frameSetID, sourceData, allFrames, 870 bmatchCsvFile, bmAttr.isCorrel) 871 872 # Write out... in multiple frame set batches? 873 ingester.ingestTable(bmTable, bmatchCsvFile, isCsv=True) 874 875 Logger.addMessage("Finished creating table %s" % bmTable)
876 877 #-------------------------------------------------------------------------- 878
    def _calcAstrometry(self, frameSetID, chunkSQL):
        """ Calculates an astrometric fit for every source in the current
            chunk, weighting each epoch's position by the expected rms from
            the per-frame-set astrometric noise model.

            @param frameSetID: Frame set being processed; keys the noise
                               model look-ups (self.astFit, self.magMxAst).
            @type  frameSetID: int
            @param chunkSQL:   SQL clause restricting the query to the
                               current chunk of sources.
            @type  chunkSQL:   str

            @return: Best astrometric fit data referenced by source ID.
            @rtype:  dict
        """
        Logger.addMessage("Calculating astrometric rms and fits",
                          alwaysLog=False)

        astromData = self._queryAstromData(chunkSQL)
        if not astromData:
            return {}

        bestFitDataDict = {}
        if self._synSetup.isVarTableCorrel():
            # Use magnitude info to get astrometric error.  Each row carries
            # one fixed-size block of attributes per pass-band:
            # (ppErrBits, seqNum, xi, eta, mag[, mjd on VSA]).
            nAttribPerBand = 6 if self.sysc.isVSA() else 5
            nOffsets = 4 if self.sysc.isVSA() else 5
            astromData = numpy.array(astromData)
            sourceID = astromData[0][0]
            xList, yList, zList, errList = [], [], [], []
            Logger.addMessage("Got data", alwaysLog=False)
            for data in astromData:
                # Leading columns: source ID and its unit Cartesian position.
                sID, cx, cy, cz = data[0], data[1], data[2], data[3]
                xl, yl, zl, el = [], [], [], []
                for ii, (filterID, _band) in enumerate(self._passbands):
                    ppErr = data[nOffsets + nAttribPerBand * ii]
                    seqNo = data[nOffsets + 1 + nAttribPerBand * ii]
                    xi = data[nOffsets + 2 + nAttribPerBand * ii]
                    eta = data[nOffsets + 3 + nAttribPerBand * ii]
                    mag = data[nOffsets + 4 + nAttribPerBand * ii]

                    # if self.sysc.isVSA(): mjd=data[nOffsets+5+nAttribPerBand*ii]
                    # Accept only clean (ppErr == 0), real (seqNo > 0)
                    # detections with offsets below Cu6.maxDisp and
                    # non-default Cartesian coordinates.
                    if ppErr == 0 and seqNo > 0 and abs(xi) < Cu6.maxDisp \
                       and abs(eta) < Cu6.maxDisp and cx >= -1 \
                       and cy >= -1 and cz >= -1:

                        # Displace the source's Cartesian position by the
                        # tangent-plane offsets (xi, eta) of this epoch.
                        zaxdisp = math.hypot(cx, cy)
                        xl.append(cx - cy * xi - cx * cz * eta / zaxdisp)
                        yl.append(cy + cx * xi - cy * cz * eta / zaxdisp)
                        zl.append(cz + zaxdisp * eta)
                        # Expected positional rms (arcsec -> radians) from
                        # the magnitude-dependent noise model.
                        el.append(math.radians(self._expRmsModel(filterID,
                            self.astFit[frameSetID][filterID][0], mag,
                            self.magMxAst[frameSetID][filterID]) / 3600))

                if sID == sourceID:
                    # Same source: accumulate this epoch's points.
                    xList.extend(xl)
                    yList.extend(yl)
                    zList.extend(zl)
                    errList.extend(el)

                elif sID != sourceID:
                    # New source: fit the completed group, then restart the
                    # accumulators from this row's points.
                    # NOTE(review): relies on astromData being ordered by
                    # source ID -- confirm _queryAstromData's ORDER BY.
                    bestFitDataDict[sourceID] = \
                        self._calcAstromFit(xList, yList, zList, errList)

                    sourceID = sID
                    xList = xl
                    yList = yl
                    zList = zl
                    errList = el

        else:  # Use magnitude data to get astrometric error
            sourceID = astromData[0][0]
            xList, yList, zList, errList = [], [], [], []
            Logger.addMessage("Got data", alwaysLog=False)
            for sID, _mjd, cx, cy, cz, mag, filterID in astromData:
                if sID == sourceID:
                    xList.append(cx)
                    yList.append(cy)
                    zList.append(cz)
                    errList.append(math.radians(self._expRmsModel(filterID,
                        self.astFit[frameSetID][filterID][0], mag,
                        self.magMxAst[frameSetID][filterID]) / 3600))

                elif sID != sourceID:
                    # New source: fit the completed group and re-seed the
                    # accumulators with the current row.
                    bestFitDataDict[sourceID] = \
                        self._calcAstromFit(xList, yList, zList, errList)

                    sourceID = sID
                    xList, yList, zList = [cx], [cy], [cz]
                    errList = [math.radians(self._expRmsModel(filterID,
                        self.astFit[frameSetID][filterID][0], mag,
                        self.magMxAst[frameSetID][filterID]) / 3600)]

        # Fit the final, still-pending source group (applies to both the
        # correlated and non-correlated branches).
        bestFitDataDict[sourceID] = \
            self._calcAstromFit(xList, yList, zList, errList)

        return bestFitDataDict
    def _calcFullStats(self, chunkInfo):
        """ This calculates all the photometric stats after the noise model has
            already been calculated.

            Results are stored per source in
            self.sourceStatsDict[filterID][sourceID] as PhtTuple entries;
            nothing is returned.

            @param chunkInfo: Current chunk details; frameSetID and filterID
                              attributes are used here.
        """
        # objData holds epoch MJDs for all (good + flagged) observations;
        # goodObjData holds per-aperture magnitudes of the good observations
        # only.  Both are assumed ordered by source, so the running offsets
        # beginVal / beginValgd delimit each source's slice.
        uniqueSources, objData, numMissingOfSrc, goodObjData, goodObsOfSrc, \
            flaggedObsOfSrc = self._queryObjData(chunkInfo)

        Logger.addMessage("...collected data...", alwaysLog=False)

        # Don't want sourceStatsDict
        magDataIndex = -1
        # @@TODO: change this message...
        Logger.addMessage("...About to sort through...", alwaysLog=False)
        beginVal = 0    # offset into objData (all observations)
        beginValgd = 0  # offset into goodObjData (good observations only)
        outputList = []
        chisqArray = numpy.zeros([len(uniqueSources)], numpy.float64)
        ndofArray = numpy.ones([len(uniqueSources)], numpy.int32)
        sigVarArray = numpy.zeros([len(uniqueSources)], numpy.float32)

        for ii, sourceID in enumerate(uniqueSources):
            numGoodObs = goodObsOfSrc[sourceID]
            numFlagObs = flaggedObsOfSrc[sourceID]
            if any([numGoodObs, numFlagObs]):
                endVal = beginVal + numGoodObs + numFlagObs
                # Use objData because you want all observations, not just
                # good ones. What about missing ones?
                mjdObs = numpy.sort(objData[beginVal:endVal])
                totPeriod = float(mjdObs[mjdObs.size - 1]) - float(mjdObs[0])
                if numGoodObs + numFlagObs > 1:
                    # Cadence statistics from consecutive epoch separations.
                    deltaT = numpy.subtract(mjdObs[1:], mjdObs[:-1])
                    minCad = numpy.min(deltaT)
                    medCad = numpy.median(deltaT)
                    maxCad = numpy.max(deltaT)
                else:
                    minCad = self.phtCols.minCadence.getDefaultValue()
                    medCad = self.phtCols.medCadence.getDefaultValue()
                    maxCad = self.phtCols.maxCadence.getDefaultValue()
            else:
                totPeriod = self.phtCols.totalPeriod.getDefaultValue()
                minCad = self.phtCols.minCadence.getDefaultValue()
                medCad = self.phtCols.medCadence.getDefaultValue()
                maxCad = self.phtCols.maxCadence.getDefaultValue()

            # calc simple stats
            if numGoodObs > 1 \
                and self.bestAperDict[(sourceID, chunkInfo.filterID)] > 0:

                bestAper = self.bestAperDict[(sourceID, chunkInfo.filterID)]
                #aperMagArray=goodObjData[beginValgd:(beginValgd+numGoodObs),
                # magDataIndex+self.bestAperRange[0]:magDataIndex+self.bestAperRange[1]]
                #bestAperData=numpy.transpose(
                # aperMagArray)[bestAper-self.bestAperRange[0]][
                # numpy.nonzero(
                # numpy.where(numpy.greater(numpy.transpose(
                # aperMagArray)[bestAper-self.bestAperRange[0]],0.),
                # numpy.transpose(
                # aperMagArray)[bestAper-self.bestAperRange[0]],0))]
                bestAperData = goodObjData[beginValgd:(beginValgd + numGoodObs),
                                           magDataIndex + bestAper]
                # @TODO: Add attribute nDefaultMag
                # @TODO: Recalculate min without defaults - others too?
                med, mad = stats.clippedMADHistogram(
                    bestAperData, minSd=0.0001, retValues="median,MAD")

                mean = stats.calcMean(bestAperData)
                std = stats.calcSd(bestAperData)
                skew = stats.calcSkew(bestAperData)
                minMag = numpy.min(bestAperData)
                maxMag = numpy.max(bestAperData)
                # Guard against a zero rms before dividing.
                std_n0 = numpy.where(numpy.greater(std, 0), std, 1)
                nSigma = numpy.where(numpy.greater(std, 0),
                                     (bestAperData - mean) / std_n0, 0)

                # Number of epochs deviating by more than 3 sigma.
                ngt3sig = numpy.nonzero(numpy.greater(numpy.abs(nSigma), 3))[0]\
                    .size

                ndofArray[ii] = numGoodObs - 1  # (bestAperData.size-1.)
                # The noise-model fit is usable only if all three Strateva
                # coefficients are above their default values.
                isValid = (self.phtFit[chunkInfo.frameSetID]
                           [chunkInfo.filterID][0][0]
                           > 0
                           and
                           self.phtFit[chunkInfo.frameSetID]
                           [chunkInfo.filterID][0][1]
                           > self.varFilCols.bStratPht.getDefaultValue()
                           and
                           self.phtFit[chunkInfo.frameSetID]
                           [chunkInfo.filterID][0][2]
                           > self.varFilCols.cStratPht.getDefaultValue())

                expRms = (self.phtCols.ExpRms.getDefaultValue()
                          if not isValid else
                          self._expRmsModel(chunkInfo.filterID,
                              self.phtFit[chunkInfo.frameSetID]
                              [chunkInfo.filterID][0],
                              mean, self.magMxPht[chunkInfo.frameSetID]
                              [chunkInfo.filterID]))

                expRms_n0 = numpy.where(numpy.greater(expRms, 0.), expRms, 1.)
                # Reduced chi-squared about the mean, using the expected rms.
                chisqArray[ii] = numpy.where(numpy.greater(expRms, 0.),
                    numpy.sum(numpy.power(numpy.divide(
                        numpy.subtract(bestAperData, mean),
                        expRms_n0), 2)) / ndofArray[ii], 0.)
                # Intrinsic rms: observed scatter with the expected noise
                # removed in quadrature (defaulted when std < expRms).
                intRms = numpy.where(
                    numpy.logical_and(
                        numpy.greater_equal(std, expRms),
                        numpy.greater(expRms, 0.)), numpy.sqrt(numpy.abs(
                            numpy.subtract(numpy.power(std_n0, 2),
                                           numpy.power(expRms_n0, 2)))),
                    self.phtCols.IntRms.getDefaultValue())
                sigVarArray[ii] = intRms / expRms if expRms > 0. else 0

            else:
                # Too few good observations or no best aperture: defaults.
                bestAper = self.phtCols.bestAper.getDefaultValue()
                ngt3sig = self.phtCols.Ngt3sig.getDefaultValue()
                mean = self.phtCols.meanMag.getDefaultValue()
                med = self.phtCols.medianMag.getDefaultValue()
                std = self.phtCols.MagRms.getDefaultValue()
                mad = self.phtCols.MagMAD.getDefaultValue()
                skew = self.phtCols.skewness.getDefaultValue()
                minMag = self.phtCols.MinMag.getDefaultValue()
                maxMag = self.phtCols.MaxMag.getDefaultValue()
                # NOTE(review): duplicate assignment -- ngt3sig was already
                # set to the same default above.
                ngt3sig = self.phtCols.Ngt3sig.getDefaultValue()
                expRms = self.phtCols.ExpRms.getDefaultValue()
                intRms = self.phtCols.IntRms.getDefaultValue()
                ndofArray[ii] = 0
                chisqArray[ii] = 0
                sigVarArray[ii] = 0

            outputList.append(
                self.PhtTuple(numGoodObs, numFlagObs, numMissingOfSrc[sourceID],
                    bestAper, minCad, medCad, maxCad, totPeriod, float(mean), med,
                    std, mad, skew, minMag, maxMag, ngt3sig, expRms, intRms,
                    chisqArray[ii], ndofArray[ii],
                    self.phtCols.probVar.getDefaultValue(),
                    self.phtCols.VarClass.getDefaultValue()))

            beginVal += numGoodObs + numFlagObs
            beginValgd += numGoodObs
            #else:
            #    chisqArray[ii]=0.
            #    ndofArray[ii]=0

        # No data --- default
        # @TODO: weird rounding errors
        pVar = numpy.where(ndofArray > 0,
            stats.probChisq(chisqArray, numpy.where(ndofArray > 0, ndofArray, 1)),
            self.phtCols.probVar.getDefaultValue())

        # Variable iff significant intrinsic rms AND significant chi-squared
        # probability.
        varClass = numpy.logical_and(
            numpy.greater(sigVarArray, Cu6.intRmsLimit),
            numpy.greater(pVar, Cu6.probVarLimit)).astype(numpy.int32)

        for ii, sourceID in enumerate(uniqueSources):
            photTuple = outputList[ii]
            if not (photTuple.nGoodObs > 1
                    and self.bestAperDict[(sourceID, chunkInfo.filterID)] > 0):

                # Sources without usable statistics get defaults throughout.
                chisqArray[ii] = self.phtCols.chiSqpd.getDefaultValue()
                ndofArray[ii] = self.phtCols.ndof.getDefaultValue()
                pVar[ii] = self.phtCols.probVar.getDefaultValue()
                varClass[ii] = self.phtCols.VarClass.getDefaultValue()

            self.sourceStatsDict[chunkInfo.filterID][sourceID] = self.PhtTuple(
                photTuple.nGoodObs, photTuple.nFlaggedObs, photTuple.nMissingObs,
                photTuple.bestAper, photTuple.minCadence, photTuple.medCadence,
                photTuple.maxCadence, photTuple.totalPeriod, photTuple.meanMag,
                photTuple.medianMag, photTuple.MagRms, photTuple.MagMAD,
                photTuple.skewness, photTuple.MinMag, photTuple.MaxMag,
                photTuple.Ngt3sig, photTuple.ExpRms, photTuple.IntRms,
                chisqArray[ii], ndofArray[ii], pVar[ii], varClass[ii])
    def _calcInitialStats(self, chunkInfo):
        """ Calculate which is best aperture and statistics that are necessary
            for noise model.

            For every source with more than one good observation the aperture
            with the smallest clipped MAD is chosen, and for well-behaved
            sources the (mean mag, rms) pairs are accumulated in
            self.meanRmsList / self.meanAstromList for the later noise-model
            fit.  The chosen aperture is recorded in self.bestAperDict.

            @param chunkInfo: Current chunk details; filterID is used here.
        """
        uniqueSources, goodObjData, goodObsOfSrc = \
            self._queryObjData(chunkInfo, isInitStats=True)

        Logger.addMessage("...collected data...", alwaysLog=False)

        # Column offset of the first aperture magnitude in goodObjData.
        magDataIndex = 2
        beginValgd = 0  # running row offset of the current source's slice
        for sourceID in uniqueSources:
            numGoodObs = goodObsOfSrc[sourceID]
            if numGoodObs > 1:
                # test apers between bestAperRange values
                aperMagArray = goodObjData[beginValgd:(beginValgd + numGoodObs),
                    magDataIndex + self.bestAperRange.min:
                    magDataIndex + self.bestAperRange.max]

                medianAr, madAr = stats.clippedMADHistogram(
                    aperMagArray, minSd=0.0001, retValues="median,MAD")

                # What if all are crap.
                tmpAr = numpy.where(numpy.greater(medianAr, 0.), madAr, 99.)
                if numpy.min(tmpAr) < 99.:
                    # Aperture with the smallest MAD wins.
                    bestAper = numpy.nonzero(numpy.equal(tmpAr,
                        numpy.min(tmpAr)))[0][0] + self.bestAperRange.min
                else:
                    bestAper = self.phtCols.bestAper.getDefaultValue()

                if bestAper > 0.:
                    med = medianAr[bestAper - self.bestAperRange.min]

                    # remove defaults
                    bestAperData = numpy.transpose(
                        aperMagArray)[bestAper - self.bestAperRange.min][
                        numpy.nonzero(
                            numpy.where(numpy.greater(numpy.transpose(
                                aperMagArray)[bestAper - self.bestAperRange.min], 0.),
                                numpy.transpose(
                                aperMagArray)[bestAper - self.bestAperRange.min], 0))]

                    mean = stats.calcMean(bestAperData)
                    std = stats.calcSd(bestAperData)

                    # Convert to cartesian
                    cartVec = astro.convSkyToCartesian(
                        goodObjData[beginValgd:(beginValgd + numGoodObs), 1:3])

                    # @TODO: 0h boundary
                    medPos = stats.clippedMADHistogram(
                        cartVec[:, 0:3], minSd=0.0001, retValues="median")

                    # Angular scatter (arcsec) about the median position.
                    deltAstrom = numpy.multiply(3600., astro.angularSepArr(
                        cartVec, numpy.ones([cartVec.shape[0], 3]) * medPos,
                        False))

                    stdAst = stats.calcSd(deltAstrom)
                    # Only stellar-like (class -1/-2), well-sampled sources
                    # in a sensible magnitude range feed the noise model.
                    if (goodObjData[beginValgd][0].astype(numpy.int32)
                        in [-1, -2] and
                        med < (self.expLimDict[chunkInfo.filterID] - 0.5) and
                        med > (self.expLimDict[chunkInfo.filterID] - 8.5) and
                        numGoodObs >= 4):
                        # Important for noise model.
                        self.meanRmsList[chunkInfo.filterID].append(
                            (mean, std))
                        self.meanAstromList[chunkInfo.filterID].append(
                            (mean, stdAst))
                        self.bestAperDict[(sourceID, chunkInfo.filterID)] = \
                            bestAper
                    else:
                        self.bestAperDict[(sourceID, chunkInfo.filterID)] = \
                            dbc.intDefault()
                else:
                    self.bestAperDict[(sourceID, chunkInfo.filterID)] = \
                        dbc.intDefault()
            # NOTE(review): sources with numGoodObs <= 1 get no bestAperDict
            # entry; callers guard the look-up with a short-circuiting
            # "numGoodObs > 1 and ..." test -- confirm intended.

            beginValgd += numGoodObs
    def _calcNoiseModel(self, frameSetID, filterID):
        """ Fits the Strateva noise model (rms as a function of magnitude) to
            the photometric and astrometric scatter of one frame set/filter.

            The (mean mag, rms) pairs gathered by _calcInitialStats are
            sorted by magnitude and binned; the median and 1.48*MAD of each
            bin form the data for a least-squares fit.  With 20 or fewer
            sources, default fit values are recorded instead.  Results go
            into self.phtFit/self.astFit, the nDof dictionaries and the
            turning-point magnitudes self.magMxPht/self.magMxAst.

            @param frameSetID: Frame set being processed.
            @type  frameSetID: int
            @param filterID:   Filter being processed.
            @type  filterID:   int
        """
        #minMag,maxMag=(self.expML-8.5),(self.expML-0.5)
        # At least 10 magnitude bins, aiming for Cu6.numObjsBin objects each.
        nBins = max(len(self.meanRmsList[filterID]) // Cu6.numObjsBin, 10)
        nObjsBin = len(self.meanRmsList[filterID]) // nBins
        meanRmsData = numpy.array(sorted(self.meanRmsList[filterID]))
        # Astrometry
        meanAstromData = numpy.array(sorted(self.meanAstromList[filterID]))
        if meanRmsData.shape[0] > 20:

            binData = []
            binAstData = []
            for magBin in range(nBins):
                # Median magnitude and median/MAD rms of this bin's slice.
                bestMag = numpy.median(meanRmsData[(
                    magBin * nObjsBin):((magBin + 1) * nObjsBin), 0:1])
                medErr, madErr = stats.calcMAD(meanRmsData[(
                    magBin * nObjsBin):((magBin + 1) * nObjsBin), 1:2],
                    0.000001)
                medAstErr, madAstErr = stats.calcMAD(meanAstromData[(
                    magBin * nObjsBin):((magBin + 1) * nObjsBin), 1:2],
                    0.000001)
                # 1.48 * MAD approximates a Gaussian standard deviation.
                binData.append((float(bestMag), float(medErr),
                                1.48 * float(madErr)))
                binAstData.append((float(bestMag), float(medAstErr),
                                   1.48 * float(madAstErr)))
            self.phtFit[frameSetID][filterID] = LSQ.leastSquaresFit(
                stratModel, self._initialPhtParam, binData)
            self.phtNDof[frameSetID][filterID] = len(binData) - 3
            self.astFit[frameSetID][filterID] = LSQ.leastSquaresFit(
                stratModel, self._initialAstParam, binAstData)
            self.astNDof[frameSetID][filterID] = len(binAstData) - 3

            self.magMxPht[frameSetID][filterID] = \
                self._findTurningPoint(stratModel, filterID,
                                       self.phtFit[frameSetID][filterID])

            self.magMxAst[frameSetID][filterID] = \
                self._findTurningPoint(stratModel, filterID,
                                       self.astFit[frameSetID][filterID])

        else:
            # Too few sources to fit: record the column defaults so that
            # _fillInDefaultNoise can substitute programme-wide values later.
            self.phtFit[frameSetID][filterID] = ((
                self.varFilCols.aStratPht.getDefaultValue(),
                self.varFilCols.bStratPht.getDefaultValue(),
                self.varFilCols.cStratPht.getDefaultValue()),
                self.varFilCols.chiSqPht.getDefaultValue())
            self.phtNDof[frameSetID][filterID] = \
                self.varFilCols.nDofPht.getDefaultValue()
            self.astFit[frameSetID][filterID] = ((
                self.varFilCols.aStratAst.getDefaultValue(),
                self.varFilCols.bStratAst.getDefaultValue(),
                self.varFilCols.cStratAst.getDefaultValue()),
                self.varFilCols.chiSqAst.getDefaultValue())
            self.astNDof[frameSetID][filterID] = \
                self.varFilCols.nDofAst.getDefaultValue()
            self.magMxPht[frameSetID][filterID] = 0.
            self.magMxAst[frameSetID][filterID] = 0.
        del meanRmsData
        del meanAstromData
        # NOTE(review): the messages interpolate self.shtName rather than a
        # name derived from the filterID argument -- confirm shtName tracks
        # the filter currently being processed.
        Logger.addMessage(
            "Best fit to minimimum photometic rms as a function of magnitude using "
            "Strateva model is: [%6.5f %6.5e %6.5e], with a chi-squared "
            "value of %5.2f in frameSet %d and observed in filter %s." %
            (self.phtFit[frameSetID][filterID][0][0],
             self.phtFit[frameSetID][filterID][0][1],
             self.phtFit[frameSetID][filterID][0][2],
             self.phtFit[frameSetID][filterID][1],
             frameSetID, self.shtName),
            alwaysLog=False)
        Logger.addMessage(
            "Best fit to minimimum astrometic rms as a function of magnitude using "
            "Strateva model is: [%6.5f %6.5e %6.5e], with a chi-squared "
            "value of %5.2f in frameSet %d and observed in filter %s." %
            (self.astFit[frameSetID][filterID][0][0],
             self.astFit[frameSetID][filterID][0][1],
             self.astFit[frameSetID][filterID][0][2],
             self.astFit[frameSetID][filterID][1],
             frameSetID, self.shtName),
            alwaysLog=False)
        # Add data to VarFrameSetInfo table
1288 - def _calcVariableClass(self, uniqueSources):
1289 """ ? 1290 """ 1291 self.varClassDict = {} 1292 for sourceID in uniqueSources: 1293 maxNgoodObs = max([self.sourceStatsDict[filterID][sourceID].nGoodObs 1294 for filterID, _shtName in self._synPassbands]) 1295 1296 # Merged variability class 1297 # Add in astrometric var? 1298 sweight = 0 1299 swrms = 0 1300 for filterID, _shtName in self._synPassbands: 1301 if maxNgoodObs > Cu6.minNGoodObsVar: 1302 weight = max(0, ( 1303 self.sourceStatsDict[filterID][sourceID].nGoodObs - 1304 Cu6.minNGoodObsVar) / 1305 (maxNgoodObs - Cu6.minNGoodObsVar)) 1306 else: 1307 weight = 0. 1308 sweight += weight 1309 swrms += weight * ( 1310 self.sourceStatsDict[filterID][sourceID].IntRms / 1311 self.sourceStatsDict[filterID][sourceID].ExpRms) 1312 photVarClass = (0 if sweight <= 0 else numpy.where( 1313 numpy.greater(swrms / sweight, Cu6.intRmsLimit), 1, 0)) 1314 self.varClassDict[sourceID] = photVarClass
1315 1316 #-------------------------------------------------------------------------- 1317
    def _createOrphanTable(self):
        """ Creates an orphan table for the programme.

            For every non-deprecated intermediate multiframe detector, finds
            detections brighter than a depth-scaled magnitude limit that have
            no entry in the synoptic best-match table (and carry none of the
            excluding quality bits), and inserts them into the programme's
            orphan table.
        """
        # Find detections that not matched to sources

        # For each intermediate multiframe in programme
        # @@todo: get many near edges of survey region (deep sources
        #         in dither offset) and around bright stars
        # @@todo: sort out SynopticSource
        # Margin subtracted from the scaled magnitude limit.
        dML = 1.

        frameTypeSel = queries.getFrameSelection(
            self.programme.getAttr("epochFrameType"), noDeeps=True)
        mfIDs = self.archive.query(
            selectStr="md.multiframeID,extNum,md.filterID,totalExpTime",
            fromStr="ProgrammeFrame as p,Multiframe as m,"
                    "MultiframeDetector as md",
            whereStr="m.multiframeID=p.multiframeID AND m.multiframeID="
                     "md.multiframeID AND %s AND p.productID<0 AND "
                     "md.%s" % (frameTypeSel, DepCodes.selectNonDeprecated))
        for mfID, extNum, filterID, totExpTime in mfIDs:
            # Scale the 1-second magnitude limit to this exposure time.
            magLim = self._dict1sML[filterID] \
                + 1.25 * math.log10(totExpTime) - dML
            # @TODO: mjdObs -- VSA
            # @TODO: replace IN with INNER JOIN?
            orphans = self.archive.query(
                selectStr="r.multiframeID,r.extNum,%s,r.seqNum,mjdObs,"
                    "r.filterID,a.ra,a.dec,p.aperMag3,r.ppErrBits,%s"
                    % (self.cuEventID, magLim + dML),
                fromStr="%s as r,%s as p,%s as a,Multiframe as m" % (
                    self.programme.getDetectionTable(),
                    self.programme.getPhotometryTable(),
                    self.programme.getAstrometryTable()),
                whereStr="r.multiframeID=p.multiframeID AND r.extNum=p.extNum "
                    "AND r.seqNum=p.seqNum AND p.multiframeID=%d AND p.extNum=%d "
                    "AND m.multiframeID=r.multiframeID AND a.multiframeID="
                    "r.multiframeID AND a.extNum=r.extNum AND a.seqNum=r.seqNum "
                    "AND objID NOT IN (SELECT objID FROM %s as rr,%s as pp,%s as b"
                    " WHERE b.multiframeID=p.multiframeID AND b.extNum=p.extNum "
                    "AND pp.aperMag3>0 AND pp.aperMag3<%s AND "
                    "b.multiframeID=rr.multiframeID AND b.extNum=rr.extNum AND "
                    "b.seqNum=rr.seqNum AND pp.multiframeID=rr.multiframeID AND "
                    "pp.extNum=rr.extNum AND pp.seqNum=rr.seqNum) AND ppErrBits "
                    "& 0x00400000 = 0 AND ppErrBits & 0x00080000 = 0 AND "
                    "ppErrBits & 0x00000010 = 0 AND p.aperMag3>0 AND "
                    "p.aperMag3<%s"
                    % (mfID, extNum, self.programme.getDetectionTable(),
                       self.programme.getPhotometryTable(),
                       self.programme.getSynopticBestMatchTable(),
                       magLim, magLim))
            for row in orphans:
                self.archive.insertData(self.programme.getOrphanTable(), row,
                                        enforcePKC=True)
1368 1369 #-------------------------------------------------------------------------- 1370
    def _updateHalfSpaces(self, fTypeSel):
        """ Calculates half-spaces for each detector of the correct frame type.

            For every non-deprecated detector in the programme's date range
            that is not already complete, half-space constraints are derived
            from the detector boundary and the dither-offset boundary, each
            taken both slightly inside and slightly outside, and inserted
            into the programme's astrometric info table.

            @param fTypeSel: SQL selection clause for the frame type.
            @type  fTypeSel: str
        """
        # TODO Get next cuEventID
        astroInfoTable = self.programme.getAstroInfoTable()
        dateRange = "utDate BETWEEN '%s' AND '%s'" % self.dateRange

        # USE LEFT JOIN with astroInfo table
        # Setup half-spaces 2 pixels away from dither boundary and detector
        # boundary
        # @TODO: update for tiles
        nCoordInHSDef = 3
        initPos = [0, 2, 4, 6]
        boundWidth = 2
        # Offsets moving the 8 boundary points (corners + edge mid-points)
        # inwards by boundWidth pixels...
        innerOff = [(boundWidth, boundWidth), (boundWidth, 0),
                    (boundWidth, -boundWidth), (0, -boundWidth),
                    (-boundWidth, -boundWidth), (-boundWidth, 0),
                    (-boundWidth, boundWidth), (0, boundWidth)]

        # ...and the mirrored offsets moving them outwards.
        outerOff = [(-offX, -offY) for offX, offY in innerOff]

        # TODO: No nustep in OSA
        hSpaceFrames = self.archive.query(
            selectStr="MultiframeDetector.multiframeID,"
            "MultiframeDetector.extNum,"
            "MultiframeDetector.axis1length,MultiframeDetector.axis2length,"
            "MultiframeDetector.detRows,MultiframeDetector.detCols,m.nuStep,"
            "crPixX, crPixY, crValX, crValY, cd11, cd12, cd21, cd22, "
            "pv21, pv22, pv23%s, isComplete"
            % (', pv24, pv25' if self.sysc.isVSA() else
               ', 0. as pv24, 0. as pv25'),
            fromStr="ProgrammeFrame as p,CurrentAstrometry as c,"
            "Multiframe as m,MultiframeDetector LEFT JOIN %s ON "
            "MultiframeDetector.multiframeID=%s.multiframeID AND "
            "MultiframeDetector.extNum=%s.extNum" % ((astroInfoTable,) * 3),
            whereStr="p.programmeID=%d AND p.multiframeID=m.multiframeID AND "
            "m.multiframeID=MultiframeDetector.multiframeID AND c.multiframeID="
            "MultiframeDetector.multiframeID AND c.extNum="
            "MultiframeDetector.extNum AND %s AND %s AND "
            "MultiframeDetector.%s"  # AND isComplete NOT IN (1, 3)"
            % (self.programmeID, fTypeSel, dateRange,
               DepCodes.selectNonDeprecated))

        numRowsAdded = 0
        for frame in hSpaceFrames:
            # @@TODO: This condition shouldn't be necessary if selected out,
            # and then it would be worthwhile having a ForLoopMonitor,
            # otherwise its inconsistent, and in fact distractingly pointless
            # when no updates necessary.
            if frame.isComplete == 1 or frame.isComplete == 3:
                continue

            # Corners and mid points of frames
            detEdges = [(1, 1),
                        (1, int(0.5 * (1 + frame.axis2length))),
                        (1, frame.axis2length),
                        (int(0.5 * (1 + frame.axis1length)),
                         frame.axis2length),
                        (frame.axis1length, frame.axis2length),
                        (frame.axis1length,
                         int(0.5 * (1 + frame.axis2length))),
                        (frame.axis1length, 1),
                        (int(0.5 * (1 + frame.axis1length)), 1)]

            # Pixel size of the dither margin along each axis: the part of
            # the stacked/tiled frame not covered by all jitter positions.
            xdthOff = \
                (frame.axis1length
                 - int(math.sqrt(frame.nuStep)) * frame.detRows
                 if self.programme.getAttr("epochFrameType") == 'stack' else
                 frame.axis1length - int(math.sqrt(frame.nuStep)) *
                 min(self.sysc.xWidthTileDith, self.sysc.xWidthTileEar))

            ydthOff = \
                (frame.axis2length
                 - int(math.sqrt(frame.nuStep)) * frame.detCols
                 if self.programme.getAttr("epochFrameType") == 'stack' else
                 frame.axis2length - int(math.sqrt(frame.nuStep)) *
                 self.sysc.yWidthTileDith)

            ditEdges = [
                (xdthOff, ydthOff),
                (xdthOff, int(0.5 * (1 + frame.axis2length))),
                (xdthOff, frame.axis2length - ydthOff),
                (int(0.5 * (1 + frame.axis1length)),
                 frame.axis2length - ydthOff),
                (frame.axis1length - xdthOff, frame.axis2length - ydthOff),
                (frame.axis1length - xdthOff,
                 int(0.5 * (1 + frame.axis2length))),
                (frame.axis1length - xdthOff, ydthOff),
                (int(0.5 * (1 + frame.axis1length)), ydthOff)]

            wcs = (frame.crPixX, frame.crPixY, frame.crValX, frame.crValY,
                   frame.cd11, frame.cd12, frame.cd21, frame.cd22, frame.pv21,
                   frame.pv22, frame.pv23, frame.pv24, frame.pv25)

            values = [frame.multiframeID, frame.extNum, self.cuEventID]
            # Four half-space sets: detector/dither boundary x inner/outer.
            for bound in [detEdges, ditEdges]:
                for off in [innerOff, outerOff]:
                    edges = [(x + off[ii][0], y + off[ii][1])
                             for ii, (x, y) in enumerate(bound)]

                    edges.append(edges[0])  # Add initial to end of sequence
                    coords = astro.convPixelToCartSky(edges, wcs=wcs,
                        projection=self.programme.getProjection())

                    values += astro.calcHSpaces(coords, initPos,
                                                nCoordInHSDef)

            values += [dbc.realDefault()] * 11
            # Bump the completion state (NULL from the LEFT JOIN counts as 0).
            values.append(int(frame.isComplete or 0) + 1)
            numRowsAdded += self.archive.insertData(astroInfoTable, values)

        Logger.addMessage("...%s new rows inserted into %s"
                          % (numRowsAdded, astroInfoTable))
1477 1478 #-------------------------------------------------------------------------- 1479
    def _calcCorrelatedStats(self, chunkSQL):
        """ Calculates the Welch-Stetson correlated-variability index for
            each source, for every synoptic colour (pass-band pair).

            @param chunkSQL: SQL clause restricting the query to the current
                             chunk of sources.
            @type  chunkSQL: str

            @return: For each source ID a list with one Welch-Stetson index
                     per colour in self._synColours.
            @rtype:  dict
        """
        # Only filters with filterID <= 5 take part in the correlation.
        nPasses = \
            sum(1 for filterID, _band in self._synPassbands if filterID <= 5)

        correlDict = {}
        if nPasses > 0:
            # correlData rows: sourceID, then (mag, magErr) pairs per pass,
            # then one ppErrBits column per pass; each source's nObs rows are
            # contiguous.  NOTE(review): layout inferred from the index
            # arithmetic below -- confirm against _queryCorrelData.
            correlData = self._queryCorrelData(chunkSQL)
            uSources = self._queryUniqueCorrelSources(chunkSQL)
            counter = 0  # row offset of the current source's block
            for sourceID, nObs in uSources:
                correlList = []
                for ii in range(len(self._synColours)):
                    # Combined quality bits of the colour's two bands.
                    errBitCol = 1 + 2 * nPasses + ii
                    errBitArray = numpy.add(
                        correlData[counter:counter + nObs,
                                   errBitCol:errBitCol + 1],
                        correlData[counter:counter + nObs,
                                   errBitCol + 1:errBitCol + 2])
                    # Epochs where both bands have positive magnitudes.
                    obsArray = numpy.logical_and(numpy.greater(
                        correlData[counter:counter + nObs,
                                   1 + 2 * ii:2 + 2 * ii], 0),
                        numpy.greater(correlData[counter:counter + nObs,
                                                 3 + 2 * ii:4 + 2 * ii], 0))
                    # First band's magnitudes at clean, detected epochs.
                    tmp = numpy.where(numpy.logical_and(numpy.equal(
                        errBitArray, 0), obsArray), correlData[
                        counter:counter + nObs, 1 + 2 * ii:2 + 2 * ii], 0)
                    magA = tmp[numpy.nonzero(tmp)]

                    if magA.size > 1:
                        # Matching error and second-band columns.
                        tmp = numpy.where(numpy.logical_and(numpy.equal(
                            errBitArray, 0), obsArray), correlData[
                            counter:counter + nObs, 2 + 2 * ii:3 + 2 * ii], 0)
                        magErrA = tmp[numpy.nonzero(tmp)]
                        tmp = numpy.where(numpy.logical_and(numpy.equal(
                            errBitArray, 0), obsArray), correlData[
                            counter:counter + nObs, 3 + 2 * ii:4 + 2 * ii], 0)
                        magB = tmp[numpy.nonzero(tmp)]
                        tmp = numpy.where(numpy.logical_and(numpy.equal(
                            errBitArray, 0), obsArray), correlData[
                            counter:counter + nObs, 4 + 2 * ii:5 + 2 * ii], 0)
                        magErrB = tmp[numpy.nonzero(tmp)]
                        iWS = float(stats.welchStetson(magA, magErrA, magB,
                                                       magErrB))
                    else:
                        # Too few clean epochs to correlate: default index.
                        iWS = self.corCols.iWS.getDefaultValue()

                    correlList.append(iWS)
                correlDict[sourceID] = correlList
                counter += nObs

        return correlDict
1531 1532 #-------------------------------------------------------------------------- 1533
1534 - def _doDifficultMatches(self, checkPosList, checkPosDict, 1535 nonDetectMatchList):
1536 """ ? 1537 """ 1538 checkPosList = sorted(set(checkPosList)) 1539 for mfID, eNum, xExt, yExt, backSizeX, backSizeY in checkPosList: 1540 info = checkPosDict[mfID][eNum] 1541 positions = [(ra, dec) for _sID, ra, dec in info] 1542 1543 xyList = numpy.array(astro.convSkyToPixel(positions, mfID, eNum, 1544 self.archive, projection=self.programme.getProjection())) 1545 1546 if xyList.size <= 0: continue 1547 1548 xx = numpy.transpose(xyList[:, 0:1])[0] 1549 yy = numpy.transpose(xyList[:, 1:2])[0] 1550 detF = numpy.logical_and(numpy.logical_and(numpy.greater(xx, 0.), 1551 numpy.greater(yy, 0.)), numpy.logical_and( 1552 numpy.less_equal(xx, xExt), numpy.less_equal(yy, yExt))) 1553 1554 # @todo Change this to 1 dither offset 1555 flag = numpy.multiply(numpy.logical_or(numpy.logical_or( 1556 numpy.less(xx, backSizeX), 1557 numpy.less(yy, backSizeY)), numpy.logical_or( 1558 numpy.greater(xx, (xExt - backSizeX)), 1559 numpy.greater(yy, (yExt - backSizeY)))).astype( 1560 numpy.int16), 2) 1561 1562 for ii in range(xx.size): 1563 if detF[ii]: 1564 nonDetectMatchList.append((info[ii][0], mfID, eNum, 1565 self.cuEventID, 1566 dbc.intDefault(), 1567 dbc.realDefault(), flag[ii]))
1568 1569 #-------------------------------------------------------------------------- 1570
1571 - def _expRmsModel(self, filterID, params, mag, magMx):
1572 """ ? 1573 """ 1574 if mag >= self.expLimDict[filterID] - 8 and mag <= magMx: 1575 return stratModel(params, mag) 1576 1577 if mag <= self.expLimDict[filterID] - 8: 1578 return stratModel(params, self.expLimDict[filterID] - 8) 1579 1580 if mag >= magMx: 1581 return stratModel(params, magMx)
1582 1583 #-------------------------------------------------------------------------- 1584
1585 - def _fillInDefaultNoise(self, frameSets):
1586 """ ? 1587 """ 1588 defPhtFit = {} 1589 defAstFit = {} 1590 missingPhtFilts = [] 1591 missingAstFilts = [] 1592 self._getCurrentFits() 1593 for filterID, _shtName in self._passbands: 1594 dataPht = [] 1595 dataAst = [] 1596 for mBin in range(9): 1597 mag = self.expLimDict[filterID] - 8.5 + mBin 1598 rmsPhtList = [] 1599 rmsAstList = [] 1600 for frameSetID in frameSets: 1601 if (filterID in self.phtFit[frameSetID] and 1602 self.phtFit[frameSetID][filterID][1] > 0.): 1603 rmsPhtList.append(self._expRmsModel(filterID, 1604 self.phtFit[frameSetID][filterID][0], mag, 1605 self.magMxPht[frameSetID][filterID])) 1606 if (filterID in self.phtFit[frameSetID] and 1607 self.astFit[frameSetID][filterID][1] > 0.): 1608 rmsAstList.append(self._expRmsModel(filterID, 1609 self.astFit[frameSetID][filterID][0], mag, 1610 self.magMxAst[frameSetID][filterID])) 1611 # Calc mag,rms ... 1612 # average ... 1613 if len(rmsPhtList) > 0: 1614 medPhtRms, madPhtRms = stats.calcMAD(numpy.array(rmsPhtList), 0.000001) 1615 dataPht.append((mag, float(medPhtRms), 1616 1.48 * float(madPhtRms))) 1617 if len(rmsAstList) > 0: 1618 medAstRms, madAstRms = stats.calcMAD(numpy.array(rmsAstList), 0.000001) 1619 dataAst.append((mag, float(medAstRms), 1.48 * float(madAstRms))) 1620 1621 # fit... 1622 if len(dataPht) > 0: 1623 defPhtFit[filterID] = LSQ.leastSquaresFit(stratModel, self._initialPhtParam, dataPht) 1624 else: 1625 missingPhtFilts.append(filterID) 1626 1627 if len(dataAst) > 0: 1628 defAstFit[filterID] = LSQ.leastSquaresFit(stratModel, self._initialAstParam, dataAst) 1629 else: 1630 missingAstFilts.append(filterID) 1631 1632 # Set missing defaults.. 
1633 defPhtFit = self._updateDefaults(defPhtFit, missingPhtFilts) 1634 defAstFit = self._updateDefaults(defAstFit, missingAstFilts) 1635 1636 # Now fill in values 1637 for frameSetID in frameSets: 1638 for filterID, shtName in self._passbands: 1639 if filterID in self.astFit[frameSetID] \ 1640 and self.astFit[frameSetID][filterID][1] < 0: 1641 self.astFit[frameSetID][filterID] = defAstFit[filterID] 1642 self.magMxAst[frameSetID][filterID] = \ 1643 self._findTurningPoint(stratModel, filterID, 1644 self.astFit[frameSetID][filterID]) 1645 # Update 1646 entryList = [ 1647 (shtName + "aStratAst", self.astFit[frameSetID][filterID][0][0]), 1648 (shtName + "bStratAst", self.astFit[frameSetID][filterID][0][1]), 1649 (shtName + "cStratAst", self.astFit[frameSetID][filterID][0][2]), 1650 (shtName + "chiSqAst", self.astFit[frameSetID][filterID][1]), 1651 (shtName + "nDofAst", self.varFilCols.nDofAst.getDefaultValue()), 1652 (shtName + "isDefAst", 1)] 1653 1654 self.archive.update(self.programme.getVarFrameSetInfoTable(), 1655 entryList, 1656 where="frameSetID=%s" % frameSetID) 1657 1658 elif filterID not in self.astFit[frameSetID]: 1659 self.astFit[frameSetID][filterID] = defAstFit[filterID] 1660 self.magMxAst[frameSetID][filterID] = \ 1661 self._findTurningPoint(stratModel, filterID, 1662 self.astFit[frameSetID][filterID]) 1663 1664 if filterID in self.phtFit[frameSetID] \ 1665 and self.phtFit[frameSetID][filterID][1] < 0: 1666 1667 self.phtFit[frameSetID][filterID] = defPhtFit[filterID] 1668 self.magMxPht[frameSetID][filterID] = \ 1669 self._findTurningPoint(stratModel, filterID, 1670 self.phtFit[frameSetID][filterID]) 1671 entryList = [ 1672 (shtName + "aStratPht", self.phtFit[frameSetID][filterID][0][0]), 1673 (shtName + "bStratPht", self.phtFit[frameSetID][filterID][0][1]), 1674 (shtName + "cStratPht", self.phtFit[frameSetID][filterID][0][2]), 1675 (shtName + "chiSqPht", self.phtFit[frameSetID][filterID][1]), 1676 (shtName + "nDofPht", 
self.varFilCols.nDofPht.getDefaultValue()), 1677 (shtName + "isDefPht", 1)] 1678 1679 self.archive.update(self.programme.getVarFrameSetInfoTable(), 1680 entryList, 1681 where="frameSetID=%s" % frameSetID) 1682 1683 elif filterID not in self.phtFit[frameSetID]: 1684 self.phtFit[frameSetID][filterID] = defPhtFit[filterID] 1685 self.magMxPht[frameSetID][filterID] = \ 1686 self._findTurningPoint(stratModel, filterID, 1687 self.phtFit[frameSetID][filterID])
1688 1689 #-------------------------------------------------------------------------- 1690
    def _findBestMatches(self, frameSetID, sourceData, allFrames, bmatchCsvFile,
                         isCorrel):
        """
        Writes the best-match rows for every multiply-detected source in
        the given frame set to the CSV file, then classifies each frame on
        which a detection was expected but not matched (good non-detection,
        flagged non-detection, or "difficult" position needing a pixel-level
        check) and ingests those via L{_writeResults}.

        @param frameSetID:   Frame set being processed (added to each sID to
                             form the full sourceID).
        @param sourceData:   Detection rows, which must already be sorted by
                             sID (see the groupby below).
        @param allFrames:    Rows of candidate frames including the
                             half-space boundary attributes queried by
                             L{_queryPossFrames}.
        @param bmatchCsvFile: Path of the CSV file appended to.
        @param isCorrel:     If True, matches are per synoptic frame set
                             (correlated passbands) rather than per frame.
        """
        # Calculate some things up-front for a minor performance gain
        allFramesArray = numpy.array(allFrames)
        idxOfFrame = dict(((frame.multiframeID, frame.extNum), idx)
                          for idx, frame in enumerate(allFrames))

        checkPosDict = \
            dict((frame.multiframeID, defaultdict(list)) for frame in allFrames)

        dupeDetections = \
            utils.getDuplicates(source.objID for source in sourceData)

        # NOTE(review): outFile is not closed here - presumably
        # _writeResults() takes ownership and closes it; confirm.
        outFile = open(bmatchCsvFile, 'a')
        checkPosList = []
        nonDetectMatchList = []

        # NB: Assumes sourceData is already sorted by sID (do here if not)
        for sourceID, detections in groupby(sourceData, attrgetter("sID")):
            detections = list(detections)
            if len(detections) < 2:
                continue

            # sID is an offset within the frame set; the full sourceID
            # includes the frameSetID component
            sourceID += frameSetID

            # eliminate any problems
            isMatchedFrame = numpy.zeros(len(allFrames), bool)
            goodMatches = []
            lastFrame = None
            for detection in detections:
                curFrame = (detection.synFrameSetID if isCorrel else
                            detection.multiframeID)

                # Skip duplicate matches on the same frame / syn frame set
                if curFrame == lastFrame:
                    continue

                filters = ((filterID for filterID, _band in self._passbands)
                           if isCorrel else [None])

                for filterID in filters:
                    mfID, eNum = ((detection.multiframeID, detection.extNum)
                                  if not isCorrel else
                                  self.synFrameSetDict[filterID][detection.synFrameSetID])

                    if mfID > 0 and eNum > 0:
                        try:
                            isMatchedFrame[idxOfFrame[(mfID, eNum)]] = True
                        except KeyError:
                            # Outside frameSet but matching synframesets
                            # overlap.
                            Logger.addMessage("<Warning> Detector obs: "
                                "mfID=%s and extNum=%s is not in list of "
                                "expected frames, but is far away from the "
                                "centre of the frameSet %s "
                                % (mfID, eNum, frameSetID), alwaysLog=False)

                matchStr = str(sourceID)
                matchStr += (",%s" % detection.synFrameSetID
                             if isCorrel else ",%s,%s" % (mfID, eNum))

                dupeFlag = int(detection.objID in dupeDetections)
                matchStr += ",%s,%s,%5.3f,%s\n" % (self.cuEventID,
                    detection.synSeqNum if isCorrel else detection.seqNum,
                    detection.distanceMins * 60, dupeFlag)

                goodMatches.append(matchStr)
                lastFrame = curFrame

            outFile.writelines(goodMatches)

            # NOTE(review): uses the final detection's unit vector as the
            # source position - assumes detections of one source are
            # effectively co-located; confirm.
            goodMatchMFID, flagMatchMFID, checkPos = \
                self._findMissingDetections(
                    numpy.array([detection.cx, detection.cy, detection.cz]),
                    allFramesArray, isMatchedFrame)

            # Find multiframes where match should have occurred.
            for idx, frame in enumerate(allFrames):
                mfID = frame.multiframeID
                eNum = frame.extNum
                if goodMatchMFID[idx]:
                    # Clear non-detection: flag 0
                    nonDetectMatchList.append((sourceID, mfID, eNum,
                        self.cuEventID, dbc.intDefault(), dbc.realDefault(), 0))

                elif flagMatchMFID[idx]:
                    # Non-detection in a boundary region: flag 2
                    nonDetectMatchList.append((sourceID, mfID, eNum,
                        self.cuEventID, dbc.intDefault(), dbc.realDefault(), 2))

                elif checkPos[idx]:
                    # Ambiguous position - defer to _doDifficultMatches()
                    checkPosList.append((mfID, eNum, frame.axis1Length,
                        frame.axis2Length, frame.xOff, frame.yOff))

                    checkPosDict[mfID][eNum].append(
                        (sourceID, detection.ra, detection.dec))

        Logger.addMessage(
            "Just have to sort out more difficult objects.", alwaysLog=False)

        self._doDifficultMatches(checkPosList, checkPosDict, nonDetectMatchList)

        Logger.addMessage("Writing out more difficult objects",
                          alwaysLog=False)

        self._writeResults(outFile, nonDetectMatchList, isCorrel)
1796 1797 #-------------------------------------------------------------------------- 1798
1799 - def _findMissingDetections(self, pos, allFrames, alreadyMatchedMFID):
1800 """ ? 1801 """ 1802 boundPos = numpy.zeros(len(allFrames), numpy.int16) 1803 for ii in range(2): # Dith, Det?? 1804 for jj in range(2): # Inner and outer bounds 1805 isInHSpace = numpy.ones(len(allFrames), numpy.bool) 1806 for edge in range(4): 1807 initIndex = 6 + 32 * ii + 16 * jj + 4 * edge 1808 normVec = allFrames[:, initIndex:initIndex + 3] 1809 isInHSpace = numpy.logical_and(isInHSpace, 1810 numpy.less_equal(numpy.dot(normVec, pos), 1811 allFrames[:, initIndex + 3])) 1812 1813 boundPos += numpy.logical_not(isInHSpace).astype(numpy.int16) 1814 1815 # boundPos=0,1,2,3,4 1816 checkPos = numpy.logical_and(numpy.logical_not(alreadyMatchedMFID), 1817 numpy.logical_or(numpy.equal(boundPos, 1), numpy.equal(boundPos, 3))) 1818 1819 goodMatchMFID = numpy.logical_and( 1820 numpy.logical_not(alreadyMatchedMFID), numpy.equal(boundPos, 0)) 1821 1822 flagMatchMFID = numpy.logical_and( 1823 numpy.logical_not(alreadyMatchedMFID), numpy.equal(boundPos, 2)) 1824 1825 return goodMatchMFID, flagMatchMFID, checkPos
1826 1827 #-------------------------------------------------------------------------- 1828
    def _findNPartsVar(self, frameSetID, isInitStats):
        """ Calculate number of parts (check the best value for all filters).

        Estimates, for each synoptic passband (and, when not isInitStats,
        also for the astrometry query), how many chunks the data for this
        frame set must be split into to fit in memory, and returns the
        maximum over all of these.
        """
        # @FIXME: This is really slow in VVV - do not need to include
        #         detection and photometry table in query.
        #         BUT KEEP same selectStr: selectStr is only used to estimate
        #         size of row
        maxNParts = 1
        fromStr = self._synSetup.getFromStr(isParts=True)
        whereStr = self._synSetup.getWhereStr(frameSetID, isParts=True)
        memFrac = Cu6.memFracNoise if isInitStats else Cu6.memFracVariability

        for filterID, shtName in self._synPassbands:
            aprStr = ("p.aper" if not self._synSetup.isVarTableCorrel()
                      else "ss.%sAper" % shtName)

            magErrString = ','.join(aprStr + "Mag%sErr" % aper
                                    for aper in range(*self.bestAperRange))

            # Magnitude columns share the error column names minus 'Err'
            selectStr = magErrString.replace('Err', '')

            # NOTE(review): the "AND ..." concatenations below assume
            # getWhereStr() returns a string ending in whitespace - confirm.
            if self._synSetup.isVarTableCorrel():
                if isInitStats:
                    selectStr = "s.mergedClass,ss.ra,ss.dec," + selectStr

                nParts = self._queryNumParts(selectStr=selectStr,
                    fromStr=fromStr + ", %s AS l"
                        % self.programme.getSynopticMergeLogTable(),
                    whereStr=whereStr + "AND x.synFrameSetID=l.synFrameSetID"
                        " AND x.synSeqNum>0",
                    memFrac=memFrac)
            else:
                if isInitStats:
                    # Just need to get the correct size
                    selectStr = "s.mergedClass,s.ra,s.dec," + selectStr

                nParts = self._queryNumParts(selectStr=selectStr,
                    fromStr=fromStr + ", Multiframe AS mf",
                    whereStr=whereStr + "AND mf.filterID=%s AND " % filterID +
                        "x.multiframeID=mf.multiframeID AND x.seqNum>0",
                    memFrac=memFrac)

            maxNParts = max(maxNParts, nParts)

        if not isInitStats:
            # Check astrometry
            if self._synSetup.isVarTableCorrel():
                # NOTE(review): this attribute string duplicates the one in
                # _queryAstromData() - keep the two in step.
                nAttribPerBand = 6 if self.sysc.isVSA() else 5
                allData = "x.sourceID,ss.cx,ss.cy,ss.cz,"
                allData += (','.join("ss.%sppErrBits,ss.%sseqNum,"
                    "(ss.%sXi*0.000004848),(ss.%sEta*0.000004848),ss.%sAperMag3,"
                    "ss.%smjd" % tuple((nAttribPerBand) * [band])
                    for _filterID, band in self._passbands)
                    if self.sysc.isVSA() else
                    "l.meanMjdObs," + ','.join("ss.%sppErrBits,ss.%sseqNum,"
                    "(ss.%sXi*0.000004848),(ss.%sEta*0.000004848),ss.%sAperMag3"
                    % tuple(nAttribPerBand * [band])
                    for _filterID, band in self._passbands))

                nParts = self._queryNumParts(allData,
                    fromStr="%s as s,%s as l,%s as x"
                        % (self.programme.getSourceTable(),
                           self.programme.getSynopticMergeLogTable(),
                           self.programme.getSynopticBestMatchTable()),
                    whereStr="x.synFrameSetID=l.synFrameSetID AND "
                        "x.sourceID=s.sourceID AND s.frameSetID=%s"
                        % frameSetID,
                    memFrac=Cu6.memFactAstrom)
            else:
                nParts = self._queryNumParts(
                    selectStr="x.sourceID,%s,a.cx,a.cy,a.cz,p.aperMag3,"
                        "r.ppErrBits,m.filterID"
                        % ("r.mjd" if self.sysc.isVSA() else "m.mjdObs"),
                    fromStr="Multiframe as m,%s as x,%s as s" % (
                        self.programme.getSynopticBestMatchTable(),
                        self.programme.getSourceTable()),
                    whereStr="x.sourceID=s.sourceID AND "
                        "x.multiframeID=m.multiframeID AND s.frameSetID=%s"
                        " AND x.seqNum>0" % frameSetID,
                    memFrac=Cu6.memFactAstrom)

            maxNParts = max(maxNParts, nParts)

        return maxNParts
1913 1914 #-------------------------------------------------------------------------- 1915
1916 - def _findTurningPoint(self, model, filterID, fit):
1917 """ Find when d/dm =0. 1918 """ 1919 if model.__name__ != 'stratModel': 1920 return 1921 1922 magTp = turnStratModel(fit[0]) 1923 if magTp is None: 1924 magTp = self.expLimDict[filterID] + 10 1925 1926 return min(magTp, self.expLimDict[filterID] - 1)
1927 1928 #-------------------------------------------------------------------------- 1929
1930 - def _setGoodQualityBitflags(self):
1931 """ Sets the default list of good quality bit flags, 1932 """ 1933 self._goodBitflags = [] 1934 1935 # Include deblended objects in good 1936 if self.programme.getAttr("isLowGalacticLat"): 1937 self._goodBitflags.append(4) 1938 1939 # Sort out more complicated cases 1940 if len(self._goodBitflags) >= 1: 1941 nCombs = 2 ** len(self._goodBitflags) - 1 1942 if nCombs == 1: 1943 self._goodBitflags = [2 ** self._goodBitflags[0]] 1944 1945 self._goodBitflags = [0] + self._goodBitflags
1946 1947 #-------------------------------------------------------------------------- 1948
    def _calcAstromFit(self, xList, yList, zList, errorList):
        """
        Fits the mean astrometric position of a source from its Cartesian
        unit-vector detections, using the configured motion model ('static'
        or 'wgtstatic'); motion/parallax attributes are currently always
        returned at their default values.

        @param xList, yList, zList: Cartesian components of each detection.
        @param errorList:           Per-detection positional errors
                                    (in radians - TODO confirm units).

        @return: Tuple of (mRa, mDec, errXd, errYd, expAstErr, epoch, muRa,
                 muDec, sigMuRa, sigMuDec, parallax, minPllx, maxPllx,
                 chiSq, numGoodMatch), angles in degrees and errors in
                 arcseconds, with table defaults where nothing was fitted.
        """
        # Start with table defaults; these are returned untouched if there
        # are too few matches to fit
        mRa = self.astCols.ra.getDefaultValue()
        mDec = self.astCols.dec.getDefaultValue()
        errXd = self.astCols.sigRa.getDefaultValue()
        errYd = self.astCols.sigDec.getDefaultValue()
        chiSq = self.astCols.chi2.getDefaultValue()
        expAstErr = self.astCols.expAstErr.getDefaultValue()
        epoch = self.astCols.epoch.getDefaultValue()
        muRa = self.astCols.muRa.getDefaultValue()
        muDec = self.astCols.muDec.getDefaultValue()
        sigMuRa = self.astCols.sigMuRa.getDefaultValue()
        sigMuDec = self.astCols.sigMuDec.getDefaultValue()
        parallax = self.astCols.parallax.getDefaultValue()
        minPllx = self.astCols.minPllx.getDefaultValue()
        maxPllx = self.astCols.maxPllx.getDefaultValue()
        numGoodMatch = len(xList)
        if numGoodMatch < Cu6.minCount:
            return mRa, mDec, errXd, errYd, expAstErr, epoch, muRa, muDec, \
                sigMuRa, sigMuDec, parallax, minPllx, maxPllx, chiSq, numGoodMatch

        # Pack detections into an (N, 4) array of (x, y, z, error)
        cartPosList = numpy.zeros([numGoodMatch, 4], numpy.float64)
        cartPosList[:, 0] = numpy.array(xList)
        cartPosList[:, 1] = numpy.array(yList)
        cartPosList[:, 2] = numpy.array(zList)
        cartPosList[:, 3] = numpy.array(errorList)

        if self.motionModel == 'static':
            mRa, mDec, errXd, errYd = astro.calcGroupCentre(cartPosList)
            chiSq = 1

        elif self.motionModel == 'wgtstatic':
            mRa, mDec, errXd, errYd, chiSq, expAstErr = \
                astro.calcGroupCentre(cartPosList, isWeighted=True)

            if math.isinf(errXd) or math.isinf(errYd):
                # FIXME: This is a temporary fix, use all bands properly instead.
                errXd = self.astCols.sigRa.getDefaultValue()
                errYd = self.astCols.sigDec.getDefaultValue()

            expAstErr = (3600 * math.degrees(expAstErr) if expAstErr > 0 else
                         self.astCols.expAstErr.getDefaultValue())

        # Convert fitted radians to degrees / arcseconds, falling back to
        # the defaults for out-of-range (unfitted) sentinel values
        mRa = (math.degrees(mRa) if mRa >= 0 else
               self.astCols.ra.getDefaultValue())

        mDec = (math.degrees(mDec) if mDec >= -math.pi else
                self.astCols.dec.getDefaultValue())

        errXd = (3600 * math.degrees(errXd) if errXd > 0 else
                 self.astCols.sigRa.getDefaultValue())

        errYd = (3600 * math.degrees(errYd) if errYd > 0 else
                 self.astCols.sigDec.getDefaultValue())

        # No valid position => the chi-squared is meaningless too
        if not (mRa >= 0 and mDec >= -90):
            chiSq = self.astCols.chi2.getDefaultValue()

        return mRa, mDec, errXd, errYd, expAstErr, epoch, muRa, muDec, \
            sigMuRa, sigMuDec, parallax, minPllx, maxPllx, chiSq, numGoodMatch
2010 2011 #-------------------------------------------------------------------------- 2012
    def _queryBadEpochSet(self):
        """
        Finds any extensions that have no data in DetectionRaw,
        DetectionPhotometry and DetectionAstrometry.

        Populates self.badEpochs with the multiframeIDs of programme epoch
        frames missing from any of the three detection tables, so they can
        be excluded by L{_queryPossFrames}.
        """
        Logger.addMessage("Checking for missing catalogue data...")

        # @TODO: Make this as function of detector not multiframe?
        #        If not ingested - not ingested for whole mf?
        frameSelection = \
            queries.getFrameSelection(self.programme.getAttr("epochFrameType"))

        self.badEpochs = set()
        for table in [self.programme.getDetectionTable(),
                      self.programme.getAstrometryTable(),
                      self.programme.getPhotometryTable()]:

            # @TODO: Replace IN with INNER JOIN?
            # A frame is flagged if none of its detectors has a seqNum<0
            # (default) row in this table - presumably the marker that the
            # catalogue was ingested; confirm against the ingest code.
            self.badEpochs.update(self.archive.query("d.multiframeID",
                "MultiframeDetector as d,Multiframe as m,ProgrammeFrame as p",
                "p.programmeID=%s and m.multiframeID=p.multiframeID and "
                "%s and d.multiframeID=m.multiframeID and d.multiframeID "
                "not in (select d.multiframeID from MultiframeDetector as d,"
                "Multiframe as m,%s as a where m.multiframeID=d.multiframeID "
                "and d.multiframeID=a.multiframeID and d.extNum=a.extNum and "
                "%s and seqNum<0)" % (self.programmeID, frameSelection, table,
                                      frameSelection)))
2041 2042 #-------------------------------------------------------------------------- 2043
2044 - def _queryBestAperTable(self, frameSetID):
2045 """ ? 2046 """ 2047 aperString = ','.join(shtName + "bestAper" 2048 for _filterID, shtName in self._synPassbands) 2049 2050 aperData = self.archive.query("sourceID," + aperString, 2051 self.programme.getBestAperTable(), "frameSetID = %s" % frameSetID) 2052 2053 newData = {} 2054 for ii, (filterID, _shtName) in enumerate(self._synPassbands): 2055 for data in aperData: 2056 newData[(data.sourceID, filterID)] = data[ii + 1] 2057 2058 return newData
2059 2060 #-------------------------------------------------------------------------- 2061
    def _getCurrentFits(self):
        """ Reads the existing VarFrameSetInfo rows into the per-frameSet,
            per-filter dictionaries: astFit/phtFit (coefficients plus
            chi-squared), astNDof/phtNDof, and the model turning points
            magMxAst/magMxPht.
        """
        fitInfo = self.programme.getVarFrameSetInfoTable()

        # convert into dictionary
        # NOTE(review): the 13*ii offsets assume each passband occupies 13
        # consecutive columns in VarFrameSetInfo - confirm against schema.
        for fSet in self.archive.query("*", fitInfo):
            frameSetID = fSet.frameSetID
            for ii, (filterID, _shtName) in enumerate(self._synPassbands):
                # Astrometric fit: ((a, b, c), chiSq)
                self.astFit[frameSetID][filterID] = \
                    ((fSet[3 + 13 * ii], fSet[4 + 13 * ii], fSet[5 + 13 * ii]),
                     fSet[6 + 13 * ii])

                self.astNDof[frameSetID][filterID] = fSet[7 + 13 * ii]
                # Photometric fit: ((a, b, c), chiSq)
                self.phtFit[frameSetID][filterID] = \
                    ((fSet[9 + 13 * ii], fSet[10 + 13 * ii], fSet[11 + 13 * ii]),
                     fSet[12 + 13 * ii])

                self.phtNDof[frameSetID][filterID] = fSet[13 + 13 * ii]
                # Magnitudes at which the fitted strata models turn over
                self.magMxPht[frameSetID][filterID] = \
                    self._findTurningPoint(stratModel, filterID,
                                           self.phtFit[frameSetID][filterID])

                self.magMxAst[frameSetID][filterID] = \
                    self._findTurningPoint(stratModel, filterID,
                                           self.astFit[frameSetID][filterID])
2088 2089 #-------------------------------------------------------------------------- 2090
    def _getFrameSetDict(self):
        """ Get dictionary of frame sets needed in each part

        @return: Dictionary mapping each output table (best-match tables,
                 VarFrameSetInfo, Variability) to the list of frame sets
                 that still need (re)processing for it.
        """
        Logger.addMessage("Preparing list of frame sets...")
        frameSetsForTable = {}
        mergeLog = self.programme.getMergeLogTable()

        # Best matching
        bmTables = [] if self.calcVarOnly else self._synSetup.bestMatchTables

        # if all entries in mergelog are more recent - just delete all
        minMLcuEventID = \
            self.archive.query("MIN(cuEventID)", mergeLog, firstOnly=True)

        for bmTable in bmTables:
            # NOTE(review): relies on Python 2 ordering semantics if either
            # cuEventID query returns None - confirm both always return ints.
            if (minMLcuEventID > self.archive.queryAttrMax("cuEventID", bmTable)
                or not self.archive.queryEntriesExist(bmTable)):
                newFrameSets = self.archive.query("frameSetID", mergeLog)
            else:
                # Use cuEventID
                # @TODO: Replace IN with INNER JOIN?
                newFrameSets = self.archive.query("frameSetID", mergeLog,
                    whereStr="frameSetID NOT IN (%s)"
                    % SelectSQL("DISTINCT S.frameSetID",
                        table=Join([(self.programme.getSourceTable(), "S"),
                                    (bmTable, "B")], "sourceID"),
                        where="B.cuEventID >= S.cuEventID"))

            frameSetsForTable[bmTable] = self._getFinalFrameSets(newFrameSets)

        # VarFrameSetInfo
        # @TODO: Replace IN with INNER JOIN?
        newFrameSets = self.archive.query("frameSetID", mergeLog,
            whereStr="frameSetID NOT IN (%s)" % SelectSQL("DISTINCT S.frameSetID",
            table="%s AS S, %s AS B, %s AS I" % (self.programme.getSourceTable(),
                self.programme.getSynopticBestMatchTable(),
                self.programme.getVarFrameSetInfoTable()),
            where="B.cuEventID >= S.cuEventID AND B.sourceID=S.sourceID "
                  + "AND i.frameSetID=s.frameSetID"))

        frameSetsForTable[self.programme.getVarFrameSetInfoTable()] = \
            self._getFinalFrameSets(newFrameSets)

        # Variability
        # @TODO: Replace IN with INNER JOIN?
        newFrameSets = self.archive.query("frameSetID", mergeLog,
            whereStr="frameSetID NOT IN (%s)" % SelectSQL("DISTINCT I.frameSetID",
            table=Join([(self.programme.getVarFrameSetInfoTable(), "I"),
                        (self.programme.getVariabilityTable(), "V")], "frameSetID"),
            where="V.cuEventID >= I.cuEventID"))

        frameSetsForTable[self.programme.getVariabilityTable()] = \
            self._getFinalFrameSets(newFrameSets)

        # @TODO: if frameSet in bestMatchDict - must be in others
        #        if in VarFrameSetDict must be in Variability

        return frameSetsForTable
2149 2150 #-------------------------------------------------------------------------- 2151
2152 - def _getFinalFrameSets(self, newFrameSets):
2153 """ ? 2154 """ 2155 if not newFrameSets: 2156 return [] 2157 2158 if self.frameSetList: 2159 return sorted(set(newFrameSets).intersection(set(self.frameSetList))) 2160 2161 frameSets = set() 2162 for _fID, shortName in self._passbands: 2163 # @TODO: Replace IN with INNER JOIN? 2164 frameSetInfo = self.archive.query( 2165 selectStr="frameSetID, productID", 2166 fromStr=self.programme.getMergeLogTable() + " AS L, Multiframe" 2167 " AS M, RequiredFilters AS F, ProgrammeFrame AS P", 2168 whereStr="P.multiframeID=M.multiframeID AND " 2169 "L.%smfID=M.multiframeID AND " 2170 "F.filterID=M.filterID AND fileName!=%r AND " 2171 "F.programmeID=%s AND F.programmeID=P.programmeID " 2172 "AND isSynoptic=%s AND frameSetID in (%s)" % 2173 (shortName.lower(), dbc.charDefault(), self.programmeID, 2174 dbc.yes(), ','.join(map(str, newFrameSets)))) 2175 2176 frameSets.update(frameSetID 2177 for frameSetID, productID in frameSetInfo 2178 if self.minProdID <= productID <= self.maxProdID) 2179 2180 return sorted(frameSets)
2181 2182 #-------------------------------------------------------------------------- 2183
2184 - def _queryUniqueSources(self, sqlSelection):
2185 """ ? 2186 """ 2187 if "frameSetID" in sqlSelection: 2188 return self.archive.query("x.sourceID", 2189 fromStr="%s AS x, %s AS s" % 2190 (self.programme.getSynopticBestMatchTable(), 2191 self.programme.getSourceTable()), 2192 whereStr="s.sourceID=x.sourceID AND %s" % sqlSelection, 2193 groupBy="x.sourceID", orderBy="x.sourceID") 2194 else: 2195 return self.archive.query("sourceID", 2196 fromStr=self.programme.getSynopticBestMatchTable(), 2197 whereStr=sqlSelection, 2198 groupBy="sourceID", orderBy="sourceID")
2199 2200 #-------------------------------------------------------------------------- 2201
2202 - def _nearestGoodFilter(self, filterID, goodFilters):
2203 """ ? 2204 """ 2205 # @TODO: Replace IN with INNER JOIN? 2206 curFilterInfo = self.archive.query("filterID, (cutOn+cutOff) as medLamb", 2207 "Filter", "filterID=%s" % filterID, 2208 firstOnly=True) 2209 2210 filterInfo = self.archive.query("filterID, (cutOn+cutOff) as medLamb", 2211 "Filter", "filterID in (%s)" % 2212 (','.join(map(str, goodFilters)))) 2213 2214 absDev = [(fInfo.filterID, abs(fInfo.medLamb - curFilterInfo.medLamb)) 2215 for fInfo in filterInfo] 2216 2217 return sorted(absDev, key=itemgetter(1))[0][0]
2218 2219 #-------------------------------------------------------------------------- 2220
    def _prepareFrameSets(self, frameSets, bmAttr):
        """ Prepares frame sets for BestMatch table calculation.

        Drops frame sets whose best-match rows already exist and, for
        correlated tables, builds the synoptic frame set lookup
        dictionaries used by L{_findBestMatches}.

        @return: The reduced list of frame sets still to be processed.
        """
        Logger.addMessage("Preparing frame sets...")

        # Remove frame sets that are up-to-date
        frameSets = [frameSetID for frameSetID in frameSets
                     if not self.archive.queryEntriesExist(
                         table=Join([bmAttr.tableName,
                                     self.programme.getSourceTable()],
                                    ["sourceID"]),
                         where="frameSetID=%s" % frameSetID)]

        if bmAttr.isCorrel:
            # Lookups between synoptic frame sets and their per-band
            # (multiframeID, extNum) frames, in both directions
            self.unqSynFrameSets = set()
            self.synFrameSetDict = {}
            self.synFramesDict = {}
            for filterID, band in self._passbands:
                synFrameSets = self.archive.query(
                    selectStr="synFrameSetID, %smfID, %seNum" % (band, band),
                    fromStr=self.programme.getSynopticMergeLogTable())

                self.unqSynFrameSets.update(sfsID
                    for sfsID, mfID, eNum in synFrameSets)

                self.synFramesDict.update(((mfID, eNum), (sfsID, filterID))
                    for sfsID, mfID, eNum in synFrameSets)

                self.synFrameSetDict[filterID] = dict((sfsID, (mfID, eNum))
                    for sfsID, mfID, eNum in synFrameSets)

        return frameSets
2253 2254 #-------------------------------------------------------------------------- 2255
    def _queryAstromDataByMonth(self, tempTable):
        """
        Builds up the queries of astromData by iterating over monthly tables.

        @return: Epoch astrometry rows (sourceID, mjd, cx, cy, cz, aperMag3,
                 filterID) for every best-match in tempTable, sorted by
                 sourceID with per-source time ordering preserved by the
                 stable sort.
        """
        detTables = self.programme.getMonthlyDetectionTables(self.dateRange,
            excludeDeeps=True,
            mfIDs=self.archive.query("DISTINCT multiframeID", tempTable))

        astromData = []
        for detTable, mfIdRange in detTables:
            # @TODO: change ppErrBits selection to IN (0,16)
            # NOTE(review): assumes mfIdRange begins with " AND ..." so the
            # concatenation below forms valid SQL - confirm against
            # getMonthlyDetectionTables().
            astromData += self.archive.query(
                selectStr="sourceID, mjd, cx, cy, cz, aperMag3, filterID",
                fromStr=Join([(tempTable, 'X'), (detTable, 'D'),
                              (detTable.replace("Raw", "Astrometry"), 'A'),
                              (detTable.replace("Raw", "Photometry"), 'P')],
                             attributes=["multiframeID", "extNum", "seqNum"]),
                whereStr="ppErrBits=0"
                         + mfIdRange.replace("[multiframeID]", "D.multiframeID"))

        # @TODO: These sorts may not be strictly necessary anymore - check
        # Sort by mjd first so the subsequent stable sourceID sort keeps
        # detections of each source in time order
        astromData.sort(key=attrgetter('mjd'))

        return sorted(astromData, key=attrgetter('sourceID'))
2281 2282 #-------------------------------------------------------------------------- 2283
    def _queryAstromData(self, chunkSQL):
        """ Returns per-epoch astrometric data for the sources selected by
            chunkSQL: queried from the monthly detection tables via a
            temporary best-match table in the uncorrelated case, or from
            the SynopticSource table in the correlated case.
        """
        # Use magnitude data to get astrometric error
        if not self._synSetup.isVarTableCorrel():
            tempTable = schema.Table()
            tempTable.name = self.programme.getSynopticBestMatchTable() + "Temp"

            # @TODO: Look into helpful primary keys, indices
            Logger.addMessage("Creating %s..." % tempTable, alwaysLog=False)
            numObjs = self._createTempAstromTable(tempTable, chunkSQL)
            Logger.addMessage("Inserted %s rows into %s" % (numObjs, tempTable),
                              alwaysLog=False)
            # Always drop the temporary table, even if the query fails
            try:
                astromData = self._queryAstromDataByMonth(tempTable)
            finally:
                self.archive.dropTable(tempTable, [])

            return astromData

        # Correlated case: per-band attributes in a single row per synoptic
        # frame set. The 0.000004848 factor is presumably arcsec->radians
        # (4.848e-6 rad per arcsec) applied to xi/eta - confirm units.
        # NOTE(review): this attribute string is duplicated in
        # _findNPartsVar() - keep the two in step.
        nAttribPerBand = 6 if self.sysc.isVSA() else 5
        allData = (','.join("ss.%sppErrBits,ss.%sseqNum,"
            "(ss.%sXi*0.000004848),(ss.%sEta*0.000004848),ss.%sAperMag3,"
            "ss.%smjd" % tuple(nAttribPerBand * [band])
            for _filterID, band in self._passbands) if self.sysc.isVSA() else
            "l.meanMjdObs," + ','.join("ss.%sppErrBits,ss.%sseqNum,"
            "(ss.%sXi*0.000004848),(ss.%sEta*0.000004848),ss.%sAperMag3"
            % tuple(nAttribPerBand * [band])
            for _filterID, band in self._passbands))

        return self.archive.query("x.sourceID,ss.cx,ss.cy,ss.cz," + allData,
            fromStr="%s as s,%s as l,%s as x,%s as ss"
                % (self.programme.getSourceTable(),
                   self.programme.getSynopticMergeLogTable(),
                   self.programme.getSynopticBestMatchTable(),
                   self.programme.getSynopticSourceTable()),
            whereStr="x.sourceID=s.sourceID AND x.synFrameSetID="
                "ss.synFrameSetID AND x.synSeqNum=ss.synSeqNum AND "
                "ss.synFrameSetID=l.synFrameSetID AND x." + chunkSQL,
            orderBy="x.sourceID,l.meanMjdObs", forcePrOrder=True)
2324 2325 #-------------------------------------------------------------------------- 2326
    def _queryCorrelData(self, chunkSQL):
        """ Queries aperMag3 magnitudes, their errors and quality flags for
            the broad-band (filterID <= 5) synoptic passbands of every
            best-match in the given sourceID chunk.

        @return: numpy array of rows ordered by sourceID.
        """
        magnitudeString = ','.join("ss.%sAperMag3,ss.%sAperMag3Err"
            % (band, band) for filterID, band in self._synPassbands
            if filterID <= 5)

        errFlagString = ','.join("ss.%sppErrBits" % band
            for filterID, band in self._synPassbands if filterID <= 5)

        return numpy.array(self.archive.query(
            selectStr="s.sourceID,%s,%s" % (magnitudeString, errFlagString),
            fromStr="%s as s,%s as x,%s as ss"
                % (self.programme.getSourceTable(),
                   self.programme.getSynopticBestMatchTable(),
                   self.programme.getSynopticSourceTable()),
            whereStr="s.sourceID=x.sourceID AND x.synSeqNum=ss.synSeqNum"
                " AND x.synFrameSetID=ss.synFrameSetID AND x." + chunkSQL,
            orderBy="s.sourceID", forceOneProc=True))
2346 2347 #-------------------------------------------------------------------------- 2348
    def _queryMaxImageExtent(self, frameSetID, isCorrel):
        """ Get max image extent from all deep stacks in frameSet.

        @return: The largest (imageExtent + distance from frame set centre)
                 over all passband deep stacks, padded by the maximum
                 RA/Dec stack extent in the correlated case.
        """
        imgExt = []
        for _filterID, band in self._passbands:
            # NOTE(review): the firstOnly=True result is concatenated onto a
            # list with += - presumably query() returns a sequence here;
            # confirm its firstOnly return type.
            imgExt += self.archive.query(
                selectStr="(imageExtent + "
                    "dbo.fGreatCircleDist(centralRa,centralDec,ra,dec))",
                fromStr="CurrentAstrometry CA, %s ML"
                    % self.programme.getMergeLogTable(),
                whereStr="CA.multiframeID=ML.%smfID AND "
                    "CA.extNum=ML.%seNum AND frameSetID=%s"
                    % (band, band, frameSetID), firstOnly=True)

        return (max(imgExt) if not isCorrel else
                max(imgExt) + 60 * self.sysc.maxRaDecExtentStackDeg)
2365 2366 #-------------------------------------------------------------------------- 2367
2368 - def _queryNumParts(self, selectStr, fromStr, whereStr, 2369 memFrac=memFactBestMatch):
2370 """ Estimate memory required. 2371 """ 2372 # 64 bit array 2373 rowSize = 8 * len(selectStr.split(',')) 2374 whereStr += " OPTION (MAXDOP 1)" 2375 numRows = self.archive.queryNumRows(fromStr, whereStr) 2376 2377 return int(math.ceil(numRows * rowSize / (memFrac * self.availMemory)))
2378 2379 #-------------------------------------------------------------------------- 2380
    def _queryCorrelObjData(self, chunkInfo):
        """ Queries database for correlated object data.

        @return: One row per best-match in the chunk's sourceID range for
                 the chunk's passband: the aperture magnitudes over the
                 best-aperture range plus sourceID, mergedClass, epoch
                 (mjd), position and quality flags, ordered by sourceID.
        """
        magStr = ','.join("ss.%sAper" % chunkInfo.shtName + "Mag%s" % aper
                          for aper in range(*self.bestAperRange))

        fromStr = self._synSetup.getFromStr()
        # Non-VSA archives take the epoch from the synoptic merge log
        if not self.sysc.isVSA():
            fromStr += ",%s as l" % self.programme.getSynopticMergeLogTable()

        whereStr = self._synSetup.getWhereStr(chunkInfo.frameSetID)
        sourceSel = whereStr + " AND %s" % self._sourceIDSel(chunkInfo, 'x')

        return self.archive.query(
            selectStr=(magStr + ", s.sourceID, s.mergedClass, %s, "
                "ss.ra, ss.dec, ss.%sppErrBits AS ppErrBits")
                % ("ss.%smjd AS mjd" % chunkInfo.shtName
                   if self.sysc.isVSA() else "l.meanMjdObs AS mjd",
                   chunkInfo.shtName),
            fromStr=fromStr,
            whereStr=sourceSel + " AND x.synSeqNum>0 AND ss.%sseqNum>0"
                % chunkInfo.shtName + ("" if self.sysc.isVSA() else
                " AND l.synFrameSetID=ss.synFrameSetID"),
            orderBy="x.sourceID",
            forceOneProc=True)  # @TODO: Check if order & forceOneProc necessary.
2406 2407 #-------------------------------------------------------------------------- 2408
    def _createTempObjTable(self, tempTable, chunkInfo):
        """
        Put core data for _queryObjData() into temporary table using
        SELECT INTO with filter selection here using Multiframe.

        Returns number of rows inserted into the new temporary table.
        """
        # Ensure it doesn't exist
        self.archive.dropTable(tempTable, [])

        tempTable.columns = \
            "X.sourceID, mergedClass, X.multiframeID, extNum, seqNum"

        # Non-VSA archives carry the epoch via Multiframe.mjdObs
        if not self.sysc.isVSA():
            tempTable.columns += ", mjdObs AS mjd"

        tableJoin = Join(
            tables=[(self.programme.getSynopticBestMatchTable(), 'X'),
                    (self.programme.getSourceTable(), 'S')],
            attributes=["sourceID"],
            subJoin=("Multiframe", "multiframeID"))

        # seqNum > 0 keeps genuine detections only (drops default rows)
        return self.archive.copyTable(tableJoin, tempTable,
            columns=tempTable.columns,
            where=self._sourceIDSel(chunkInfo, 'x') + " AND seqNum > 0"
                  " AND filterID = %s" % chunkInfo.filterID)
2435 2436 #-------------------------------------------------------------------------- 2437
    def _createTempAstromTable(self, tempTable, sourceIDs):
        """
        Put core data for _queryAstromData() into temporary table using
        SELECT INTO.

        @param sourceIDs: SQL fragment selecting the sourceID range,
                          applied to the source table (S).

        Returns number of rows inserted into the new temporary table.
        """
        # Ensure it doesn't exist
        self.archive.dropTable(tempTable, [])

        tempTable.columns = "X.sourceID, X.multiframeID, extNum, seqNum"

        # Non-VSA archives carry the epoch via Multiframe.mjdObs
        if not self.sysc.isVSA():
            tempTable.columns += ", mjdObs AS mjd"

        tableJoin = Join(
            tables=[(self.programme.getSynopticBestMatchTable(), 'X'),
                    (self.programme.getSourceTable(), 'S')],
            attributes=["sourceID"],
            subJoin=(("Multiframe", "multiframeID")
                     if not self.sysc.isVSA() else None))

        # seqNum > 0 keeps genuine detections only (drops default rows)
        return self.archive.copyTable(tableJoin, tempTable, tempTable.columns,
            where="S.%s AND seqNum > 0" % sourceIDs)
2462 2463 #-------------------------------------------------------------------------- 2464
2465 - def _queryObjDataByMonth(self, tempTable):
2466 """ Builds up the queries of objData by iterating over monthly tables. 2467 """ 2468 detTables = self.programme.getMonthlyDetectionTables(self.dateRange, 2469 excludeDeeps=True, mfIDs=self.archive.query("DISTINCT multiframeID", 2470 tempTable)) 2471 objDataSel = [] 2472 for detTable, mfIdRange in detTables: 2473 selectStr = ', '.join("aperMag%s" % aper 2474 for aper in range(*self.bestAperRange)) \ 2475 + ", sourceID, mergedClass, mjd, ra, dec, ppErrBits" 2476 2477 fromStr = Join([(tempTable, 'X'), (detTable, 'D'), 2478 (detTable.replace("Raw", "Astrometry"), 'A'), 2479 (detTable.replace("Raw", "Photometry"), 'P')], 2480 attributes=["multiframeID", "extNum", "seqNum"]) 2481 2482 objDataSel += self.archive.query(selectStr, fromStr, 2483 whereStr=mfIdRange.replace("[multiframeID]", "D.multiframeID")) 2484 2485 # @TODO: This may not be strictly necessary anymore - check 2486 return sorted(objDataSel, key=attrgetter('sourceID'))
2487 2488 #-------------------------------------------------------------------------- 2489
    def _queryObjData(self, chunkInfo, isInitStats=False):
        """
        Selects data to be used calculating stats.

        Speed ups for VVV
        4) Improve queries - remove unnecessary tables, use indexed queries

        @return: When isInitStats: (uniqueSources, goodObjData,
                 goodObsOfSrc). Otherwise: (uniqueSources, objData (mjds),
                 numMissingDetsOfSrc, goodObjData, goodObsOfSrc,
                 flaggedObsOfSrc).
        """
        # @TODO: Can't we get this info from objDataSel?
        uniqueSources = self._queryUniqueSources(self._sourceIDSel(chunkInfo))

        if self._synSetup.isVarTableCorrel():
            objDataSel = self._queryCorrelObjData(chunkInfo)
        else:
            tempTable = schema.Table()
            tempTable.name = self.programme.getSynopticBestMatchTable() + "Temp"

            # @TODO: Look into helpful primary keys, indices
            Logger.addMessage("Creating %s..." % tempTable, alwaysLog=False)
            numObjs = self._createTempObjTable(tempTable, chunkInfo)
            Logger.addMessage("Inserted %s rows into %s" % (numObjs, tempTable),
                              alwaysLog=False)
            # Always drop the temporary table, even if the query fails
            try:
                objDataSel = self._queryObjDataByMonth(tempTable)
            finally:
                self.archive.dropTable(tempTable, [])

        numApers = len(range(*self.bestAperRange))
        # Count observations per source with acceptable quality flags
        goodObsOfSrc = Counter(obj.sourceID for obj in objDataSel
                               if obj.ppErrBits in self._goodBitflags)

        if isInitStats:
            # First numApers row attributes are the aperture magnitudes
            goodObjData = numpy.array([
                (obj.mergedClass, obj.ra, obj.dec) + obj[:numApers]
                for obj in objDataSel if obj.ppErrBits in self._goodBitflags])

            return uniqueSources, goodObjData, goodObsOfSrc

        objData = numpy.array([obj.mjd for obj in objDataSel])
        goodObjData = numpy.array([obj[:numApers] for obj in objDataSel
                                   if obj.ppErrBits in self._goodBitflags])

        flaggedObsOfSrc = Counter(obj.sourceID for obj in objDataSel
                                  if obj.ppErrBits not in self._goodBitflags)

        numMissingDetsOfSrc = self._queryMissingDetections(chunkInfo)

        return uniqueSources, objData, numMissingDetsOfSrc, goodObjData, \
            goodObsOfSrc, flaggedObsOfSrc
2539 2540 #-------------------------------------------------------------------------- 2541
    def _queryMissingDetections(self, chunkInfo):
        """ Returns a dictionary of missing detections per source.

        Counts best-match rows in this chunk's sourceID range whose
        sequence number is negative (i.e. no detection at that epoch),
        keyed by sourceID; sources absent from the result default to 0.
        """
        if self._synSetup.isVarTableCorrel():
            # Sort out filter
            missDetTable = "%s as x, %s as ss, %s AS sl" % \
                (self.programme.getSynopticBestMatchTable(),
                 self.programme.getSynopticSourceTable(),
                 self.programme.getSynopticMergeLogTable())

            # Missing means either no synoptic source at all (x.synSeqNum<0)
            # or a synoptic source without this band's detection
            # (ss.<band>seqNum<0), provided the frame set has a frame in
            # this band (sl.<band>mfID>0)
            missDetWhere = ("x.synFrameSetID=sl.synFrameSetID AND "
                            "x.synFrameSetID=ss.synFrameSetID AND "
                            "x.synSeqNum=ss.synSeqNum AND "
                            "(x.synSeqNum<0 OR ss.%sseqNum<0) AND sl.%smfID>0"
                            % (chunkInfo.shtName, chunkInfo.shtName) + " AND "
                            + self._sourceIDSel(chunkInfo, 'x'))
        else:
            # Non-correlated case: restrict to this chunk's filter via
            # Multiframe, since the best-match table covers all filters
            missDetTable = "Multiframe as m,%s as x" \
                % self.programme.getSynopticBestMatchTable()

            missDetWhere = ("x.multiframeID=m.multiframeID and m.filterID=%s"
                            % chunkInfo.filterID + " AND seqNum<0 AND "
                            + self._sourceIDSel(chunkInfo, 'x'))

        return defaultdict(int, self.archive.queryNumRows(missDetTable,
                           missDetWhere, groupBy="sourceID"))
2568 2569 #-------------------------------------------------------------------------- 2570
2571 - def _sourceIDSel(self, chunkInfo, alias=None):
2572 """ Selection based on sourceID 2573 """ 2574 return "%ssourceID BETWEEN %s AND %s" % ("%s." % alias if alias else "", 2575 chunkInfo.firstSID + chunkInfo.part * chunkInfo.chunkSizeSID, 2576 chunkInfo.firstSID + (chunkInfo.part + 1) * chunkInfo.chunkSizeSID - 1)
2577 2578 #-------------------------------------------------------------------------- 2579
    def _queryPossFrames(self, frameSetID, bmAttr):
        """ Queries intermediate frames that may overlap the given frame set.

        For every passband, selects the epoch frames within the best-match
        date range whose centre lies close enough to the frame set centre
        to overlap, together with their image dimensions, dither offsets
        and half-space edge coefficients; frames flagged as bad epochs are
        excluded from the result.
        """
        # Column names for the 64 half-space plane coefficients:
        # 2 regions x 2 boundaries x 4 edges x 4 coefficients each
        halfSpaceAttr = []
        for region in ['dith', 'det']:
            for bound in ['inner', 'outer']:
                for edge in range(4):
                    for attr in ['NormX', 'NormY', 'NormZ', 'Const']:
                        halfSpaceAttr.append("%sedge%spos%d%s"
                                             % (region, bound, edge + 1, attr))

        mxImgExt = self._queryMaxImageExtent(frameSetID, bmAttr.isCorrel)
        possFrames = []
        # For stacks the dither offset is derived from nuStep and the
        # detector size; otherwise (presumably tiles) fixed system-wide
        # tile widths are used instead
        selectStr = ("ai.multiframeID, ai.extNum, axis1Length, axis2Length"
                     ", (axis1Length - sqrt(nuStep) * detRows) AS xOff"
                     ", (axis2Length - sqrt(nuStep) * detCols) AS yOff, "
                     if self.programme.getAttr("epochFrameType") == 'stack' else
                     "ai.multiframeID, ai.extNum, axis1Length, axis2Length"
                     ", (axis1Length - sqrt(nuStep) * %s) AS xOff"
                     ", (axis2Length - sqrt(nuStep) * %s) AS yOff, "
                     % (min(self.sysc.xWidthTileDith, self.sysc.xWidthTileEar),
                        self.sysc.yWidthTileDith))

        for filterID, _band in self._passbands:
            # Great-circle distance cut does the coarse spatial overlap
            # test; the half-space coefficients allow a precise test later
            possFrames += self.archive.query(
                selectStr=selectStr + ", ".join(halfSpaceAttr),
                fromStr="Multiframe mf, CurrentAstrometry ca, %s ai, "
                        "MultiframeDetector mfd, %s ml, ProgrammeFrame as p"
                        % (self.programme.getAstroInfoTable(),
                           self.programme.getMergeLogTable()),
                whereStr="ml.frameSetID=%s AND dbo.fGreatCircleDist(ml.ra"
                         ", ml.dec, ca.centralRa, ca.centralDec)"
                         "< (%s + ca.imageExtent+0.1)"
                         " AND ca.multiframeID=ai.multiframeID"
                         " AND ca.extNum=ai.extNum"
                         " AND mf.multiframeID=ai.multiframeID"
                         " AND mfd.multiframeID=ai.multiframeID"
                         " AND mfd.extNum=ai.extNum"
                         " AND mf.multiframeID=p.multiframeID"
                         " AND mfd.filterID=%s AND p.programmeID=%s"
                         " AND frameType not like '%%deep%%'"
                         " AND utDate BETWEEN '%s' AND '%s'"
                         % (frameSetID, mxImgExt, filterID, self.programmeID,
                            bmAttr.dateRange.begin, bmAttr.dateRange.end),
                orderBy="ai.multiframeID")

        # Drop any frames previously identified as bad epochs
        return [frame for frame in possFrames
                if frame.multiframeID not in self.badEpochs]
2628 2629 #-------------------------------------------------------------------------- 2630
2631 - def _querySourceIdRange(self, frameSetID):
2632 """ Retrieve range of source IDs for given frame set. 2633 """ 2634 firstSID, lastSID = self.archive.query( 2635 selectStr="MIN(sourceID), MAX(sourceID)", 2636 fromStr=self.programme.getSourceTable() + self._srcTableIdx, 2637 whereStr="frameSetID=%s" % frameSetID, 2638 firstOnly=True) 2639 2640 return firstSID, lastSID - firstSID
2641 2642 #-------------------------------------------------------------------------- 2643
2644 - def _querySources(self, selectStr, fromStr, sourceSel, orderBy=''):
2645 """ ? 2646 """ 2647 return self.archive.query(selectStr, fromStr, whereStr=sourceSel, 2648 orderBy=orderBy)
2649 2650 #-------------------------------------------------------------------------- 2651
2652 - def _queryUniqueCorrelSources(self, chunkSQL):
2653 """ ? 2654 """ 2655 # @TODO: Use queryNumRows with a group by clause (or do in Python!) 2656 return self.archive.query("s.sourceID, COUNT(*)", 2657 fromStr="%s as s,%s as x,%s as ss" 2658 % (self.programme.getSourceTable(), 2659 self.programme.getSynopticBestMatchTable(), 2660 self.programme.getSynopticSourceTable()), 2661 whereStr="s.sourceID=x.sourceID AND x.synSeqNum=ss.synSeqNum" 2662 " AND x.synFrameSetID=ss.synFrameSetID AND s." + chunkSQL, 2663 groupBy="s.sourceID", 2664 orderBy="s.sourceID")
2665 2666 #-------------------------------------------------------------------------- 2667
    def _splitVarSchema(self, varInfoSchema, varSchema):
        """
        Separates out columns associated with astrometric, photometric and
        correlated parts of schema.

        @return: A namedtuple type for photometric columns, a namedtuple of
                 default values for photometric columns, a list of default
                 values for astrometric columns and for info columns.
        @rtype:  namedtuple, Photo, list, list

        NOTE(review): the method actually stores its results as attributes
        on self (PhtTuple, phtCols, astCols, varFilCols, bscCols and
        optionally corCols) and returns None -- the @return above appears
        stale; confirm against callers.
        """
        basicAttrs = ['sourceID', 'cuEventID', 'frameSetID', 'variableClass']
        # Lower-cased per-passband short names, e.g. 'j', 'h', 'k'
        shtNames = \
            [shtName.lower() for _fID, shtName in self._synPassbands]
        basicCols = [col for col in varSchema.columns if col.name in basicAttrs]
        varInfoAttrs, varInfoCols = selectColumns(varInfoSchema, shtNames)
        VITuple = namedtuple('VarInfo', ' '.join(varInfoAttrs))
        photoAttrs, photoCols = selectColumns(varSchema, shtNames)
        PhotoTuple = namedtuple('Photo', ' '.join(photoAttrs))
        # Names of every basic and per-band photometric column, used below
        # to identify the leftover (astrometric) columns
        otherCol = basicAttrs + [shtName + attr for shtName in shtNames
                                 for attr in photoAttrs]

        if self._synColours:
            # Two-band colour names, e.g. 'jh', 'hk'
            colrNames = [(band1 + band2).lower() for _fID1, _fID2, band1, band2
                         in self._synColours]
            correlAttrs, correlCols = selectColumns(varSchema, colrNames)
            CorrelTuple = namedtuple('Correl', ' '.join(correlAttrs))
            otherCol.extend(colrName + attr for colrName in colrNames
                            for attr in correlAttrs)
            self.corCols = CorrelTuple(*correlCols)

        # Astrometric attributes are those left over, apart from:
        # sourceID, cuEventID, frameSetID, variableClass
        # NOTE(review): otherCol holds name strings while column is a
        # schema column object, so this membership test relies on the
        # schema column type comparing equal to its name string -- TODO
        # confirm in wsatools.DbConnect.Schema
        astroAttrs = [column.name
                      for column in varSchema.columns if column not in otherCol]
        astroCols = [column
                     for column in varSchema.columns if column not in otherCol]
        AstroTuple = namedtuple('Astro', ' '.join(astroAttrs))
        self.PhtTuple = PhotoTuple
        self.phtCols = PhotoTuple(*photoCols)
        self.astCols = AstroTuple(*astroCols)
        self.varFilCols = VITuple(*varInfoCols)
        BasicTuple = namedtuple('Basic', ' '.join(basicAttrs))
        self.bscCols = BasicTuple(*basicCols)
2712 2713 #-------------------------------------------------------------------------- 2714
2715 - def _updateDefaults(self, defFitDict, missingFilts):
2716 """ 2717 """ 2718 goodFilters = [filterID for filterID in defFitDict] 2719 for filterID in missingFilts: 2720 if (self.expLimDict[filterID] > 0. and goodFilters): 2721 # which is nearest filter by wavelength 2722 bestFID = self._nearestGoodFilter(filterID, goodFilters) 2723 # Difference in mag lim 2724 deltaM = self.expLimDict[filterID] - self.expLimDict[bestFID] 2725 # new fit... 2726 fit = defFitDict[bestFID][0] 2727 fit[1] = 10 ** (-0.4 * deltaM) * fit[1] 2728 fit[2] = 10 ** (-0.8 * deltaM) * fit[2] 2729 defFitDict[filterID] = (fit, dbc.realDefault()) 2730 else: 2731 defFitDict[filterID] = ((dbc.realDefault(), dbc.realDefault(), 2732 dbc.realDefault()), dbc.realDefault()) 2733 return defFitDict
2734 2735 #-------------------------------------------------------------------------- 2736
    def _writeResults(self, outFile, nonDetectMatchList, isCorrel):
        """ Writes the given non-detection best-match rows to the CSV file.

        In the correlated case the matches are condensed to one row per
        (sourceID, synoptic frame set) pair with an accumulated per-filter
        flag; otherwise every match is written out directly.
        """
        if isCorrel and nonDetectMatchList:
            # Find frames
            matchesList = []
            for sID, mfID, eNum, cuEID, seqN, sep, flag in nonDetectMatchList:
                frameID, filterID = None, None
                try:
                    frameID, filterID = self.synFramesDict[(mfID, eNum)]
                except KeyError:
                    # Outside frameSet but matching synframesets
                    # overlap.
                    Logger.addMessage("<Warning> Detector obs: "
                                      "mfID=%s and extNum=%s is not in list of "
                                      "expected frames"
                                      % (mfID, eNum), alwaysLog=False)

                if frameID:
                    # NOTE(review): flag ** filterID looks like it intends a
                    # per-filter bit value (e.g. base ** filterID); raising
                    # the flag itself to the power filterID is only correct
                    # if flag is a constant base -- confirm against the
                    # producer of nonDetectMatchList
                    matchesList.append((sID, frameID, (flag ** filterID)))

            matchesList.sort()
            if matchesList:
                # Accumulate the per-filter flags of consecutive sorted rows
                # sharing the same (sourceID, frameSetID); write a row each
                # time the pair changes
                sourceID, frameSetID = matchesList[0][0], matchesList[0][1]
                flag = 0
                for sID, frameID, flg in matchesList:
                    if sID == sourceID and frameID == frameSetID:
                        flag += flg
                    else:
                        outFile.write("%d,%d,%d,%d,%5.3f,%d\n" %
                                      (sourceID, frameSetID, self.cuEventID,
                                       dbc.intDefault(), dbc.realDefault(), flag))

                        sourceID = sID
                        frameSetID = frameID
                        flag = flg

                # Flush the final pending group
                outFile.write("%d,%d,%d,%d,%5.3f,%d\n" %
                              (sourceID, frameSetID, self.cuEventID,
                               dbc.intDefault(), dbc.realDefault(), flag))

        else:
            # Non-correlated case: one CSV row per match, as supplied
            for sID, mfID, eNum, cuEID, seqN, sep, flag in nonDetectMatchList:
                outFile.write("%d,%d,%d,%d,%d,%5.3f,%d\n" %
                              (sID, mfID, eNum, cuEID, seqN, sep, flag))
2783 2784 #-------------------------------------------------------------------------- 2785
2786 - def _writeToBestAperFile(self, outputCsvFile, frameSetID):
2787 """ Writes to best output file 2788 """ 2789 outputLines = [] 2790 uniqueSources = self._queryUniqueSources("s.frameSetID=%s" % frameSetID) 2791 for sourceID in uniqueSources: 2792 line = "%s,%s,%s" % (sourceID, self.cuEventID, frameSetID) 2793 for filterID, _shtName in self._synPassbands: 2794 line += ",%s" % (self.bestAperDict[(sourceID, filterID)]) 2795 outputLines.append(line + "\n") 2796 file(outputCsvFile, "a").writelines(outputLines) 2797 # reset dictionary 2798 self.bestAperDict = {}
2799 2800 #------------------------------------------------------------------------------ 2801
def selectColumns(table, names):
    """
    Select specific columns for given table schema.

    Picks out every column beginning with the first common name whose root
    (the part after the prefix) also exists for every other common name.

    @param table: Table schema.
    @type  table: Schema.Table
    @param names: List of common names to include.
    @type  names: list(str)

    @return: Lists of columns.
    @rtype:  list(str), list(Schema.Attribute)

    """
    attrs = []
    cols = []
    if not names:
        return attrs, cols

    prefix = names[0]
    prefixLen = len(prefix)
    for column in table.columns:
        if column.name[:prefixLen] != prefix:
            continue
        colRoot = column.name[prefixLen:]
        # Keep the root only if every common name has this column too
        if all(name + colRoot in table.columns for name in names):
            attrs.append(colRoot)
            cols.append(column)

    return attrs, cols
2825 2826 #------------------------------------------------------------------------------ 2827
def getWriteFormat(columns, colList=None, noBegComma=False):
    """
    Returns a comma-separated %-format string for the given columns.

    @param columns:    Sequence of column schema objects.
    @param colList:    Optional list of column names to include; all
                       columns are included when empty or None. (Default
                       changed from a shared mutable [] to None -- same
                       behaviour, avoids the mutable-default pitfall.)
    @param noBegComma: If True, strip the leading comma from the result.

    @return: Format string, e.g. ',%d,%f,%s'.
    @rtype:  str
    """
    fmtStr = ''.join(getFormat(col) for col in columns
                     if not colList or col.name in colList)

    # Drop the leading comma when requested and there is one to drop
    if noBegComma and fmtStr:
        return fmtStr[1:]

    return fmtStr
2838 2839 #------------------------------------------------------------------------------ 2840
def getFormat(column):
    """ Returns the comma-prefixed %-format code matching the given
        column's SQL data type: integers -> %d, real/float -> %f,
        anything else -> %s.
    """
    dataType = column.dataType.lower()
    if 'int' in dataType:
        return ',%d'
    if dataType in ('real', 'float'):
        return ',%f'
    return ',%s'
2850 2851 #------------------------------------------------------------------------------ 2852
def stratModel(param, mag):
    """ Function from Strateva et al. 2001. Theoretical fit to rms line.

    Evaluates param[0] + param[1]*10^(0.4*mag) + param[2]*10^(0.8*mag).
    """
    constTerm = param[0]
    linearTerm = param[1] * (10 ** (0.4 * mag))
    quadTerm = param[2] * (10 ** (0.8 * mag))
    return constTerm + linearTerm + quadTerm
2857 2858 #------------------------------------------------------------------------------ 2859
def turnStratModel(param):
    """ Returns the magnitude at which the Strateva rms fit turns over,
        or None when the linear and quadratic coefficients have the same
        sign (no turnover).
    """
    # @TODO: make this more general
    if not param[1] / param[2] < 0:
        return None

    return 1 / (0.4 * math.log(10)) * math.log(-0.5 * param[1] / param[2])
#------------------------------------------------------------------------------
# Entry point for script.

# Allow module to be imported as well as executed from the command line
if __name__ == '__main__':
    # Positional arguments: programme plus observation date range
    CLI.progArgs += [
        CLI.Argument("programmeID", "DXS"),
        CLI.Argument("begin_date", "05A", isValOK=CLI.isDateOK),
        CLI.Argument("end_date", "05A", isValOK=CLI.isDateOK)]

    # Define additional command-line interface options for Cu6
    CLI.progOpts += [
        CLI.Option('a', 'bestAperRange',
                   'range of best aper',
                   'LIST'),
        CLI.Option('f', 'calcVarOnly',
                   'skips best match and set up'),
        CLI.Option('l', 'frameSetList',
                   'list of framesets to use',
                   'LIST'),
        CLI.Option('k', 'keep_halfspaces',
                   # fixed typo: "conjuction" -> "conjunction"
                   'keep existing half-spaces (when used in conjunction with'
                   ' --recreate)'),
        CLI.Option('m', 'motionModel',
                   'Motion model used in astrometry', 'NAME'),
        CLI.Option('n', 'minProdID',
                   'minimum product ID',
                   'NUMBER', isValOK=lambda x: x.isdigit()),
        CLI.Option('o', 'oneFrameSet',
                   'only produces synoptic tables for 1 frame set'),
        CLI.Option('r', 'recreate',
                   'recreate tables from scratch with latest schema'),
        CLI.Option('v', 'verbose',
                   'more verbose logging'),
        CLI.Option('x', 'maxProdID',
                   'maximum product ID',
                   'NUMBER', isValOK=lambda x: x.isdigit())]

    cli = CLI(Cu6, "$Revision: 10053 $")
    Logger.isVerbose = cli.getOpt('verbose')
    Logger.addMessage(cli.getProgDetails())

    # --recreate is destructive, so require explicit confirmation
    if cli.getOpt('recreate') and not CLI.isConfirmed("The -r/--recreate option "
        "will destroy all data presently in this programme's variability tables"):
        raise SystemExit

    cu = Cu6(cli.getArg('programmeID'), cli=cli)
    try:
        cu.dateRange = cu.sysc.obsCal.dateRange(cli.getArg("begin_date"),
                                                cli.getArg("end_date"))
    except Exception as error:
        # Bad date arguments: log the details and abort cleanly
        eType = "Invalid Option"
        Logger.addExceptionMessage(error, eType)
        raise SystemExit(eType + ": see log " + cu._log.pathName)

    # Transfer remaining option values onto the CU6 instance
    cu.calcVarOnly = cli.getOpt('calcVarOnly')
    cu.createNewTable = cli.getOpt('recreate')
    cu.keepHalfSpaces = cli.getOpt('keep_halfspaces')
    cu.oneFrameSet = cli.getOpt('oneFrameSet')

    if cli.getOpt('maxProdID'):
        cu.maxProdID = int(cli.getOpt('maxProdID'))

    if cli.getOpt('minProdID'):
        cu.minProdID = int(cli.getOpt('minProdID'))

    if cli.getOpt('bestAperRange'):
        cu.bestAperRange = \
            Cu6.Range(*map(int, cli.getOpt('bestAperRange').split(',')))

    if cli.getOpt('frameSetList'):
        cu.frameSetList = \
            list(map(int, cli.getOpt('frameSetList').split(',')))

    if cli.getOpt('motionModel'):
        cu.motionModel = cli.getOpt('motionModel')

    cu.run()

#------------------------------------------------------------------------------