Package helpers :: Module EsoRelease
[hide private]

Source Code for Module helpers.EsoRelease

   1  #! /usr/bin/env python 
   2  #------------------------------------------------------------------------------ 
   3  # $Id: EsoRelease.py 10246 2014-03-13 12:29:55Z NicholasCross $ 
   4  """ 
   5     Creates the FITS files necessary for the current ESO-SAF data release 
   6     requirements. These are currently copies of the deep tile images and 
   7     confidence maps; catalogues of these via detection table outgests; passband 
   8     merged source tables also via source table outgests and light curves from 
   9     a database query. 
  10   
  11     @author: R.S. Collins 
  12     @org:    WFAU, IfA, University of Edinburgh 
  13  """ 
  14  #------------------------------------------------------------------------------ 
  15  from __future__      import division, print_function 
  16  from future_builtins import map, zip 
  17   
  18  from   collections     import defaultdict, namedtuple 
  19  import hashlib 
  20  import math 
  21  from   multiprocessing import Process 
  22  import numpy 
  23  from   operator        import attrgetter, itemgetter 
  24  import os 
  25  import pyfits 
  26  import shutil 
  27  import textwrap 
  28  import copy 
  29   
  30  from   wsatools.CLI                 import CLI 
  31  import wsatools.CSV                     as csv 
  32  import wsatools.DbConnect.CommonQueries as queries 
  33  from   wsatools.DbConnect.CuSession import CuSession 
  34  import wsatools.DbConnect.DbConstants   as dbc 
  35  from   wsatools.DbConnect.DbSession import Join, PkLeftJoin, SelectSQL, Outgester, DbSession 
  36  import wsatools.DbConnect.Schema        as schema 
  37  import wsatools.FitsUtils               as fits 
  38  from   wsatools.Logger              import ForLoopMonitor, Logger 
  39  from   wsatools.ProvenanceUtils     import Provenance 
  40  from   wsatools.SystemConstants     import DepCodes 
  41  import wsatools.Utilities               as utils 
  42  import wsatools.Astrometry              as astro 
  43  from   wsatools.ObsCalendar         import VistaCal 
  44  import wsatools.ExternalProcess         as extp 
  45  import invocations.cu13.cu13            as cu13 
  46  #------------------------------------------------------------------------------ 
  47   
class FileNameFormat(object):
    """ Defines the ESO-SAF products file naming format.

        Non-metadata products are named
        <prog>_er<rel>_<pointing>_<filter>_<fileType>_<fileID>.<ext>;
        'MetaData' products use the shorter <prog>_er<rel>_<filter>_<type>.<ext>.
    """
    fileType = "%s"                         #: File type string.
    formatStr = "%s_er%s_%%s_%s_%s_%%s.%s"  #: File name format string.

    def __init__(self, programme, releaseNum, fileDir=None, fileType=fileType,
                 filters="%s"):
        """ Initialises file path for given programme / release number.

            @param programme:  Programme being released (supplies acronym).
            @param releaseNum: ESO release number.
            @param fileDir:    Optional directory prefixed to every file name.
            @param fileType:   Product type, e.g. "image"; default "%s" leaves
                               the type to be filled in by getFilePath().
            @param filters:    Filter-name component (metadata products only).
        """
        self.fileType = fileType
        progName = programme.getAcronym().lower()
        fileExt = self._getFileExt(fileType)
        # Metadata products drop the pointing and fileID name components.
        if 'MetaData' in fileType:
            self.formatStr = "%s_er%s_%s_%s.%s"
        self.formatStr %= (progName, releaseNum, filters, fileType, fileExt)
        self.fileDir = fileDir
        if fileDir:
            self.formatStr = os.path.join(fileDir, self.formatStr)

    #--------------------------------------------------------------------------

    def _getFileExt(self, fileType):
        """ @return: File name extension for the current file type.
            @rtype:  str
        """
        if fileType == "%s":
            return fileType  # still a placeholder, resolved in getFilePath()

        if fileType in ["image", "conf", "deepimage", "deepconf"]:
            return "fits.fz"  # compressed image products

        if fileType in ["jpeg"]:
            return "jpg"

        return "fits"  # catalogues and other table products

    #--------------------------------------------------------------------------

    def getFilePath(self, field, fileType=None, filterName=None,
                    offsetPos=None, extNum=None, useConf=False):
        """ @return: Full path to file of given field.
            @rtype:  str

            @param field:      Named tuple describing the image/catalogue
                               (may be None for metadata products).
            @param fileType:   Override product type when not fixed at init.
            @param filterName: Filter name, if not supplied by field.
            @param offsetPos:  Pawprint offset position suffix.
            @param extNum:     FITS extension number suffix.
            @param useConf:    Use field.confID instead of field.fileID.
        """
        # Metadata products have a fully-resolved name already.
        if not field and 'MetaData' in self.formatStr:
            return self.formatStr

        pointing = self._getPointing(field.ra, field.dec)
        if offsetPos is not None:
            pointing += "_off%s" % offsetPos
        elif "frameType" in field._fields and 'tile' in field.frameType:
            pointing += "_tile"
        elif "frameType" in field._fields and 'mosaic' in field.frameType:
            pointing += "_mosaic"
        if "filterName" in field._fields:
            filterName = field.filterName.lower()

        formatTuple = (pointing,)
        if filterName:
            formatTuple += (filterName,)

        # Resolve the product type placeholder from the frame type.
        if self.fileType == "%s":
            fileType = fileType or ("conf"
                if field.frameType.endswith("conf") else "image")
            if "deep" in field.frameType and fileType in ["conf", "image"]:
                fileType = "deep" + fileType
            formatTuple += (fileType,)

        # File identifier: image ID or confidence-map ID, plus extension no.
        if not useConf:
            if extNum:
                formatTuple += ("%s_%s" % (field.fileID, extNum),)
            else:
                formatTuple += (field.fileID,)
        else:
            if extNum:
                formatTuple += ("%s_%s" % (field.confID, extNum),)
            else:
                formatTuple += (field.confID,)
        if self.fileType == "%s":
            formatTuple += (self._getFileExt(fileType),)
        return self.formatStr % formatTuple

    #--------------------------------------------------------------------------

    def _getPointing(self, ra, dec):
        """ Converts given ra and dec position, in decimal hours/degrees, to a
            sexagesimal string, e.g. "12h30+030d15".
        """
        # Bug fix: dec of exactly 0 now takes a "+" sign (the original tested
        # dec > 0, so zero declination was formatted as negative).
        sign = "+" if dec >= 0 else "-"
        dec = abs(dec)
        formatStr = "%02dh%02d%s%03dd%02d"

        return formatStr % (ra, 60 * (ra - int(ra)), sign, dec, 60 * (dec - int(dec)))
class EsoRelease(CuSession):
    """ Creates the ESO-SAF release FITS files for the given programme.
    """
    #--------------------------------------------------------------------------
    # Class constants (access as EsoRelease.varName)

    #: Number of decimal places in ra
    radp = 2

    #: Number of decimal places in dec
    decdp = 2

    #: Curation Unit number
    cuNum = 30

    #: Fields to query for deep tile image details.
    imageFields = "fileName, Multiframe.multiframeID AS fileID, " \
                  "raBase as ra, decBase as dec, filterName, frameType, confID"

    #: Named tuple matching the columns of imageFields (+ productID).
    ImageField = namedtuple('ImageField',
        'fileName fileID ra dec filterName frameType confID productID')

    #: Bibliographic references for each programme.
    # @TODO: Add in VVV and VIKING
    refForProg = defaultdict(lambda : "2012A&A...548A.119C",
                             VMC="2011A&A...527A.116C",
                             VIDEO="2013MNRAS.428.1281J")

    #: These FITS header keywords should not be messed with.
    reservedKeys = ["BITPIX", "NAXIS", "XTENSION", "DATE", "PCOUNT", "GCOUNT",
                    "TFIELDS", "TCTYP", "TCRVL", "TCRPX", "BSCALE", "BZERO",
                    "CTYPE", "CRPIX", "CRVAL", "CD1_", "CD2_", "PV2_"]

    #: Dictionary of regions
    regionDict = dict()

    #: Translation for SQL type to PyFITS type.
    sqlToFitsDataType = {'tinyint': 'B', 'smallint': 'I', 'int': 'J',
                         'bigint': 'K', 'real': 'E', 'float': 'D'}
    # Extend with every varchar(n) -> fixed-width ASCII column code, n=1..256.
    for ii in range(256):
        sqlToFitsDataType['varchar(%s)' % (ii + 1)] = '%sA' % (ii + 1)

    #: Log messages keyed by esoProductTypes bit number.
    prodBits = {0:"Processing deep images",
                1:"Processing first calibration merged-band catalogues",
                2:"Processing final calibration phase 3 catalogues"}

    #: Row currently being processed (None until processing starts).
    currentRowNumber = None

    #: Total number of rows being processed.
    numberRows = 0

    #--------------------------------------------------------------------------
    # Public member variable default values - set from command-line options

    piOnly = False          #: Create PI info only
    isQuickRun = False      #: Just a test run for the FITS headers?
    releaseNum = 1          #: ESO release number.
    fieldIDs = None         #: List of fields to release (if None, release all)
    fileName = None         #: FileName to derive fieldID list from
    addExtProv = False      #: Add in external provenance
#    useOBdata = False      #: Use OB data.
    copyJpegsPaws = False   #: Copy JPEGs for pawprints
    skipDeepData = False    #: Skip deep data
    esoProductTypes = 1     #: Eso product types: deeps 1, deeps + srclists 2,
                            #  deeps catalogues 3
    releaseDBName = "NONE"  #: Static release DB that release is done from
    completeFilters = False #: Only framesets with complete set of filters
    istTrialRun = False     #: presumably a trial-run flag -- not set in view
    newBits = set()         #: product-type bits new to this release (assumed)
    directoryDict = {}      #: Staging sub-directories keyed by category.
    regionInfo = None       #: Region description (set when grouping=='region').
    delKeysDict = defaultdict(list)  #: FITS keys to delete, per file type.
    skipFieldList = []      #: Fields to skip.
    skipSources = False     #: Skip source outgest (rare testing use).
    tileObjectDict = {}     #: Object name per tile multiframeID.
    isCutOut = False        #: Is this a cut-out release?
    mPhotFilesDict = defaultdict(list)  #: Multi-epoch photometry files.
    hasNeighTable = False   #: Programme has a neighbour table?
    missingAdpFiles = False #: Missing ESO ADP files found?
    reference = None        #: Bibliographic reference override.
    noExternNeigh = False   #: Skip external neighbour tables?
    bandUcdDict = {}        #: Filter -> UCD string (filled for VSA).
    ucdBandDict = {}        #: UCD string -> filter (inverse of bandUcdDict).

    #--------------------------------------------------------------------------
    # Private class parameters - should not be altered

    _autoCommit = True  # Overrides CuSession default

    #--------------------------------------------------------------------------
    # Private member variables

    _fileDir = ''
    """ Common directory path for the ESO product files.
    """
    _metaSchema = ''
    """ Dictionary of metadata table schema by name.
    """
    _newNameOfFile = None
    """ Dictionary of ESO-style file names of images referenced by
        their original archive names. These are the components of other images.
    """
    _progSchema = ''
    """ Dictionary of programme table schema by name.
    """
    def _onRun(self):
        """ Prepares each product in turn required by the ESO-SAF for the
            current programme: deep images, detection catalogues, merged
            source catalogues, light curves / variables and external
            catalogues, driven by the esoProductTypes bit-mask and the
            programme's RequiredEsoRelease settings.
        """
        # @TODO: Get minimum ID values for all tables
        # Get release requirements - from VSA
        if self.sysc.isVSA():
            # Filter -> UCD mapping (and its inverse) for VISTA passbands.
            self.bandUcdDict = {
                'z':'em.opt.I',
                'y':'em.IR.NIR',
                'j':'em.IR.J',
                'h':'em.IR.H',
                'ks':'em.IR.K'}
            self.ucdBandDict = dict([(self.bandUcdDict[key], key)
                                     for key in self.bandUcdDict])

        # Connect to the static release database the release is built from.
        self.releaseDB = DbSession(self.releaseDBName, userName="ldservrw")
        if not self.fieldIDs:
            # Default to every field required for the source product type.
            self.fieldIDs = [str(fieldID) for fieldID in self.releaseDB.query(
                "distinct fieldID", "Required%s" %
                self.programme.getAttr("sourceProdType"), orderBy="fieldID")]
        entriesExistER = self.archive.queryEntriesExist("EsoRelease",
            "programmeID=%s AND releaseNum=%s" %
            (self.programmeID, self.releaseNum))
        self.newProdTypes = self.esoProductTypes
        if entriesExistER:
            # A release row already exists: reconcile product types with it.
            curEsoInfo = self.archive.query("productTypes,fromDb", "EsoRelease",
                "programmeID=%s AND releaseNum=%s" %
                (self.programmeID, self.releaseNum), firstOnly=True)
            self.checkEsoRelease(curEsoInfo)
        self.esoProgReq = self.archive.query("*", "RequiredEsoRelease",
            "programmeID=%s" % self.programmeID, firstOnly=True)
        self.useOBdata = self.esoProgReq.incOBData == 1

        self.areMosaics = self.archive.queryEntriesExist(
            "RequiredMosaic", "programmeID=%s" % self.programmeID)

        self.isRegion = self.esoProgReq.grouping == 'region'
        if self.isRegion:
            # @TODO: Add in region parts info for object...
            self.setRegions()
        self._newNameOfFile = {}

        self.setUpApdList()
        # @TODO:
        self.setTileObjDictNonRegions()

        Logger.addMessage("Parsing schema...")

        self._metaSchema = dict((table.name, table)
            for table in schema.parseTables(self.sysc.metadataSchema()))

        self._progSchema = dict((table.name, table)
            for table in schema.parseTables(self.programme.getSchemaScript()))

        # Check database against schema
        Logger.addMessage("Checking that the database schema is correct...")
        try:
            self.releaseDB.checkSchema(
                tableSchema=self._metaSchema.values() + self._progSchema.values(),
                releasedOnly=not self.releaseDB.isLoadDb)

        except schema.MismatchError as error:
            raise EsoRelease.CuError(error)
        # @TODO: ETWS to declare appropriate SystemConstant for operations use
        # Operations ('scos' on a non-test DB) write to the shared staging
        # area; everyone else writes to a per-user test directory.
        self._fileDir = ("/disk14/www-data/%s-eso/"
            % self.programme.getAcronym().lower()
            if (os.getenv('USER') == 'scos' and 'test' not in self.releaseDB.database.lower()) else
            os.path.join(self.sysc.testOutputPath("eso_" + os.getenv("USER") + "2"),
                         self.programme.getAcronym().lower()))
        utils.ensureDirExist(self._fileDir)
        utils.ensureDirExist(os.path.join(self._fileDir, 'images'))
        Logger.addMessage("ESO DR%s files will be created in: %s"
                          % (self.releaseNum, self._fileDir))
        self.directoryDict['images'] = os.path.join(self._fileDir, 'images')

#        self.tilePawPrintDict = self.getTilePawPrintDict()

        self.getArcFileOBDict()
        # Bit 0 of esoProductTypes: deep images.
        if self.esoProductTypes & 1 > 0 and not self.skipDeepData:
            Logger.addMessage("Querying database for WFAU-products...")
            # Get productTiles and OB tiles if needed.
            tiles = self.queryFileProducts(fileName=self.fileName)
            self.setPrimaryHeaderPosition(tiles)
            self.getProgIDListDict(tiles)
            # @TODO: If mosaics get TL_RA, TL_DEC from Provenance -
            if self.areMosaics:
                #self.getTilePosDict(tiles)
                # @FIXME: This needs to be changed
                # Hard-coded SADT positions, keyed on productID <= 5.
                self.sadtTilePosDict = {}
                for tile in tiles:
                    if tile.productID <= 5:
                        self.sadtTilePosDict[tile.fileID] = (22618.000, -44358.440, -90.000)
                    else:
                        self.sadtTilePosDict[tile.fileID] = (3748.984, -432755.080, -0.000)
            Logger.addMessage("Selected tiles")
            # Modified copies of deep tiles + paw-prints (images + maps + jpegs)
            self.copyDeepTiles(tiles)

            # Detection catalogue files for each of these tiles
            self.outgestCatalogues(tiles)

        # Initial Merged source catalogue files for each frame set of all tiles
        # @TODO: temporary fix - get rid off asap
        self.magLimSatDict = self.getMagLimSatDict()
        if self.esoProductTypes & 2 > 0:
            self.outgestSources(isSourceList=True)

        # Final Merged source catalogue files for each frame set of all tiles
        if self.esoProductTypes & 4 > 0:
            self.srcCatSourceIDName = 'sourceID'
            # @NOTE: just in rare cases for testing self.skipSources = True
            self.outgestSources(isSourceList=False)
            # Source light curve files for each frame set of all tiles
            if self.esoProgReq.incVariables == 1:
                self.outgestMultiEpoch()
                self.outgestVariables()
        # NOTE(review): areExternalCats is a method, so this tests the bound
        # method object, which is always truthy -- presumably
        # self.areExternalCats() was intended; confirm.
        if self.areExternalCats:
            self.outgestExternalCats()
        self.updateDatabase()
374 #-------------------------------------------------------------------------- 375
376 - def setUpApdList(self):
377 """ 378 """ 379 directory = "/disk47/sys/eso_adp_lists" 380 inputFileListRoot = self.programme.getAcronym().upper() 381 possFiles = [fName for fName in os.listdir(directory) 382 if inputFileListRoot in fName] 383 if possFiles: 384 latestDS = max([fName.split('.')[0].split('_')[1] for fName in possFiles]) 385 fileName = os.path.join(directory, "%s_%s.txt" % (inputFileListRoot, latestDS)) 386 self.esoArchFileDict = dict(line.split() for line in file(fileName).readlines()) 387 else: 388 raise EsoRelease.CuError("No ADP file list. Query the ESO archive " 389 "http://archive.eso.org/wdb/wdb/adp/phase3_vircam/form")
390 391 392 #-------------------------------------------------------------------------- 393
394 - def setTileObjDictNonRegions(self):
395 """ 396 """ 397 # VMC / non VMC 398 tileObjList = self.archive.query("m.multiframeID,obsName", 399 "ProgrammeFrame as p,Multiframe as m,MultiframeEsoKeys as e", 400 "p.programmeID=%s and p.multiframeID=m.multiframeID and " 401 "m.frameType like 'tile%%stack' and m.multiframeID=e.multiframeID" 402 % self.programmeID) 403 self.tileObjectDict = dict([(mfID, (obsName.split('-')[0].upper() 404 if self.programmeID == self.sysc.scienceProgs.get("VMC") else 405 obsName)) for mfID, obsName in tileObjList])
406 407 #-------------------------------------------------------------------------- 408 409 410 #-------------------------------------------------------------------------- 411
412 - def setPrimaryHeaderPosition(self, tiles):
413 """ Produces dictionary of primary header positions. 414 """ 415 mfIDs = [tile.fileID for tile in tiles] 416 frameType = self.releaseDB.query("frameType", "Multiframe", 417 "multiframeID=%s" % mfIDs[0], firstOnly=True) 418 selectStr = "multiframeID," 419 if 'tile' in frameType or 'mosaic' in frameType: 420 selectStr += "centralRa,centralDec" 421 else: 422 selectStr += "crvalx,crvaly" 423 424 raDecs = self.releaseDB.query(selectStr, "CurrentAstrometry", 425 "multiframeID in (%s) and extNum=2" % ','.join(map(str, mfIDs))) 426 self.primaryHeaderPositionDict = dict([(mfID, (ra, dec)) 427 for mfID, ra, dec in raDecs])
428 429 #-------------------------------------------------------------------------- 430 431
432 - def getTilePosDict(self, tiles):
433 """ Dictionary of tile ra/dec positions 434 @TODO: Position angle? 435 """ 436 # @FIXME: Some horrible cockup in positions 437 438 439 440 tileMfIDs = [tile.fileID for tile in tiles if 'conf' not in tile.fileName] 441 sadtTilePos = self.releaseDB.query( 442 "distinct combiframeID,tileRa,tileDec,tileOffAngle", 443 "Multiframe as m,Provenance as v", 444 "combiframeID in (%s) and v.multiframeID=m.multiframeID" 445 " and m.tileRa>0. and tileDec>=-900000" % ','.join(map(str, tileMfIDs)) 446 ) 447 self.sadtTilePosDict = {} 448 for multID in tileMfIDs: 449 tpList = [(tlRa, tlDec, tloa) 450 for mfID, tlRa, tlDec, tloa in sadtTilePos if mfID == multID] 451 if len(tpList) == 1: 452 453 self.sadtTilePosDict[multID] = tpList.pop() 454 # Sort out conf 455 tileMfIDs = [tile.fileID for tile in tiles if 'conf' in tile.fileName] 456 sadtTilePos = self.releaseDB.query( 457 "distinct md.confID,m.tileRa,m.tileDec,m.tileOffAngle", 458 "Multiframe as md,Provenance as v,Multiframe as m", 459 "v.combiframeID=md.multiframeID and md.confID in (%s) and v.multiframeID=m.multiframeID" 460 " and m.tileRa>0. and m.tileDec>=-900000" % ','.join(map(str, tileMfIDs))) 461 for multID in tileMfIDs: 462 tpList = [(tlRa, tlDec, tloa) 463 for mfID, tlRa, tlDec, tloa in sadtTilePos if mfID == multID] 464 if len(tpList) == 1: 465 self.sadtTilePosDict[multID] = tpList.pop()
466 467 468 #-------------------------------------------------------------------------- 469
470 - def getTilePawPrintDict(self):
471 """ Dictionary of tiles for each pawprint in releaseDB 472 """ 473 # Do not include deeps! Deep tiles can have shallow pawprints in them 474 # as well as deep. 475 frameTypeSelTile = queries.getFrameSelection('tile', noDeeps=True, 476 alias='mt', selType='%stack') 477 frameTypeSelStack = queries.getFrameSelection('stack', noDeeps=True, 478 alias='mp', selType='%stack') 479 480 return dict(self.releaseDB.query( 481 "mp.fileName,mt.fileName", 482 "Provenance as v,Multiframe as mp,Multiframe as mt,ProgrammeFrame as p", 483 "mp.multiframeID=v.multiframeID and mt.multiframeID=v.combiframeID " 484 "and mt.multiframeID=p.multiframeID and p.programmeID=%s and " 485 "%s AND %s" % (self.programmeID, frameTypeSelTile, frameTypeSelStack)))
486 487 #-------------------------------------------------------------------------- 488
489 - def getArcFileOBDict(self):
490 """ Dictionary of arcFiles for each OB frame 491 """ 492 # Do not include deeps! Deep tiles can have shallow pawprints in them 493 # as well as deep. 494 # @FIXME: Get all normal arcFiles 495 496 # Do tiles and stacks separately 497 # Confidence more difficult too. 498 499 self.arcFileOBDict = defaultdict(list) 500 501 frameTypeSel = queries.getFrameSelection('stack', noDeeps=True, 502 alias='m', selType='%stack') 503 504 arcFileList = self.releaseDB.query( 505 "m.fileName,m2.arcFile,mc.arcFile", 506 "Multiframe as m,ProgrammeFrame as p,Provenance as v," 507 "Multiframe as m2, Multiframe as mc", 508 "m.multiframeID=p.multiframeID and p.programmeID=%s and %s and " 509 "v.combiframeID=m.multiframeID and v.multiframeID=m2.multiframeID " 510 "and mc.multiframeID=m2.confID" % 511 (self.programmeID, frameTypeSel)) 512 uniqueFileNames = set([fName for fName, _aFile, _aFileC in arcFileList]) 513 for fileName in uniqueFileNames: 514 self.arcFileOBDict[fileName] = [aFile for fName, aFile, _aFileC in arcFileList 515 if fName == fileName] 516 self.arcFileOBDict[fits.getConfMap(fileName)] = [aFileC for fName, _aFile, aFileC in arcFileList 517 if fName == fileName] 518 frameTypeSel = queries.getFrameSelection('tile', noDeeps=True, 519 alias='m', selType='%stack') 520 521 arcFileList = self.releaseDB.query( 522 "m.fileName,m2.arcFile,mc.arcFile", 523 "Multiframe as m,ProgrammeFrame as p,Provenance as v," 524 "Provenance as v2,Multiframe as m2, Multiframe as mc", 525 "m.multiframeID=p.multiframeID and p.programmeID=%s and %s and " 526 "v.combiframeID=m.multiframeID and v.multiframeID=v2.combiframeID " 527 "and v2.multiframeID=m2.multiframeID and mc.multiframeID=m2.confID" % 528 (self.programmeID, frameTypeSel)) 529 uniqueFileNames = set([fName for fName, _aFile, _aFileC in arcFileList]) 530 for fileName in uniqueFileNames: 531 self.arcFileOBDict[fileName] = [aFile for fName, aFile, _aFileC in arcFileList 532 if fName == fileName] 533 
self.arcFileOBDict[fits.getConfMap(fileName)] = [aFileC for fName, _aFile, aFileC in arcFileList 534 if fName == fileName]
535 536 537 #-------------------------------------------------------------------------- 538 539
    def getMagLimSatDict(self):
        """ Returns a dictionary of (abMagLim, abSatMag) per multiframeID for
            every required tile/mosaic stack, trying in order: releaseDB,
            the main archive, then computing (and storing) fresh values.
            Also logs product files missing from the ESO ADP archive list.
        """
        productType = 'mosaic' if self.areMosaics else 'tile'
        fieldIDStr = (" and r.fieldID in (%s)" % ','.join(map(str, self.fieldIDs))
                      if self.fieldIDs else "")

        mfIDs = self.releaseDB.query(
            "m.multiframeID",
            "ProgrammeFrame as p,Required%s as r,Multiframe as m" % productType,
            "r.programmeID=%s and r.programmeID=p.programmeID and "
            "r.productId=p.productID and p.multiframeID=m.multiframeID and "
            "m.deprecated=0 and m.frameType like '%%%s%%stack'%s"
            % (self.programmeID, productType, fieldIDStr))
        # For VVV, more complicated - want colours from single-epoch...
        # @TODO: Best get all mfIDs for now and sort out in next release
        if self.programmeID == 120:
            mfIDs = self.releaseDB.query(
                "m.multiframeID",
                "ProgrammeFrame as p,Multiframe as m",
                "p.programmeID=%s and p.multiframeID=m.multiframeID and "
                "m.deprecated in (0,68) and m.frameType like '%%%s%%stack'"
                % (self.programmeID, productType))
        # Check releaseDB
        # Cross-check image ('s.fz') and catalogue names against the ESO ADP
        # list; record any missing names for later follow-up.
        fileNameList = self.releaseDB.query(
            selectStr="fileName+'s.fz',catName",
            fromStr="Multiframe",
            whereStr="multiframeID in (%s)" % ','.join(map(str, mfIDs)))
        missingAdpFiles = []
        for fileName, catName in fileNameList:
            origName = os.path.basename(fileName)
            if not origName in self.esoArchFileDict:
                missingAdpFiles.append(origName + '\n')
            origName = os.path.basename(catName)
            if not origName in self.esoArchFileDict:
                missingAdpFiles.append(origName + '\n')
        if missingAdpFiles:
            # NOTE: file() is the Python-2-only builtin; handle is left to GC.
            outFileName = "/disk47/sys/eso_adp_lists/missing%s.list" % self.programme.getAcronym()
            file(outFileName, "w").writelines(missingAdpFiles)
#            raise EsoRelease.CuError("Missing ADP Files: %s " % outFileName)

        # First source of limits: the release database itself.
        releaseDBRes = self.releaseDB.query(
            selectStr="multiframeID,abMagLim,abSatMag",
            fromStr="MultiframeDetector",
            whereStr="multiframeID in (%s) and abMagLim>0 and abSatMag>0"
            % ','.join(map(str, mfIDs)))

        missMfIDs = \
            set(mfIDs).difference(set(rdb.multiframeID for rdb in releaseDBRes))
        # Check mainDB
        mainDBRes = self.archive.query(
            selectStr="multiframeID,abMagLim,abSatMag",
            fromStr="MultiframeDetector",
            whereStr="multiframeID in (%s) and abMagLim>0 and abSatMag>0"
            % ','.join(map(str, missMfIDs)))
        missMfIDs = missMfIDs.difference(set(mdb.multiframeID for mdb in mainDBRes))
        # Calc
        # Last resort: compute limits for extension 2 and write them back.
        calcRes = []
        progress = ForLoopMonitor(missMfIDs)
        Logger.addMessage("Calculating new magnitude limits and saturation limits...")
        for mfID in missMfIDs:
            abMagLim = self.getAbMagLim(mfID, 2)
            abSatMag = self.getSatLimit(mfID, 2)
            # update VSA...
            self.archive.update("MultiframeDetector",
                [("abMagLim", abMagLim), ("abSatMag", abSatMag)],
                where="multiframeID=%s and extNum=2" % mfID)
            calcRes.append((mfID, abMagLim, abSatMag))
            progress.testForOutput("")
        allRes = releaseDBRes + mainDBRes + calcRes
        return dict([(mfID, (abMagLim, abSatMag))
                     for mfID, abMagLim, abSatMag in allRes])
611 612 #-------------------------------------------------------------------------- 613
614 - def areExternalCats(self):
615 """ Are there external catalogues? 616 """ 617 # archive or releaseDB? Dangers both ways... 618 619 return self.archive.queryEntriesExist( 620 "ExternalProduct", "programmeID=%s AND productType != 'mosaic'" % 621 self.programmeID)
622 #-------------------------------------------------------------------------- 623
    def copyDeepTiles(self, tiles):
        """
        Copy WFAU-produced deep tile image and confidence map FITS files to the
        ESO-SAF transfer staging area. Modifying headers to meet the ESO-SAF
        specifications.

        @param tiles: Sequence of ImageField-style named tuples for the tile
                      images and confidence maps to release.
        """
        fieldOfID = dict((tile.fileID, tile) for tile in tiles)

        # Map each confidence map's ID back to its parent image's ID.
        imageOfConf = \
            dict((tile.confID, tile.fileID) for tile in tiles
                 if 'conf' not in tile.frameType)
        fnFormat = \
            FileNameFormat(self.programme, self.releaseNum,
                           os.path.join(self._fileDir, 'images'))

        filesToCopy = []
        Logger.addMessage("%s tiles " % len(tiles))
        newFilePathDict = dict([(tile, fnFormat.getFilePath(tile)) for tile in tiles])
        for tile in tiles:
            self._newNameOfFile[tile.fileName] = newFilePathDict[tile]

            if self.isRegion:
                # Region-grouped programmes: object name is ACRONYM/region.
                self.regionDict[tile.fileID] = self.getRegion(self,
                    raDec=[tile.ra, tile.dec], useFrameSetID=False)
                self.tileObjectDict[tile.fileID] = self.programme.getAcronym().upper() + "/" + self.regionDict[tile.fileID][1]

        for tile in tiles:
            newFilePath = newFilePathDict[tile]
            isDeep = 'deep' in newFilePath
#            isProduct = tile.productID > 0
            self.updateDirDict(newFilePath)
            jpegFiles = [(jpegFile.compFile,
                          fnFormat.getFilePath(tile, fileType="jpeg",
                                               extNum=jpegFile.extNum))
                         for jpegFile in self.queryJpegFiles(tile)]
            jpegFilesConf = []
            if tile.confID > 0:
                jpegFilesConf = [(jpegFile.compFile,
                                  fnFormat.getFilePath(tile, fileType="jpeg",
                                                       extNum=jpegFile.extNum,
                                                       useConf=True))
                                 for jpegFile in self.queryJpegFiles(tile,
                                                                     isConf=True)]
                # Rebind confID to the ESO-style file path of the conf map.
                tile = tile._replace(
                    confID=fnFormat.getFilePath(fieldOfID[tile.confID]))

            if not os.path.exists(newFilePath):
                filesToCopy.append((tile, newFilePath, jpegFiles,
                                    jpegFilesConf))

            # Query database for deep paw-print components of deep tiles
            # IF IS PRODUCT...
            ppOffsets = None
            if isDeep:
                # multiframeID (or confID) -> offsetPos for each deep pawprint
                # that contributed to this deep tile / conf map.
                ppOffsets = dict(self.releaseDB.query(
                    selectStr="m2.%s, offsetPos" % ("confID"
                        if 'conf' in tile.frameType else "multiframeID"),
                    fromStr="Provenance AS pv, Multiframe AS m1, Multiframe AS m2"
                            ", RequiredStack AS s, ProgrammeFrame AS p",
                    whereStr=("m1.confID=%s" % tile.fileID
                              if 'conf' in tile.frameType else
                              "m1.multiframeID=%s" % tile.fileID) +
                             " AND m1.multiframeID=pv.combiframeID"
                             " AND pv.multiframeID=m2.multiframeID"
                             " AND m2.frameType LIKE '%%stack'"
                             " AND p.multiframeID=m2.multiframeID"
                             " AND p.programmeID=%s AND p.programmeID=s.programmeID"
                             " AND p.productID=s.productID"
                             % (self.programmeID)))
#            elif not isDeep and self.useOBdata:
#                # @TODO: Don't use OB pawprints
#                # @TODO: How many layers?
#                # OB confs do not have productIDs
#                # Use Provenance - multiframeID, use offsetX,offsetY to get offPos
#                offsets = self.releaseDB.query(
#                    selectStr="m.multiframeID, m.offsetX, m.offsetY",
#                    fromStr="Provenance AS pv,Multiframe AS m",
#                    whereStr="pv.multiframeID=m.multiframeID AND "
#                             "pv.combiframeID = %s" % tile.fileID)
#                ppOffsets = dict((mfID, astro.getOffSetPos(offX, offY))
#                                 for mfID, offX, offY in offsets)
            if ppOffsets:
                # @TODO: Move this to a single query earlier - more efficient
                mfIDs = [key for key in ppOffsets]
                for deepPawPrint in self.queryFileProducts(mfIDs=mfIDs, isPawPrint=True):
                    newFilePath = fnFormat.getFilePath(deepPawPrint,
                        offsetPos=ppOffsets[deepPawPrint.fileID])
                    self.updateDirDict(newFilePath)
                    jpegFilesConf = []
                    jpegFiles = [(jpegFile.compFile,
                                  fnFormat.getFilePath(deepPawPrint,
                                      fileType="jpeg",
                                      offsetPos=ppOffsets[deepPawPrint.fileID],
                                      extNum=jpegFile.extNum))
                                 for jpegFile in self.queryJpegFiles(deepPawPrint)
                                 if self.copyJpegsPaws]
                    self._newNameOfFile[deepPawPrint.fileName] = newFilePath
                    fieldOfID[deepPawPrint.fileID] = deepPawPrint

                    if 'conf' not in deepPawPrint.frameType:
                        imageOfConf[deepPawPrint.confID] = deepPawPrint.fileID
                    # Assign name of confidence map file to image file details
                    if deepPawPrint.confID > 0:
                        jpegFilesConf = [(jpegFile.compFile,
                                          fnFormat.getFilePath(deepPawPrint,
                                              fileType="jpeg",
                                              offsetPos=ppOffsets[deepPawPrint.fileID],
                                              extNum=jpegFile.extNum, useConf=True))
                                         for jpegFile in self.queryJpegFiles(deepPawPrint,
                                                                             isConf=True)
                                         if self.copyJpegsPaws]
                    if deepPawPrint.fileID in ppOffsets and deepPawPrint.confID in fieldOfID:
                        deepPawPrint = deepPawPrint._replace(
                            confID=fnFormat.getFilePath(
                                field=fieldOfID[deepPawPrint.confID],
                                offsetPos=ppOffsets[deepPawPrint.fileID]))
                    else:
                        deepPawPrint = deepPawPrint._replace(
                            confID=None)

                    if not os.path.exists(newFilePath):
                        filesToCopy.append((deepPawPrint, newFilePath, jpegFiles,
                                            jpegFilesConf))

        Logger.addMessage("Copying %s WFAU-products..." % len(filesToCopy))
        filesToCopy.sort(key=itemgetter(1))  # sort by file name
        progress = ForLoopMonitor(filesToCopy)
        for field, newFilePath, jpegFiles, jpegFilesConf in filesToCopy:
            # Copy JPEGs first in case of error
            newJpegFiles = self.copyJpegFiles(jpegFiles, field)
            newJpegFilesConf = [newjp for _oldjp, newjp in jpegFilesConf]
            # Copy WFAU-deep-product
            shutil.copy2(field.fileName, newFilePath)

            # Modify header in an exception trap to ensure completion
            try:
                # 25% overhead but I/O limited, so parallelisation doesn't help
                fits.uncompressFits([newFilePath])
                if not os.path.exists(newFilePath):
                    os.rename(newFilePath.split('.fz')[0], newFilePath)

                # Take header metadata from image for confidence files
                if field.frameType.endswith("conf"):
                    field = field._replace(confID=field.fileName)
                    field = field._replace(fileID=imageOfConf[field.fileID])
                    field = field._replace(
                        fileName=fieldOfID[field.fileID].fileName)

                self.modHeaders(field, newFilePath, newJpegFiles, newJpegFilesConf)

            except:
                # If incomplete, then remove, so will be recreated next time
                if os.path.exists(newFilePath):
                    os.remove(newFilePath)

                raise

            progress.testForOutput()
787 788 #-------------------------------------------------------------------------- 789
790 - def copyJpegFiles(self, jpegFiles, field):
791 """ Copies the given list of JPEG files to the ESO release staging 792 area. 793 """ 794 newJpegFiles = [] 795 for oldJpegPath, newJpegPath in jpegFiles: 796 newJpegFiles.append(newJpegPath) 797 if not os.path.exists(newJpegPath): 798 if oldJpegPath == dbc.charDefault(): 799 raise EsoRelease.CuError("JPEG entries in the database are" 800 " incomplete for multiframeID %s (filename: %s)" 801 % (field.fileID, field.fileName)) 802 803 shutil.copy2(oldJpegPath, newJpegPath) 804 return newJpegFiles
805 806 #-------------------------------------------------------------------------- 807
808 - def jpegCards(self, jpegFile, asNum):
809 """ @return: A list of FITS header card details for the given JPEG file. 810 @rtype: list(tuple(str, PyValue, str)) 811 """ 812 md5Sum = hashlib.md5(open(jpegFile, 'rb').read()).hexdigest() 813 jpegFile = os.path.basename(jpegFile) 814 815 return [('ASSON%d' % asNum, jpegFile, 816 self.getStdShortDesc(jpegFile, 'Name of associated file')), 817 818 ('ASSOC%d' % asNum, 'ancillary.preview'.upper(), 819 'Category of associated file'), 820 821 ('ASSOM%d' % asNum, md5Sum, self.getStdShortDesc(md5Sum, 822 'MD5 checksum of associated file'))]
823 824 #-------------------------------------------------------------------------- 825
826 - def getProgIDListDict(self, tiles):
827 """ 828 For each product, get a list of PROG_IDs from inputs 829 830 MultiframeEsokeys.obsProgID 831 """ 832 self.progIDListDict = defaultdict(list) 833 if self.areMosaics: 834 prodMfIDList = [tile.fileID for tile in tiles if 'conf' not in tile.fileName] 835 progIDList = self.releaseDB.query( 836 "distinct v.combiframeID,obsProgID", 837 "Provenance as v,MultiframeEsoKeys as e", 838 "combiframeID in (%s) and v.multiframeID=e.multiframeID" 839 % ','.join(map(str, prodMfIDList))) 840 841 for multiframeID in prodMfIDList: 842 self.progIDListDict[multiframeID] = [opi for mfID, opi in progIDList 843 if mfID == multiframeID] 844 # Sort out conf 845 prodMfIDList = [tile.fileID for tile in tiles if 'conf' in tile.fileName] 846 progIDList = self.releaseDB.query( 847 "distinct m.confID,obsProgID", 848 "Multiframe as m,Provenance as v,MultiframeEsoKeys as e", 849 "v.combiframeID=m.multiframeID and m.confID in (%s) and v.multiframeID=e.multiframeID" 850 % ','.join(map(str, prodMfIDList))) 851 for multiframeID in prodMfIDList: 852 self.progIDListDict[multiframeID] = [opi for mfID, opi in progIDList 853 if mfID == multiframeID] 854 else: 855 # Go down to lower level... 856 prodMfIDList = [tile.fileID for tile in tiles 857 if 'conf' not in tile.fileName and '_dp' in tile.fileName] 858 # deep tiles 859 if len(prodMfIDList) > 0: 860 progIDList = self.releaseDB.query( 861 "distinct v.combiframeID,obsProgID", 862 "Provenance as v,Provenance as v2,Multiframe as m,MultiframeEsoKeys as e", 863 "v.combiframeID in (%s) and v.multiframeID=v2.combiframeID and " 864 "m.multiframeID=v2.combiframeID and m.frameType='deepstack' and " 865 "v2.multiframeID=e.multiframeID" % ','.join(map(str, prodMfIDList))) 866 for multiframeID in prodMfIDList: 867 self.progIDListDict[multiframeID] = [opi for mfID, opi in progIDList 868 if mfID == multiframeID] 869 # deep stacks. 
870 progIDList = self.releaseDB.query( 871 "distinct v2.combiframeID,obsProgID", 872 "Provenance as v,Provenance as v2,Multiframe as m,MultiframeEsoKeys as e", 873 "v.combiframeID in (%s) and v.multiframeID=v2.combiframeID and " 874 "m.multiframeID=v2.combiframeID and m.frameType='deepstack' and " 875 "v2.multiframeID=e.multiframeID" % ','.join(map(str, prodMfIDList))) 876 deepStackMfIDs = set([mfID for mfID, _opi in progIDList]) 877 for multiframeID in deepStackMfIDs: 878 self.progIDListDict[multiframeID] = [opi for mfID, opi in progIDList 879 if mfID == multiframeID] 880 # tiles. 881 prodMfIDList = [tile.fileID for tile in tiles 882 if 'conf' not in tile.fileName and '_dp' not in tile.fileName] 883 if len(prodMfIDList) > 0: 884 progIDList = self.releaseDB.query( 885 "distinct v.combiframeID,obsProgID", 886 "Provenance as v,MultiframeEsoKeys as e", 887 "combiframeID in (%s) and v.multiframeID=e.multiframeID" 888 % ','.join(map(str, prodMfIDList))) 889 for multiframeID in prodMfIDList: 890 self.progIDListDict[multiframeID] = [opi for mfID, opi in progIDList 891 if mfID == multiframeID] 892 # deep tile conf 893 894 prodMfIDList = [tile.fileID for tile in tiles 895 if 'conf' in tile.fileName and '_dp' in tile.fileName] 896 if len(prodMfIDList) > 0: 897 progIDList = self.releaseDB.query( 898 "distinct m.confID,obsProgID", 899 "Provenance as v,Provenance as v2,Multiframe as m,Multiframe as m2," 900 "MultiframeEsoKeys as e", 901 "v.combiframeID=m.multiframeID and m.confID in (%s) and " 902 "v.multiframeID=v2.combiframeID and " 903 "m2.multiframeID=v2.combiframeID and m2.frameType='deepstack' and " 904 "v2.multiframeID=e.multiframeID" % ','.join(map(str, prodMfIDList))) 905 for multiframeID in prodMfIDList: 906 self.progIDListDict[multiframeID] = [opi for mfID, opi in progIDList 907 if mfID == multiframeID] 908 909 # deep stack conf 910 prodMfIDList = [tile.fileID for tile in tiles 911 if 'conf' not in tile.fileName and '_dp' in tile.fileName] 912 if len(prodMfIDList) > 
0: 913 progIDList = self.releaseDB.query( 914 "distinct m2.confID,obsProgID", 915 "Provenance as v,Provenance as v2,Multiframe as m2," 916 "MultiframeEsoKeys as e", 917 "v.combiframeID in (%s) and " 918 "v.multiframeID=v2.combiframeID and " 919 "m2.multiframeID=v2.combiframeID and m2.frameType='deepstack' and " 920 "v2.multiframeID=e.multiframeID" % ','.join(map(str, prodMfIDList))) 921 deepConfMfIDs = set([mfID for mfID, _opi in progIDList]) 922 for multiframeID in deepConfMfIDs: 923 self.progIDListDict[multiframeID] = [opi for mfID, opi in progIDList 924 if mfID == multiframeID] 925 # tile conf 926 prodMfIDList = [tile.fileID for tile in tiles 927 if 'conf' in tile.fileName and '_dp' not in tile.fileName] 928 if len(prodMfIDList) > 0: 929 progIDList = self.releaseDB.query( 930 "distinct m.co3nfID,obsProgID", 931 "Multiframe as m,Provenance as v,MultiframeEsoKeys as e", 932 "v.combiframeID=m.multiframeID and m.confID in (%s) and v.multiframeID=e.multiframeID" 933 % ','.join(map(str, prodMfIDList))) 934 for multiframeID in prodMfIDList: 935 self.progIDListDict[multiframeID] = [opi for mfID, opi in progIDList 936 if mfID == multiframeID] 937 # ANd use frameset - mergelog 938 frameSetIDs = ','.join( 939 str(queries.getFrameSetID(self.releaseDB, fieldID, self.programme)) 940 for fieldID in self.fieldIDs) 941 mergeLog = self._progSchema[self.programme.getMergeLogTable()] 942 mfIDCols = ','.join([column.name for column in mergeLog.columns if "mfID" in column.name]) 943 mergeLogInfo = self.releaseDB.query("frameSetID,%s" % mfIDCols, 944 self.programme.getMergeLogTable(), "frameSetID in (%s)" % frameSetIDs) 945 for mlInfo in mergeLogInfo: 946 frameSetID = mlInfo[0] 947 progIDList = set() 948 for mfID in mlInfo[1:]: 949 progIDList = progIDList.union(self.progIDListDict[mfID]) 950 self.progIDListDict[frameSetID] = list(progIDList)
951 952 953 #-------------------------------------------------------------------------- 954 955
956 - def modHeaders(self, field, filePath, jpegFiles, jpegFilesConf):
957 """ 958 Modifies headers of the given WFAU-produced FITS file to meet the 959 ESO-SAF specifications. 960 961 """ 962 delKeysList = [] 963 isConf = field.frameType.endswith("conf") 964 isPawPrint = '_off' in filePath 965 isMosaic = '_mosaic' in filePath 966 isDeep = '_deep' in filePath 967 # isTile = '_tile' in filePath 968 Logger.addMessage("Modifying header for " + filePath, alwaysLog=False) 969 970 delPrimKeys = set([] if isPawPrint else ["MJD-OBS", "MJD-END"]) 971 delExtKeys = set([]) 972 # Get data from archive 973 primaryInfo = self.releaseDB.query( 974 selectStr="instrument, filterName, object, creationDate, equinox" 975 ", raDecSys, expTime, obsProgID, dprTech, njitter, noffsets" 976 ", nustep, sadtSurveyID, tileRa, tileDec, tileOffAngle" 977 ", project, obsName, detNdit, casuVers", 978 fromStr=Join(["Multiframe", "MultiframeEsoKeys"], "multiframeID"), 979 whereStr="Multiframe.multiframeID=%s" % field.fileID, 980 firstOnly=True) 981 982 extensInfo = self.releaseDB.query( 983 selectStr="MultiframeDetector.extNum, photZPCat, photZPErrCat" 984 ", totalExpTime, abMagLim, seeing, avStellarEll" 985 ", xPixSize, yPixSize, deprecated", 986 fromStr=Join(["MultiframeDetector", "CurrentAstrometry"], 987 ["multiframeID", "extNum"]), 988 whereStr="MultiframeDetector.multiframeID=%s" % field.fileID, 989 orderBy="MultiframeDetector.extNum") 990 991 # @TODO: is this necessary now? 
992 provInfo = Provenance(self.releaseDB, field.fileName) 993 epsReg = primaryInfo.project 994 objectStr = (primaryInfo.obsName.split('-')[0].upper() 995 if self.programmeID == self.sysc.scienceProgs.get("VMC") else 996 primaryInfo.obsName) 997 if self.programmeID == self.sysc.scienceProgs.get("VMC"): 998 epsReg += '-' + objectStr 999 1000 elif field and self.isRegion and self.regionDict[field.fileID]: 1001 epsReg += '/' + self.regionDict[field.fileID][0] 1002 objectStr = primaryInfo.project + "/" + self.regionDict[field.fileID][1] 1003 self.tileObjectDict[field.fileID] = objectStr 1004 # @FIXME: Remove code duplication and tidy from this point on. 1005 1006 1007 1008 tIntpPix = numpy.median([ext.totalExpTime for ext in extensInfo 1009 if ext.deprecated == 0]) 1010 1011 1012 primaryCards = [ 1013 ("ORIGIN", "ESO-PARANAL", "European Southern Observatory") 1014 , ("DATE", 1015 fits.formatDateTime(primaryInfo.creationDate, isDiffSec=False), 1016 "Date the file was written") 1017 1018 , ('TELESCOP', "ESO-VISTA", 'ESO Telescope designation') 1019 , ('INSTRUME', primaryInfo.instrument, 'Instrument name') 1020 , ('FILTER', primaryInfo.filterName, 'Filter name') 1021 , ('OBJECT', objectStr, 'Target designation') 1022 , ('EQUINOX', primaryInfo.equinox, 'Standard FK5 (years)') 1023 , ('RADECSYS', 'FK5', 'Coordinate reference frame') 1024 , ('EXPTIME', round(tIntpPix, 1), 1025 'Total integration time per pixel (s)') 1026 1027 , ('TEXPTIME', float(provInfo.getSumTotalExpTime() or tIntpPix), 1028 'Total integration time of all exposures (s)') 1029 1030 , ('MJD-OBS', provInfo.getMjdObs(), 'Start of observations (days)') 1031 , ('MJD-END', provInfo.getMjdEnd(), 'End of observations (days)') 1032 , ("DATE-OBS", 1033 fits.formatDateTime(provInfo.getDateObs()), 1034 "Date the observation was started (UTC)")] 1035 1036 if len(self.progIDListDict[field.fileID]) > 1: 1037 primaryCards += [('PROG_ID', 'MULTI', 'ESO programme identification')] 1038 for ii, opi in 
enumerate(self.progIDListDict[field.fileID]): 1039 primaryCards += [('PROGID%s' % (ii + 1), opi, 1040 'ESO programme identification')] 1041 else: 1042 primaryCards += [('PROG_ID', self.progIDListDict[field.fileID][0], 1043 'ESO programme identification')] 1044 for ind, oblock in enumerate(provInfo.getObsIDList()): 1045 primaryCards.append( 1046 ('OBID%d' % (ind + 1), oblock, 'Observation block ID')) 1047 1048 primaryCards += [ 1049 ('M_EPOCH', True, 'TRUE if resulting from multiple epochs'), 1050 ('SINGLEXP', False, 'TRUE if resulting from single exposure'), 1051 ('NCOMBINE', provInfo.getNoRawFrames(), 1052 '# of combined raw science data files')] 1053 # index = 1 1054 provFileList = provInfo.getPrevLayer() if isDeep else self.arcFileOBDict[field.fileName] 1055 finalProvList = [] 1056 for provFile in provFileList: 1057 # Use origFile 1058 if not isDeep: 1059 pvFileName = provFile 1060 desc = self.getStdShortDesc(pvFileName, 'Originating raw science file') 1061 finalProvList.append((pvFileName, desc)) 1062 1063 else: 1064 pvFileName = provFile.name 1065 if isConf: 1066 pvFileName = fits.getConfMap(pvFileName) 1067 if (self.sysc.deepSuffix not in pvFileName and 1068 self.sysc.tileSuffix not in pvFileName and self.sysc.isVSA()): 1069 pvFileNameList = self.arcFileOBDict[pvFileName] 1070 for pvFileName in pvFileNameList: 1071 desc = self.getStdShortDesc(pvFileName, 'Originating raw science file') 1072 finalProvList.append((pvFileName, desc)) 1073 1074 else: 1075 pvFileName = self._newNameOfFile[pvFileName] 1076 pvFileName = os.path.basename(pvFileName) 1077 desc = self.getStdShortDesc(pvFileName, 'Originating science product file') 1078 pvFileName = pvFileName + "s.fz" if "s.fz" not in pvFileName else pvFileName 1079 finalProvList.append((pvFileName, desc)) 1080 1081 1082 finalProvList = sorted(list(set(finalProvList))) 1083 for index, (pvFileName, desc) in enumerate(finalProvList): 1084 primaryCards.append(('PROV%d' % (index + 1), pvFileName, desc)) 1085 1086 obsTech = 
('IMAGE,JITTER' if isMosaic and primaryInfo.dprTech == 'NONE' 1087 else primaryInfo.dprTech.replace('&#044;', ',')) 1088 primaryCards.append(('OBSTECH', obsTech, "Technique of observation")) 1089 1090 if not isConf: 1091 primaryCards.append(('PRODCATG', 1092 ('science.image'.upper() if not isPawPrint else 'science.MEFimage'.upper()), 1093 'Data product category')) 1094 1095 primaryCards += [ 1096 ("IMATYPE", "TILE" if not isPawPrint else "PAWPRINT", 1097 "Specific image type"), 1098 1099 ("ISAMP", isPawPrint, 1100 "TRUE if image represents partially sampled sky"), 1101 1102 ("FLUXCAL", "ABSOLUTE", 'Certifies the validity of PHOTZP')] 1103 1104 1105 1106 # primaryCards.append(('PROCSOFT',softVers,'Reduction software version')) 1107 reference = EsoRelease.refForProg[self.programme.getAcronym().upper()] 1108 primaryCards.append(('REFERENC', reference, 'Bibliographic reference')) 1109 if not isConf: 1110 asson = os.path.basename(field.confID) 1111 desc = self.getStdShortDesc(asson, 'Name of associated file') 1112 primaryCards += [('ASSON1', asson, desc), 1113 ('ASSOC1', 'ancillary.weightmap'.upper(), 'Category of associated file')] 1114 1115 if not isPawPrint and not isConf: 1116 primaryCards += \ 1117 self.jpegCards(jpegFiles[0], asNum=2) 1118 primaryCards += \ 1119 self.jpegCards(jpegFilesConf[0], asNum=3) 1120 tilePos = self.calcTilePos(field.fileID, isPawPrint) 1121 tileRa = (primaryInfo.tileRa if (primaryInfo.tileRa > 0. and not isMosaic) else 1122 tilePos.tileRa) 1123 1124 tileDec = (primaryInfo.tileDec if (primaryInfo.tileDec >= -900000. and not isMosaic) else 1125 tilePos.tileDec) 1126 1127 tileOffAngle = round(primaryInfo.tileOffAngle 1128 if primaryInfo.tileOffAngle >= -900000. else 1129 tilePos.tileOffAngle, 3) 1130 if isMosaic: 1131 tileRa, tileDec, tileOffAngle = self.sadtTilePosDict[field.fileID] 1132 1133 # @FIXME: Why are we rounding the precision? Is this a requirement of 1134 # ESO or is it just to save space? 
If the latter it won't work 1135 # because Python always allocates maximum precision for floats. 1136 1137 1138 primaryCards += [ 1139 ("TL_RA", round(tileRa, 3), "Tile RA [HHMMSS.TTT]"), 1140 ("TL_DEC", round(tileDec, 3), "Tile Declination [DDMMSS.TTT]"), 1141 ("TL_OFFAN", tileOffAngle, "Tile rotator offset angle [deg]")] 1142 1143 # @FIXME: regional set up... 1144 1145 primaryCards.append( 1146 ('EPS_REG', epsReg, 'ESO public survey region name')) 1147 if provInfo.isSameJitterPattern() and primaryInfo.njitter > 0: 1148 primaryCards += [ 1149 ("NJITTER", primaryInfo.njitter, "Number of jitter positions"), 1150 ("NOFFSETS", primaryInfo.noffsets, "Number of offset positions"), 1151 ("DIT", primaryInfo.expTime, "Integration Time")] 1152 else: 1153 delPrimKeys.update(["NJITTER", "NOFFSETS", "DIT", "NDIT"]) 1154 delKeysList.extend(["NJITTER", "NOFFSETS", "DIT", "NDIT"]) 1155 primaryCards += [ 1156 ("NUSTEP", primaryInfo.nustep, "Number of microstep positions"), 1157 ("ORIGFILE", 1158 os.path.basename(field.confID if isConf else field.fileName), 1159 "Original WFAU filename")] 1160 if "deep" not in field.frameType: 1161 primaryCards += [("NDIT", primaryInfo.detNdit, "Number of dits")] 1162 1163 jpegAsNum = 1 if isConf else 2 1164 extensionCards = {} 1165 for extNo, extInfo in enumerate(extensInfo): # , jpegFile in zip(extensInfo, jpegFiles): 1166 # Astrometry should be automatic 1167 photZPEso = extInfo.photZPCat 1168 photZPEso += 2.5 * math.log10(primaryInfo.expTime) 1169 if self.esoProgReq.photSys == 'AB': 1170 vegaToAB = self.releaseDB.query("vegaToAB", "Filter", "shortName='%s'" 1171 % primaryInfo.filterName, firstOnly=True) 1172 photZPEso += vegaToAB 1173 abSatLimit = self.getSatLimit(field.fileID, extInfo.extNum) 1174 seeing = extInfo.seeing 1175 seeing *= math.sqrt(extInfo.xPixSize * extInfo.yPixSize) 1176 abMagLim = extInfo.abMagLim if extInfo.abMagLim > 0. 
else self.getAbMagLim(field.fileID, extInfo.extNum) 1177 1178 extCards = [ 1179 ("EXTVER", extInfo.extNum - 1, "FITS Extension version"), 1180 ("INHERIT", True, "Primary header keywords are inherited"), 1181 ("BUNIT", "ADU", "Physical unit of array values"), 1182 ("PHOTZP", round(photZPEso, 3), "Photometric zeropoint"), 1183 ("PHOTZPER", round(extInfo.photZPErrCat, 3), 1184 "Uncertainty on PHOTZP"), 1185 1186 ("PHOTSYS", self.esoProgReq.photSys, "Photometric system"), 1187 ("ABMAGLIM", round(abMagLim, 3), 1188 "5-sigma limiting magnitude for point sources"), 1189 1190 ("ABMAGSAT", round(abSatLimit, 3), 1191 "Saturation limit for point sources (AB mags)"), 1192 1193 ("PSF_FWHM", round(seeing, 4), "Spatial resolution (arcsec)"), 1194 ("ELLIPTIC", round(extInfo.avStellarEll, 4), 1195 "Average ellipticity of point sources")] 1196 # Update VSA with abMagLim and abSatLim 1197 self.archive.update( 1198 "MultiframeDetector", [("abMagLim", abMagLim), ("abSatMag", abSatLimit)], 1199 where="multiframeID=%s and extNum=%s" % (field.fileID, extInfo.extNum)) 1200 if isPawPrint and not isConf and self.copyJpegsPaws: 1201 primaryCards += self.jpegCards(jpegFiles[extNo], jpegAsNum) 1202 jpegAsNum += 1 1203 extensionCards[extInfo.extNum] = extCards 1204 1205 # Remove unnecessary cards. 1206 for ext in extensionCards: 1207 delPrimKeys.update(card[0] for card in extensionCards[ext]) 1208 1209 delExtKeys.update(card[0] for card in primaryCards) 1210 delExtKeys.update(["MAGZPT", "MAGZRR"]) 1211 1212 1213 if isMosaic: 1214 fitsFile = fits.open(filePath) 1215 hdulist = pyfits.HDUList(pyfits.PrimaryHDU(fitsFile[1].data)) 1216 for ext in range(2): 1217 for card in fitsFile[ext].header.ascardlist(): 1218 if card.key != 'ORIGNAME' and card.key != 'CONTINUE': 1219 self.addCard(hdulist[0].header, card.key 1220 if not card.key.startswith('ESO') else 'HIERARCH ' + card.key, 1221 card.value, card.comment) 1222 # Update with image centre..... 
1223 raDec = self.primaryHeaderPositionDict[field.fileID] 1224 1225 if card.key == 'RA': 1226 comment = 'RA (J2000) image centre (deg)' 1227 self.addCard(hdulist[0].header, card.key, raDec[0], comment) 1228 if card.key == 'DEC': 1229 comment = 'DEC (J2000) image centre (deg)' 1230 self.addCard(hdulist[0].header, card.key, raDec[1], comment) 1231 hdulist.writeto(filePath.replace('.fz', '')) 1232 os.remove(filePath) 1233 os.rename(filePath.replace('.fz', ''), filePath) 1234 fitsFile = fits.open(filePath, 'update') 1235 for ii, hdu in enumerate(fitsFile): 1236 removeKeys = \ 1237 [key for key in hdu.header if 'PROV' in key or 'ESO ' in key] 1238 1239 for key in removeKeys: 1240 del hdu.header[key] 1241 1242 # Uncomment if using with cfitsio<3280 1243 # if ii is 0: # Primary - no keys for tiles, only paw-prints 1244 # if isPawPrint: 1245 # for key, value, desc in primaryCards: 1246 # hdu.header.update(key, value, desc) 1247 # 1248 # for key in delPrimKeys: 1249 # if key in hdu.header: 1250 # del hdu.header[key] 1251 # else: 1252 # extNum = ii + 1 1253 # for key, value, desc in extensionCards[extNum]: 1254 # hdu.header.update(key, value, desc) 1255 # 1256 # if not isPawPrint: 1257 # for key, value, desc in primaryCards: 1258 # hdu.header.update(key, value, desc) 1259 # else: 1260 # for key in delExtKeys: 1261 # if key in hdu.header: 1262 # del hdu.header[key] 1263 # 1264 # hdu.header.add_comment("Image created by WFAU") 1265 1266 # Comment out if using cfitsio<3280 1267 if isPawPrint: 1268 if ii is 0: 1269 for key, value, desc in primaryCards: 1270 self.addCard(hdu.header, key, value, desc) 1271 1272 for key in delPrimKeys: 1273 if key in hdu.header: 1274 del hdu.header[key] 1275 hdu.header.add_comment("Image created by WFAU") 1276 else: 1277 extNum = ii + 1 1278 for key, value, desc in extensionCards[extNum]: 1279 self.addCard(hdu.header, key, value, desc) 1280 1281 for key in delExtKeys: 1282 if key in hdu.header: 1283 del hdu.header[key] 1284 else: 1285 for key in 
delPrimKeys: 1286 if key in hdu.header: 1287 del hdu.header[key] 1288 1289 for key, value, desc in primaryCards: 1290 self.addCard(hdu.header, key, value, desc) 1291 1292 for key, value, desc in extensionCards[2]: 1293 self.addCard(hdu.header, key, value, desc) 1294 1295 hdu.header.add_comment("Image created by WFAU") 1296 1297 # delete unwanted headers 1298 fitsFile.close() 1299 fits.removeDuplicateKeywords(filePath) 1300 # Make sure VSA_MFID,CASUVERS are correct 1301 fitsFile = fits.open(filePath, 'update') 1302 del fitsFile[0].header['VSA_TIME'] 1303 self.addCard(fitsFile[0].header, 'VSA_MFID', field.fileID, 1304 'VSA multiframeID') 1305 self.addCard(fitsFile[0].header, 'CASUVERS', primaryInfo.casuVers, 1306 'CASU software version') 1307 1308 1309 # 55% of total processing cost, but I/O limited - cannot parallelise 1310 fits.checksum([filePath]) 1311 os.rename(filePath, filePath.replace('.fz', '')) 1312 fits.compressFits([filePath.replace('.fz', '')]) 1313 os.rename(filePath.replace('.fz', ''), filePath) 1314 # @TODO: Eventually remove this - hack for mosaics 1315 if isMosaic: 1316 hdulist = fits.open(filePath, 'update') 1317 hdulist[1].header['ZEXTEND'] = False 1318 del hdulist[1].header['ZTENSION'] 1319 del hdulist[1].header['ZPCOUNT'] 1320 del hdulist[1].header['ZGCOUNT'] 1321 hdulist.close() 1322 # @FIXME: Inefficient 1323 fits.checksum([filePath]) 1324 if os.path.exists(filePath): 1325 os.rename(filePath, filePath.replace('.fz', '')) 1326 fits.compressFits([filePath.replace('.fz', '')]) 1327 os.rename(filePath.replace('.fz', ''), filePath) 1328 1329 self.delKeysDict[field.fileID] = delKeysList
1330 1331 #-------------------------------------------------------------------------- 1332
    def outgestCatalogues(self, deepTiles):
        """ Outgest FITS files from detection table for each deep tile.

            For each non-confidence deep tile without an existing catalogue
            file, selects detection rows (seqNum>0) from the release database
            and writes them out via outgestCatalogueFile in a forked process.
        """
        fnFormat = FileNameFormat(self.programme, self.releaseNum,
                                  os.path.join(self._fileDir, 'images'),
                                  fileType="cat")

        detTable = self._progSchema[self.programme.getAttr('detectionTable')]
        columns = detTable.columns
        fileIdStr = "multiframeID"

        # Columns never released; per-programme explicit column selections
        # (entries may carry "x as y" SQL aliases).
        excludedColumns = ['cx', 'cy', 'cz', 'htmid']
        includedColumns = {'VIDEO': ['objid', 'filterid', 'x', 'y', 'ra', 'dec',
            'apermagnoapercorr3', 'apermag3err as apermagnoapercorr3err',
            'apermag3', 'apermag3err', 'apermagnoapercorr6',
            'apermag6err as apermagnoapercorr6err', 'apermag6', 'apermag6err',
            'kronmag as automag', 'kronmagerr as automagerr', 'halfrad',
            'petromag', 'petromagerr', 'errbits', 'classstat']}

        if self.programme.getAcronym().upper() in includedColumns:
            # Order by
            # Map list position -> alias name ('' when no "as" alias given)
            aliasesDict = dict([(ii, (ic.lower().split(' as ')[1]
                                      if ' as ' in ic.lower() else ''))
                for ii, ic in enumerate(includedColumns[self.programme.getAcronym().upper()])])
            incColumns = []
            selectStrColNames = includedColumns[self.programme.getAcronym().upper()]
            for ii, ic in enumerate(includedColumns[self.programme.getAcronym().upper()]):
                # Deep-copy the schema column so renaming to the alias does
                # not mutate the shared schema definition.
                col = [copy.deepcopy(column) for column in columns
                       if column.name.lower() == ic.split()[0]].pop()
                if ii in aliasesDict and len(aliasesDict[ii]) > 0:
                    col.name = aliasesDict[ii]
                incColumns.append(col)

            columns = incColumns
        else:
            columns = [column for column in columns if column.name.lower() not in excludedColumns]
            selectStrColNames = [col.name for col in columns]

        if not self.releaseDB.isLoadDb:
            selectStr = ', '.join(map(str, selectStrColNames))
            fromStr = detTable.name
            fileIdAlias = detTable.name
        else:
            # Load databases split the detection table into Raw/Astrometry/
            # Photometry parts that must be re-joined for the outgest.
            fileIdAlias = 'R'
            joinList = [(self._progSchema[detTable.name + "Raw"], fileIdAlias),
                        (self._progSchema[detTable.name + "Astrometry"], 'A'),
                        (self._progSchema[detTable.name + "Photometry"], 'P')]

            commonCols = utils.getDuplicates(utils.unpackList(
                map(str, table.columns) for table, _alias in joinList))

            # @TODO: More complicated, but is this ever used - needs fixing if so?
            selectStr = ', '.join(
                ('%s.%s' % (fileIdAlias, column) if str(column) in commonCols else
                 str(column))
                for column in self._progSchema[detTable.name].columns)

            fromStr = Join([(self.releaseDB.tablePath(table.name), alias)
                            for table, alias in joinList],
                           detTable.primaryKey())

        if self.isQuickRun:
            selectStr = "TOP 10 " + selectStr

        # Skip confidence maps and tiles whose catalogue already exists
        deepTiles = [deepTile for deepTile in deepTiles
                     if self.sysc.confSuffix not in deepTile.fileName
                     and not os.path.exists(fnFormat.getFilePath(deepTile))]
        Logger.addMessage("Creating catalogue files for %s deep tiles..."
                          % len(deepTiles))

        progress = ForLoopMonitor(deepTiles)
        for deepTile in deepTiles:
            if self.esoProgReq.photSys == 'AB':
                # Shift magnitude columns by this tile's Vega-to-AB offset
                vegaToAB = self.releaseDB.query(
                    "vegaToAB", "Filter", "shortName='%s'" % deepTile.filterName,
                    firstOnly=True)
                selectStrFinal = self.modifyToAB(selectStr, vegaToAB)
            else:
                selectStrFinal = selectStr
            filePath = fnFormat.getFilePath(deepTile)
            self.updateDirDict(filePath)
            query = SelectSQL(selectStrFinal, fromStr,
                              where="%s.%s=%s AND %s.seqNum>0"
                              % (fileIdAlias, fileIdStr, deepTile.fileID, fileIdAlias))

            # Run each file outgest as a separate process to avoid memory leaks
            outgest = Process(target=self.outgestCatalogueFile,
                              args=(filePath, columns, query, deepTile))
            outgest.start()
            outgest.join()
            if outgest.exitcode:
                raise EsoRelease.CuError("Forked outgest process failed."
                                         " Please check stdout.")

            progress.testForOutput()
1430 1431 #-------------------------------------------------------------------------- 1432
1433 - def modifyToAB(self, selectStr, vegaToAB, filters=None):
1434 """ 1435 """ 1436 if not filters: 1437 parts = [] 1438 for part in selectStr.split(','): 1439 if 'mag' in part.lower() and 'err' not in part.lower(): 1440 if ' as ' in part.lower(): 1441 origName, alias = part.split(' as ') 1442 part = '(%s + %s) as %s' % (origName, vegaToAB, alias) 1443 else: 1444 part = '(%s + %s) as %s' % (part, vegaToAB, part) 1445 else: 1446 part = part 1447 parts.append(part) 1448 return ','.join(parts) 1449 else: 1450 # Source table 1451 parts = [] 1452 for part in selectStr.split(','): 1453 if 'mag' in part.lower() and 'err' not in part.lower(): 1454 # Mags 1455 for index, band in enumerate(filters): 1456 if band in part: 1457 if ' as ' in part.lower(): 1458 origName, alias = part.split(' as ') 1459 part = '(%s + %s) as %s' % (origName, vegaToAB[index], alias) 1460 else: 1461 part = '(%s + %s) as %s' % (part, vegaToAB[index], part) 1462 elif ('pnt' in part.lower() or 'ext'in part.lower()) and 'err' not in part.lower(): 1463 # cols 1464 primVtoAB = [vegaToAB[index] for index, band in enumerate(filters) if band + 'm' in part][0] 1465 secVtoAB = [vegaToAB[index] for index, band in enumerate(filters) if 'm' + band in part][0] 1466 part = '(%s + %s - %s) as %s' % (part, primVtoAB, secVtoAB, part) 1467 else: 1468 part = part 1469 parts.append(part) 1470 return ','.join(parts)
1471 1472 #-------------------------------------------------------------------------- 1473
    def outgestCatalogueFile(self, filePath, columns, query, deepTile):
        """ Outgests a single ESO catalogue file product.

            Builds a binary-table HDU from the given detection query, attaches
            frame metadata as header keys, strips keys listed in delKeysDict
            for this tile, and writes the checksummed FITS file to filePath.
            Designed to run in a forked worker process (see outgestCatalogues).
        """
        # Prepare catalogue data
        fitsTable = self.queryFitsTable(columns, query)

        # Metadata: recover the "multiframeID=N" clause (alias stripped) from
        # the first AND-term of the outgest query's where-string.
        whereStr = query.whereStr.split(" AND ")[0].split('.', 1)[-1]
        primary = pyfits.PrimaryHDU()
        band = ''
        # Frame-level metadata goes on the primary HDU, detector-level
        # metadata on the table extension.
        for hdu, metadataTables \
                in [(primary, ["Multiframe", "MultiframeEsoKeys"]),
                    (fitsTable, ["MultiframeDetector", "CurrentAstrometry"])]:

            for tableName in metadataTables:
                for column, value \
                        in self.queryMetadata(self._metaSchema[tableName], whereStr):
                    self.addKey(hdu, column, value)
            if "FILTER" in hdu.header:
                band = hdu.header["FILTER"]
        self.addStandardKeys(primary.header, fitsTable.header, [deepTile],
                             [band], tableType='CAT', field=deepTile)
        # Remove keys already flagged for deletion when the image headers for
        # this tile were modified.
        for key in self.delKeysDict[deepTile.fileID]:
            if key in primary.header:
                del primary.header[key]
            if key in fitsTable.header:
                del fitsTable.header[key]
        # Write the FITS file
        pyfits.HDUList([primary, fitsTable]).writeto(filePath, checksum=False)
        fits.checksum([filePath])
1504 1505 #-------------------------------------------------------------------------- 1506
    def queryFitsTable(self, columns, sql, tableType='NONE'):
        """ Outgest catalogue data corresponding to given field.

            Queries the release database with the supplied SelectSQL and
            returns a table HDU (via createTableHdu). Floating-point columns
            have database default values and out-of-range sentinels replaced
            by NaN for the released product.
        """
        # Largest representable values for the corresponding C types (float /
        # double); values at these limits are treated as invalid below.
        cppLimitDict = {'real':3.40282e+038, 'float':1.79769e+308}
        # Columns with known bad defaults: {name-fragment: [min, max]} bounds
        # applied by problemColumnCheck.
        problemDataColumns = {"averageconf":[0, None], "meanmag":[0, None]}

        dataNum = {}
        if 'MD' not in tableType:
            # Transpose row-oriented query results into per-column sequences
            data = dict(zip(columns,
                zip(*self.releaseDB.query(sql.selectStr, sql.fromStr, sql.whereStr))))
            for column in columns:
                if column.dataType == 'real' or column.dataType == 'float':
                    da = numpy.array(data[column])
                    da = self.problemColumnCheck(da, column, problemDataColumns)
                    # Replace values within 0.01% of the database real default
                    # with NaN (relative comparison avoids exact float match)
                    da = numpy.where(numpy.abs(numpy.divide(numpy.subtract(da, dbc.realDefault()), dbc.realDefault())) < 0.0001, numpy.nan, da)
                    # Likewise replace values at the C-type limit with NaN
                    dataNum[column] = numpy.where(numpy.abs(numpy.divide(numpy.subtract(numpy.abs(da), cppLimitDict[column.dataType]), cppLimitDict[column.dataType])) < 0.0001, numpy.nan, da)
                else:
                    dataNum[column] = numpy.array(data[column])
        return self.createTableHdu(columns, tableType, dataNum)
1528 1529 #-------------------------------------------------------------------------- 1530
1531 - def problemColumnCheck(self, da, column, problemDataColumns):
1532 """ Checks whether column is a problem column and replaces dodgy 1533 defaults with correct values 1534 """ 1535 1536 for colNamePart in problemDataColumns: 1537 if colNamePart in column.name.lower(): 1538 minDataValue, maxDataValue = problemDataColumns[colNamePart] 1539 if minDataValue is not None: 1540 da = numpy.where(da < minDataValue, dbc.realDefault(), da) 1541 if maxDataValue is not None: 1542 da = numpy.where(da > maxDataValue, dbc.realDefault(), da) 1543 return da
1544 1545 #-------------------------------------------------------------------------- 1546 1547
1548 - def getNearestFilter(self, filterName, possibleFilters):
1549 """ 1550 """ 1551 curFilterInfo = self.releaseDB.query("shortName, (cutOn+cutOff) as medLamb", 1552 "Filter", "shortName like '%s'" % filterName, 1553 firstOnly=True) 1554 filterInfo = self.releaseDB.query("shortName, (cutOn+cutOff) as medLamb", 1555 "Filter", "shortName in (%s)" % 1556 (','.join(["'%s'" % fName for fName in possibleFilters]))) 1557 absDev = [(fInfo.shortName, abs(fInfo.medLamb - curFilterInfo.medLamb)) 1558 for fInfo in filterInfo] 1559 1560 return sorted(absDev, key=itemgetter(1))[0][0]
1561 1562 #-------------------------------------------------------------------------- 1563
1564 - def outgestSources(self, isSourceList=False):
1565 """ Outgest FITS files from source table for each frame set. 1566 @TODO: (priOrSec = 0 || priOrSec=frameSetID) as primary_source 1567 VVV - simpler for DR2? Be careful. Just primary sources selected? 1568 """ 1569 1570 1571 self.catType = 'srcCat' 1572 columnsTypesExclude = ['PsfMag', 'PsfMagErr', 'SerMag2D', 'SerMag2DErr'] 1573 mergeLog = self._progSchema[self.programme.getMergeLogTable()] 1574 sourceTable = self._progSchema[self.programme.getSourceTable()] 1575 varTable = self._progSchema.get(self.programme.getVariabilityTable()) 1576 # Main data query 1577 iauNameSel, iauNameAttr = self.getIAUNameColSel('s' 1578 if self.esoProgReq.fstEpchSrc == 1 else sourceTable.name) 1579 aliasesDict = {} 1580 # @TODO: Primary sources only (priOrSec=0 or priOrSec=frameSetID) 1581 filters = [col.name.replace("mfID", '') for col in mergeLog.columns 1582 if "mfID" in col.name] 1583 colours = [col.name.lower().replace("pnt", '') for col in sourceTable.columns 1584 if col.name.lower().endswith("pnt")] 1585 excludedColumns = ['cx', 'cy', 'cz', 'htmid'] 1586 # @TODO: For VIDEO - add in SExtractor half rad column into schema for next release 1587 1588 includedColumnsProg = {'VIDEO': 1589 {'merged': ['sourceid', 'ra', 'dec', 'mergedclass', 'ebv'], 1590 'colour': ['pnt', 'pnterr', 'ext', 'exterr'], 1591 'filter': ['petromag', 'petromagerr', 'apermag3', 'apermag3err', 1592 'apermag6', 'apermag6err', 'apermagnoapercorr3', 1593 'apermag3err as apermagnoapercorr3err', 1594 'apermagnoapercorr6', 'apermag6err as apermagnoapercorr6err', 1595 'errbits', 'classstat', 'kronmag as automag', 'kronmagerr as automagerr', 'halfrad']}} 1596 1597 includedColumns = [] 1598 if self.programme.getAcronym().upper() in includedColumnsProg: 1599 includedColumns = includedColumnsProg[self.programme.getAcronym().upper()]['merged'] 1600 for colour in colours: 1601 includedColumns.extend([colour.lower() + cType.lower() 1602 for cType in includedColumnsProg[self.programme.getAcronym().upper()]['colour']]) 1603 
for band in filters: 1604 includedColumns.extend([self.addBand(cType, band) 1605 for cType in includedColumnsProg[self.programme.getAcronym().upper()]['filter']]) 1606 1607 for band in filters: 1608 excludedColumns.extend([band.lower() + cType.lower() for cType in columnsTypesExclude]) 1609 1610 if self.esoProgReq.fstEpchSrc == 1: 1611 # @TODO: Redo this for VVV DR2 plus, will be much more straightforward 1612 synSrcTable = self._progSchema[self.programme.getSynopticSourceTable()] 1613 mEpochFilters = self.esoProgReq.mEpochFilters.split(',') 1614 expectedOBMatches = self.esoProgReq.expOBMatches.split(';') 1615 obFilters = [ob.split(',') for ob in expectedOBMatches] 1616 if len(mEpochFilters) == 1 and len(mEpochFilters) < len(filters): 1617 # select frames! 1618 mEpochFilter = mEpochFilters[0] 1619 mEpochFilterID = self.releaseDB.query("filterID", "Filter", 1620 "shortName like '%s'" % mEpochFilter, firstOnly=True) 1621 sEpochFilters = [band for band in filters if band.lower() != mEpochFilter.lower()] 1622 1623 1624 nearestSEpchFilter = self.getNearestFilter(mEpochFilter, sEpochFilters) 1625 synMergeLogLookUpDict = defaultdict(dict) 1626 for band in filters: 1627 synMergeLogLookUpDict[band] = dict(self.releaseDB.query( 1628 "%smfID,synFrameSetID" % band, 1629 self.programme.getSynopticMergeLogTable(), 1630 "%smfID>0" % band)) 1631 1632 goodFrameSel = ("" if not self.completeFilters else 1633 ''.join([" AND %smfID<0" for band in sEpochFilters]) + 1634 " AND m.multiframeID>0") 1635 # select shallow Ks OBs 1636 1637 frames = self.releaseDB.query( 1638 "frameSetID,%s" % ','.join("%smfID" % flt for flt in filters), 1639 "%s as l,Multiframe as m" % self.programme.getMergeLogTable(), 1640 "l.%smfID=m.multiframeID and m.frameType not like " 1641 "'%%deep%%'%s" % (mEpochFilter, goodFrameSel)) 1642 1643 # Deep Ks select obsName not like '%v-%' 1644 selectStr = ("l.frameSetID,%s,m.multiframeID as %smfID" % 1645 (','.join(["%smfID" % flt for flt in sEpochFilters]), 1646 
mEpochFilter)) 1647 fromStr = ("%s as l,Multiframe as md, ProgrammeFrame as p," 1648 "Multiframe as m,MultiframeDetector as mfd," 1649 "MultiframeEsoKeys as e, CurrentAstrometry as c" % 1650 self.programme.getMergeLogTable()) 1651 whereStr = ("l.ksmfID=md.multiframeID and md.frameType like " 1652 "'%%deep%%' and p.programmeID=%s and p.multiframeID=" 1653 "m.multiframeID and m.frameType='tilestack' and " 1654 "m.filterID=%s and m.multiframeID=e.multiframeID and " 1655 % (self.programmeID, mEpochFilterID) + 1656 "m.multiframeID=mfd.multiframeID and m.multiframeID=c.multiframeID and " 1657 "dbo.fGreatCircleDist(l.ra,l.dec,c.centralRa," 1658 "c.centralDec)<3 and (m.deprecated in (0,50,51) or " 1659 "(m.deprecated=100 and mfd.deprecated=0))") 1660 1661 frames += self.releaseDB.query( 1662 selectStr=selectStr, 1663 fromStr=fromStr, whereStr=whereStr + (" and e.obsName not like " 1664 "'%v-%'" + goodFrameSel)) 1665 obTileObsQuery = SelectSQL("frameSetID", fromStr, 1666 whereStr + (" and e.obsName not like '%v-%'")) 1667 # select nearest in time of others 1668 frames += self.releaseDB.query( 1669 selectStr=selectStr, 1670 fromStr="%s as l, Multiframe as m, (select l.frameSetID, " 1671 "min(m.mjdObs) as minMjdObs from %s as l, " 1672 "Multiframe as md, ProgrammeFrame as p, Multiframe as m, " 1673 "MultiframeEsoKeys as e, CurrentAstrometry as c, " 1674 "MultiframeDetector as mfd, Multiframe as ne where %s " 1675 "and ne.multiframeID=l.%smfID AND frameSetID not in (%s) " 1676 "group by l.frameSetID) as T" % ( 1677 self.programme.getMergeLogTable(), 1678 self.programme.getMergeLogTable(), 1679 whereStr + (" and e.obsName like '%v-%'"), nearestSEpchFilter, obTileObsQuery), 1680 whereStr="l.frameSetID = T.frameSetID and m.mjdObs = " 1681 "T.minMjdObs and m.frameType = 'tilestack'%s" % 1682 goodFrameSel) 1683 # frameTypeSel = queries.getFrameSelection('tile', deepOnly=True, 1684 # alias='m', selType='%stack') 1685 # Sort out any deep ZYJH 1686 replacementDict = 
dict(self.releaseDB.query( 1687 "distinct m.multiframeID,m2.multiframeID", 1688 "ProgrammeFrame as p,Multiframe as m,Provenance as v," 1689 "Provenance as v2,Provenance as v3, Multiframe as m2", 1690 "p.programmeID=%s and p.multiframeID=m.multiframeID and " 1691 "m.frameType like '%%tile%%deep%%stack' and m.filterID!=%s and " 1692 "m.multiframeiD=v.combiframeID and v.multiframeID=" 1693 "v2.combiframeID and v2.multiframeID=v3.multiframeID and " 1694 "v3.combiframeID=m2.multiframeID and m2.frameType " 1695 "like 'tilestack'" % (self.programmeID, mEpochFilterID))) 1696 1697 1698 finalFrames = [] 1699 for frameInfo in frames: 1700 fInfo = list(frameInfo) 1701 for index, _filtName in enumerate(sEpochFilters): 1702 if fInfo[index + 1] in replacementDict: 1703 fInfo[index + 1] = replacementDict[fInfo[index + 1]] 1704 finalFrames.append(fInfo) 1705 1706 1707 1708 # Sort out selection and columns 1709 # 1710 # Positional info 1711 basicSTSel = ("sourceID,frameSetID,ra,dec,l,b," 1712 "lambda,eta,priOrSec,mergedClassStat,mergedClass") 1713 # UNION - check at frame level... 
1714 1715 colList = [] 1716 for ob in obFilters: 1717 for index, band in enumerate(ob): 1718 if index < len(ob) - 1: 1719 colList.append("ss%s.%sm%sPnt,ss%s.%sm%sPntErr" % 1720 (band.lower(), band.lower(), ob[index + 1].lower(), 1721 band.lower(), band.lower(), ob[index + 1].lower())) 1722 1723 1724 colourSel = ','.join(colList) 1725 # 1726 # bandPass attrs 1727 # 1728 # 1729 bandPassAttrs = ("ss%s.%sAperMag1,ss%s.%sAperMag1Err," 1730 "ss%s.%sAperMag3,ss%s.%sAperMag3Err,ss%s.%saperMag4," 1731 "ss%s.%saperMag4Err,ss%s.%sGausig,ss%s.%sPA," 1732 "ss%s.%sEll,ss%s.%sppErrBits,ss%s.%sAverageConf," 1733 "ss%s.%sSeqNum,ss%s.%sXi,ss%s.%sEta") 1734 bandPassList = [attr.replace('ss%s.%s', '').lower() 1735 for attr in bandPassAttrs.split(',')] 1736 columns = [iauNameAttr] 1737 srcColumns = [column for column in sourceTable.columns 1738 if column.name in basicSTSel] 1739 fromStr = "%s as s" % (self.programme.getSourceTable()) # ,self.programme.getMergeLogTable()) 1740 whereStr = "" # "s.frameSetID=l.frameSetID" 1741 colourColumns = [] 1742 for colTerms in colList: 1743 colAttrs = [attr.split('.')[1].lower() for attr in colTerms.split(',')] 1744 colourColumns.extend([column for column in synSrcTable.columns 1745 if column.name.lower() in colAttrs]) 1746 synSrcColumns = [] 1747 bandSel = "" 1748 for band in filters: 1749 # Some modifications required... 1750 bandNameLength = len(band) 1751 1752 # Get order correct? 
1753 columns = [] 1754 for name in bandPassList: 1755 columns += [column for column in synSrcTable.columns 1756 if column.name.lower().startswith(band.lower()) and 1757 column.name[bandNameLength:].lower() == name] 1758 synSrcColumns.extend(columns) 1759 bandSel += ',' + bandPassAttrs % tuple([band for _index in range(28)]) 1760 fromStr += ",%sXSynopticSourceBestMatch as bm%s" % ( 1761 self.programme.getSourceTable(), band) 1762 fromStr += ",%s as ss%s" % ( 1763 self.programme.getSynopticSourceTable(), band) 1764 fromStr += ",%s as sl%s " % ( 1765 self.programme.getSynopticMergeLogTable(), band) 1766 whereStr += ("s.sourceID=bm%s.sourceID AND " 1767 "bm%s.synFrameSetID=sl%s.synFrameSetID AND " 1768 "bm%s.synFrameSetID=ss%s.synFrameSetID AND " 1769 "bm%s.synSeqNum=ss%s.synSeqNum AND " % 1770 tuple([band for index in range(7)])) 1771 1772 varClassCol = "variableClass" 1773 varAttr = varTable.attribute[varClassCol] 1774 varAttr.name = "VARFLAG" 1775 columns = [iauNameAttr] + srcColumns + colourColumns + synSrcColumns + [varAttr] 1776 selectStr = (iauNameSel + ', ' + 1777 ', '.join("s.%s" % column for column in srcColumns) + 1778 ', ' + colourSel + bandSel) 1779 # ', '.join("ss1.%s" % (column) 1780 # for column in synSrcColumns1) + ', ' + 1781 # ', '.join("%ss2.%s" % (column) 1782 # for column in synSrcColumns2)) 1783 selectStr += ", ISNULL(%s, %s) AS %s" \ 1784 % (varClassCol, dbc.intDefault(), varAttr) 1785 fromStr += ",%s as v" % self.programme.getVariabilityTable() 1786 whereStr += "v.sourceID=s.sourceID" 1787 else: 1788 if self.programme.getAcronym().upper() in includedColumnsProg: 1789 aliasesDict = dict([(ii, (ic.lower().split(' as ')[1] 1790 if ' as ' in ic.lower() else '')) 1791 for ii, ic in enumerate(includedColumns)]) 1792 nonNullSTColumns = [] 1793 selectStrColNames = includedColumns 1794 for ii, ic in enumerate(includedColumns): 1795 col = [copy.deepcopy(column) for column in sourceTable.columns if column.name.lower() == ic.split()[0]].pop() 1796 if ii in 
aliasesDict and len(aliasesDict[ii]) > 0: 1797 1798 col.name = aliasesDict[ii] 1799 nonNullSTColumns.append(col) 1800 else: 1801 nonNullSTColumns = [column for column in sourceTable.columns 1802 if column.name.lower() not in excludedColumns] 1803 selectStrColNames = [column.name for column in nonNullSTColumns] 1804 1805 1806 if not self.esoProgReq.incVariables: 1807 columns = [iauNameAttr] + nonNullSTColumns 1808 selectStr = iauNameSel + ', ' + ', '.join(selectStrColNames) 1809 fromStr = sourceTable.name 1810 whereStr = "" 1811 else: 1812 varClassCol = "variableClass" 1813 varAttr = varTable.attribute[varClassCol] 1814 varAttr.name = "VARFLAG" 1815 columns = [iauNameAttr] + nonNullSTColumns + [varAttr] 1816 1817 # Handle duplicate column names 1818 selectStr = iauNameSel + ', ' + ', '.join(("%s.%s" % (sourceTable.name, colName) 1819 if colName.split()[0] in [col.name for col in varTable.columns] else 1820 colName) 1821 for colName in selectStrColNames) 1822 1823 selectStr += ", ISNULL(%s, %s) AS %s" \ 1824 % (varClassCol, dbc.intDefault(), varAttr) 1825 # @TODO: More complicated if SynopticSource. 1826 # if VMC - first epoch. 1827 # if VVV ZY epoch + JHK epoch. 
1828 fromStr = PkLeftJoin(sourceTable, varTable) 1829 whereStr = "" 1830 1831 if not isSourceList: 1832 raIndex = [index for index, column in enumerate(columns) 1833 if column.name == 'ra'][0] 1834 decIndex = [index for index, column in enumerate(columns) 1835 if column.name == 'dec'][0] 1836 columns[raIndex].name = 'ra2000' 1837 columns[decIndex].name = 'dec2000' 1838 if self.isQuickRun: 1839 selectStr = "TOP 10 " + selectStr 1840 1841 1842 1843 # sIDIndex = [index for index, column in enumerate(columns) 1844 # if column.name == 'sourceID'][0] 1845 # sIDtag = columns[sIDIndex].tag 1846 # for key in sIDtag: 1847 # if key == '--/C': 1848 # sIDtag[key] = 'meta.id' 1849 # columns[sIDIndex].tag = sIDtag 1850 if self.esoProgReq.photSys == 'AB': 1851 vegaToAB = self.releaseDB.query( 1852 "vegaToAB", "Filter", "shortName in (%s) order by filterID" 1853 % ','.join(["'%s'" % shtName for shtName in filters])) 1854 1855 selectStr = self.modifyToAB(selectStr, vegaToAB, filters=filters) 1856 1857 fnFormat = FileNameFormat(self.programme, self.releaseNum, 1858 os.path.join(self._fileDir, 'images') if isSourceList else 1859 os.path.join(self._fileDir, "cat"), 1860 fileType="srcCat" if isSourceList else "finalSourceCat", 1861 filters=''.join(filters)) 1862 if self.fieldIDs: 1863 # @TODO: convert to correct list 1864 1865 # 1866 1867 frameSetIDs = ','.join( 1868 str(queries.getFrameSetID(self.releaseDB, fieldID, self.programme)) 1869 for fieldID in self.fieldIDs) 1870 1871 # Work through each file by fileID 1872 fileIdStr = mergeLog.primaryKey() 1873 fields = self.releaseDB.query( 1874 selectStr=fileIdStr + " AS fileID, ra*24/360 AS ra, dec", 1875 fromStr=mergeLog, 1876 whereStr=DepCodes.selectNonDeprecated 1877 + (" AND frameSetID IN (%s)" % frameSetIDs 1878 if self.fieldIDs else ""), 1879 orderBy=fileIdStr) 1880 # Cover the case of continuing from an interruption 1881 1882 self.srcCatFieldDict = dict([(field.fileID, fnFormat.getFilePath(field)) 1883 for field in fields]) 1884 
self.fieldInfoDict = {} 1885 for field in fields: 1886 if self.esoProgReq.fstEpchSrc == 1: 1887 frameInfo = [fInfo for fInfo in finalFrames if fInfo[0] == field.fileID][0] 1888 fieldInfo = self.getFieldInfo(field, frameInfo, filters) 1889 self.fieldInfoDict[field.fileID] = fieldInfo 1890 else: 1891 self.fieldInfoDict[field.fileID] = None 1892 # @TODO: This makes metadata file incorrect 1893 # fields = [field for field in fields 1894 # if not os.path.exists(fnFormat.getFilePath(field))] 1895 1896 Logger.addMessage("Creating source table files for %s frame sets..." 1897 % len(fields)) 1898 1899 if not isSourceList: 1900 utils.ensureDirExist(os.path.join(self._fileDir, "cat")) 1901 1902 notVarFrameSetsList = self.releaseDB.query("frameSetID", 1903 self.programme.getMergeLogTable(), 1904 "frameSetID NOT IN (select frameSetID from %s)" % 1905 self.programme.getVarFrameSetInfoTable()) 1906 1907 fields = [] if self.piOnly else fields 1908 progress = ForLoopMonitor(fields) 1909 self.magLim = defaultdict(list) 1910 self.nPointings = len(fields) 1911 for field in fields: 1912 self.regionDict[field.fileID] = (self.getRegion(field.fileID) 1913 if self.isRegion else 1914 None) 1915 skipField = False 1916 if field.fileID in notVarFrameSetsList: 1917 skipField = True 1918 1919 if self.isRegion: 1920 fnFormat = FileNameFormat(self.programme, self.releaseNum, 1921 os.path.join(self._fileDir, 'images') if isSourceList else 1922 os.path.join(self._fileDir, "cat/%s" % self.regionDict[field.fileID][0]), 1923 fileType="srcCat" if isSourceList else "finalSourceCat", 1924 filters=''.join(filters)) 1925 utils.ensureDirExist(os.path.join(self._fileDir, "cat/%s" % 1926 self.regionDict[field.fileID][0])) 1927 1928 filePath = fnFormat.getFilePath(field) 1929 self.updateDirDict(filePath) 1930 selStr = selectStr 1931 frStr = fromStr 1932 whStr = whereStr 1933 srcConst = (" AND %s" % self.esoProgReq.srcConstraints 1934 if self.esoProgReq.srcConstraints != 'NONE' else "") 1935 query = 
SelectSQL(selStr, frStr, 1936 where="%s.%s=%s%s" % (sourceTable, fileIdStr, field.fileID, 1937 srcConst)) 1938 # 1939 mfIDList = [] 1940 if self.esoProgReq.fstEpchSrc == 1: 1941 finalWhereStr = whStr 1942 frameInfo = [fInfo for fInfo in finalFrames if fInfo[0] == field.fileID][0] 1943 synMLList = [] 1944 1945 for index, band in enumerate(filters): 1946 if frameInfo[index + 1] > 0: 1947 finalWhereStr += " AND sl%s.%smfID=%s" % (band, band, frameInfo[index + 1]) 1948 if frameInfo[index + 1] in synMergeLogLookUpDict[band]: 1949 synMLList.append(synMergeLogLookUpDict[band][frameInfo[index + 1]]) 1950 mfIDList.append(frameInfo[index + 1]) 1951 else: 1952 Logger.addMessage("Skipping field, due to recalibration-deprecation issue") 1953 synMLList.append(dbc.intDefault()) 1954 mfIDList.append(dbc.intDefault()) 1955 skipField = True 1956 else: 1957 synFrameSetID = self.releaseDB.query( 1958 "synFrameSetID", 1959 "%s as sl,%s as l" % (self.programme.getSynopticMergeLogTable(), 1960 self.programme.getMergeLogTable()), 1961 "l.frameSetID=%s AND dbo.fGreatCircleDist(" 1962 "l.ra,l.dec,sl.ra,sl.dec)<3 AND sl.%smfID=%s" 1963 % (field.fileID, band, dbc.intDefault()), firstOnly=True) 1964 1965 finalWhereStr += (" AND sl%s.synFrameSetID=%s" % (band, synFrameSetID)) 1966 synMLList.append(dbc.intDefault()) 1967 mfIDList.append(dbc.intDefault()) 1968 1969 if skipField: 1970 self.skipFieldList.append(field.fileID) 1971 for obIndex, ob in enumerate(obFilters): 1972 for index, band in enumerate(ob): 1973 if index < len(ob) - 1: 1974 filtIndex = getFiltIndex(obFilters, obIndex, index) 1975 if synMLList[filtIndex] != synMLList[filtIndex + 1]: 1976 selStr.replace("ss%s.%sm%sPnt" % (band, band, ob[index + 1]), 1977 "%s as %sm%sPnt" % (dbc.realDefault(), band, ob[index + 1])) 1978 selStr.replace("ss%s.%sm%sPntErr" % (band, band, ob[index + 1]), 1979 "%s as %sm%sPntErr" % (dbc.realDefault(), band, ob[index + 1])) 1980 1981 query = SelectSQL(selStr, frStr, 1982 where=finalWhereStr + " AND 
s.frameSetID=%s" % (field.fileID)) 1983 # Run each file outgest as a separate process to avoid memory leaks 1984 if not os.path.exists(filePath) and not skipField and not self.skipSources: 1985 outgest = Process(target=self.outgestSourceFile, 1986 args=(filePath, columns, query, field, filters, 1987 'SL' if isSourceList else 'SFSC')) 1988 1989 outgest.start() 1990 outgest.join() 1991 if outgest.exitcode: 1992 raise EsoRelease.CuError("Forked outgest process failed." 1993 " Please check stdout.") 1994 1995 if not skipField: 1996 self.appendMagLims2(field.fileID, mergeLog, mfIDList) 1997 # self.appendMagLims(filePath) 1998 progress.testForOutput() 1999 if not isSourceList and not self.isRegion: 2000 query = SelectSQL(selectStr, fromStr, whereStr) 2001 fnFormatMD = FileNameFormat(self.programme, self.releaseNum, 2002 os.path.join(self._fileDir, "cat"), fileType="catMetaData", 2003 filters=''.join(filters)) 2004 filePath = fnFormatMD.getFilePath(None) 2005 self.updateDirDict(filePath) 2006 outgest = Process(target=self.outgestSourceFile, 2007 args=(filePath, columns, query, None, filters, 'SMD')) 2008 if not os.path.exists(filePath) or self.piOnly: 2009 outgest.start() 2010 outgest.join()
2011 #-------------------------------------------------------------------------- 2012
def addBand(self, cType, band):
    """ Prefix each part of a (possibly aliased) column type with the
        lower-cased filter name, preserving any ' as ' alias, e.g.
        'petromag' -> 'jpetromag', 'kronmag as automag' -> 'jkronmag as jautomag'.
    """
    prefix = band.lower()
    pieces = cType.lower().split(' as ')
    return ' as '.join(prefix + piece for piece in pieces)
2017 2018 #-------------------------------------------------------------------------- 2019
def updateAliasedColName(self, columns, aliasesDict):
    """ Rename schema columns in place wherever aliasesDict supplies a
        non-empty alias for that column's list position.

        @param columns:     List of schema column attributes (mutated).
        @param aliasesDict: Maps column index -> alias string ('' = none).

        @return: The same (mutated) columns list.
    """
    for position, alias in aliasesDict.items():
        if 0 <= position < len(columns) and len(alias) > 0:
            columns[position].name = alias
    return columns
2029 2030 2031 #-------------------------------------------------------------------------- 2032 2033
def appendMagLims2(self, frameSetID, mergeLog, mfIDList):
    """ Record the AB magnitude limit of each multiframe in a frame set
        in self.magLim, keyed by 1-based passband position.

        NOTE(review): this assigns a scalar to self.magLim[index], whereas
        the sibling appendMagLims() appends to a list (self.magLim is
        initialised as defaultdict(list) by the caller) -- confirm which
        contract the downstream header-writing code expects.
    """
    if not mfIDList:
        # Caller did not supply the multiframe IDs: look them up from the
        # merge log row for this frame set.
        mfIDColumns = ','.join(col.name for col in mergeLog.columns
                               if "mfID" in col.name)
        mfIDList = self.releaseDB.query(mfIDColumns, mergeLog.name,
                                        "frameSetID=%s" % frameSetID,
                                        firstOnly=True)

    for position, mfID in enumerate(mfIDList, 1):
        if mfID <= 0:
            continue  # missing / deprecated frame for this passband
        magLim = self.magLimSatDict[mfID][0]
        if magLim > 0:
            self.magLim[position] = magLim
2050 #-------------------------------------------------------------------------- 2051
def appendMagLims(self, filePath):
    """ Append the positive MAGLIMn primary-header values of the FITS
        file at filePath to self.magLim (a dict of lists keyed by the
        1-based keyword number n).

        @param filePath: Path to an outgested FITS catalogue product.
    """
    hdulist = fits.open(filePath)
    try:
        header = hdulist[0].header
        index = 1
        # MAGLIM keywords are numbered consecutively from 1; stop at the
        # first gap.
        while 'MAGLIM%s' % index in header:
            magLim = header['MAGLIM%s' % index]
            if magLim > 0:  # non-positive values flag an absent limit
                self.magLim[index].append(magLim)
            index += 1
    finally:
        # Fix: the HDU list was previously never closed, leaking one file
        # handle per catalogue product processed.
        hdulist.close()
2062 2063 #-------------------------------------------------------------------------- 2064
def updateDirDict(self, filePath):
    """ Register the directory of filePath in self.directoryDict, keyed
        by its path relative to the release file area, unless already
        present.
    """
    fullDir = os.path.dirname(filePath)
    relativeDir = fullDir.split(self._fileDir)[1]
    # Only the first sighting of a directory is recorded.
    self.directoryDict.setdefault(relativeDir, fullDir)
2071 2072 #-------------------------------------------------------------------------- 2073
def getRegion(self, frameSetID=None, raDec=None, useFrameSetID=True):
    """ Return the (region name, region parts) pair for the field that a
        frame set or sky position belongs to.

        @param frameSetID:    Frame set ID to resolve (used when
                              useFrameSetID is True).
        @param raDec:         (ra in hours, dec in degrees) position to
                              resolve when useFrameSetID is False.
        @param useFrameSetID: If True resolve via the frame set ID,
                              otherwise via the supplied position.

        @raise ValueError: If useFrameSetID is False and no raDec position
                           is supplied.
    """
    if useFrameSetID:
        fieldID = queries.getFieldID(self.releaseDB, frameSetID,
                                     self.programme)
    elif raDec:
        productType = 'mosaic' if self.areMosaics else 'tile'
        # Match the nearest required product pointing within 0.5 deg;
        # raDec[0] is in hours, hence the factor 15 to degrees.
        fieldID = self.releaseDB.query(
            "fieldID", "Required%s" % productType,
            "dbo.fGreatCircleDist(ra,dec,%s,%s)<0.5 and programmeID=%s"
            % ((15 * raDec[0]), raDec[1], self.programmeID),
            firstOnly=True)
    else:
        # Fix: previously this path fell through with fieldID unbound,
        # raising a confusing NameError on the return line below.
        raise ValueError("getRegion() requires either useFrameSetID=True "
                         "or an raDec position")

    return (self.regionFieldIDDict[fieldID],
            self.regionPartsFieldIDDict[fieldID])
2087 #-------------------------------------------------------------------------- 2088
def getIAUNameColSel(self, alias, radp=None, decdp=None,
                     ucdTag='meta.id'):
    """ Build the SQL select expression that generates IAU names for a
        table alias, together with a schema attribute describing the
        resulting column.

        @param alias:  Table alias (or table name) qualifying ra/dec.
        @param radp:   Decimal places of RA in the name (class default
                       when omitted).
        @param decdp:  Decimal places of Dec in the name (class default
                       when omitted).
        @param ucdTag: UCD to record in the attribute's --/C tag.

        @return: (selectStr, schema.Attribute) pair.
    """
    # Fall back to the class-level decimal-place defaults.
    radp = radp or EsoRelease.radp
    decdp = decdp or EsoRelease.decdp
    acronym = self.programme.getAcronym()

    iauNameSel = ("dbo.fIAUNameGen('%s',%s.ra,%s.dec,%s,%s)"
                  % (acronym.upper(), alias, alias, radp, decdp))

    # 22 characters presumably cover the fixed core of the generated name;
    # the survey acronym and extra decimal places come on top -- TODO
    # confirm against dbo.fIAUNameGen.
    iauNameAttr = schema.Attribute()
    iauNameAttr.name = 'IAUNAME'
    iauNameAttr.dataType = 'varchar(%s)' % (22 + radp + decdp + len(acronym))
    iauNameAttr.tag = {'--/U': '',
                       '--/C': ucdTag,
                       '--/D': 'IAU Name (not unique)',
                       '--/B': ''}
    return iauNameSel, iauNameAttr
2107 2108 #--------------------------------------------------------------------------
def getFieldInfo(self, field, frameInfo, filters):
    """ Package a field's ID, position and per-passband multiframe IDs
        into a single ExtendedField namedtuple.

        @param field:     Row with fileID, ra and dec attributes.
        @param frameInfo: Sequence whose entries 1..len(filters) are the
                          multiframe IDs per passband.
        @param filters:   Passband names, in frameInfo order.
    """
    names = ['fileID', 'ra', 'dec'] + ['%smfID' % band for band in filters]
    ExtendedField = namedtuple('ExtendedField', ' '.join(names))
    values = [field.fileID, field.ra, field.dec]
    values += list(frameInfo[1:len(filters) + 1])
    return ExtendedField(*values)
2118 #--------------------------------------------------------------------------
def getInfoForPIs(self, filePath, columns):
    """ Write a plain-text column-definition summary for the PIs
        alongside the catalogue product, in the piInfo sub-directory.

        @param filePath: Path of the FITS product the summary describes;
                         the .txt file takes its base name.
        @param columns:  Schema column attributes to describe.
    """
    outputDir = os.path.join(self._fileDir, 'piInfo')
    utils.ensureDirExist(outputDir)
    outFileName = os.path.join(
        outputDir, os.path.basename(filePath.replace('.fits', '.txt')))

    outputLines = ['# Column definitions\n', '# Name; format; description\n']
    for column in columns:
        outputLines.append('%s; %s; %s\n'
                           % (column.name,
                              EsoRelease.sqlToFitsDataType[column.dataType],
                              column.tag['--/D']))

    # Fix: use open() in a context manager instead of the Python-2-only
    # file() constructor, which also left the handle unclosed.
    with open(outFileName, 'w') as outFile:
        outFile.writelines(outputLines)
2132 2133 #-------------------------------------------------------------------------- 2134
def outgestSourceFile(self, filePath, columns, query, field, bandList,
                      tableType='NONE'):
    """ Outgests a single ESO source catalogue file product.

        @param filePath:  Destination path of the FITS product.
        @param columns:   Schema attributes describing the outgested columns.
        @param query:     SelectSQL query supplying the catalogue rows.
        @param field:     Frame-set row for this pointing, or None for the
                          release-wide metadata product.
        @param bandList:  Passband names included in this product.
        @param tableType: Product code (e.g. 'SL', 'SFSC', 'SMD'); codes
                          containing 'MD' are metadata tables, 'FSC' final
                          source catalogues.
    """
    # @FIXME - bring provenance in here
    # @FIXME - reduce number of queries and control structures..

    # Prepare catalogue data

    if 'MD' in tableType or self.isRegion:
        # Also emit the plain-text column summary for the PIs; in piOnly
        # mode that is all that is produced.
        self.getInfoForPIs(filePath, columns)
        if self.piOnly:
            return
    # @TODO: change - if external and no neighbour table....
    if self.checkifExternal() and not self.hasNeighTable:
        # @TODO: Must modify this.
        fitsTable = self.convertExternalCats(columns, field, tableType, query)
    else:
        fitsTable = self.queryFitsTable(columns, query, tableType)
    # Metadata
    primary = pyfits.PrimaryHDU()

    # Add links
    if 'cat' not in filePath and ('MD' in tableType or 'FSC' in tableType):
        self.addCatalogueLinks(fitsTable, columns, field)

    # @FIXME: If multi-epoch ....
    # updateHeaders() may set self.missingAdpFiles as a side effect; reset
    # the flag first so a stale value from a previous file cannot leak in.
    self.missingAdpFiles = False
    self.updateHeaders(tableType, field, primary, fitsTable, bandList)
    if self.missingAdpFiles:
        # If missing ADP files - don't create fits table
        return

    # VSA_MFID is an internal archive key, not part of the ESO product.
    del primary.header["VSA_MFID"]
    # @TODO: is this just for VIDEO or others too?
    # if 'FSC' in tableType:
    #     filterName = os.path.basename(filePath).split('_')[3]
    #     catType = os.path.basename(filePath).split('_')[4]
    #     self.addCard(primary.header, "PROV1",
    #         os.path.basename(filePath.replace(catType, 'srcCat').replace(filterName, ''.join(filters))),
    #         "Originating image file")

    # Write the FITS file
    pyfits.HDUList([primary, fitsTable]).writeto(filePath, checksum=True)
2180 2181 #-------------------------------------------------------------------------- 2182
def updateHeaders(self, tableType, field, primary, fitsTable, bandList):
    """ Dispatch FITS header population to the handler appropriate for
        this catalogue product type (no-op for unknown codes).
    """
    if tableType in ('SFSC', 'SMD'):
        # Passband-merged source catalogue / its metadata table
        self.updateMergedBandHeaders(tableType, field, primary, fitsTable,
                                     bandList)
    elif tableType in ('MFSC', 'MMD'):
        # Multi-epoch, single-band photometry
        self.updateMPhotHeaders(tableType, field, primary, fitsTable,
                                bandList)
    elif tableType in ('VFSC', 'VMD'):
        # Variability: multi-epoch over several bands, points to MPHOT
        self.updateVarCatHeaders(tableType, field, primary, fitsTable,
                                 bandList)
    elif tableType in ('EFSC', 'EMD'):
        # External catalogues reuse the merged-band handler
        self.updateMergedBandHeaders(tableType, field, primary, fitsTable,
                                     bandList)
2198 #-------------------------------------------------------------------------- 2199
def updateMergedBandHeaders(self, tableType, field, primary, fitsTable, bandList):
    """ Populate primary and table headers of a merged-band catalogue
        product ('SFSC'/'SMD', also reused for external 'EFSC'/'EMD')
        from the programme merge log and per-multiframe metadata tables.

        @param tableType: Product code; 'MD' codes are release-wide
                          metadata tables covering all frame sets.
        @param field:     Frame-set row (None for 'MD' products).
        @param primary:   Primary HDU receiving frame-set level keys.
        @param fitsTable: Table HDU receiving detector/astrometry keys.
        @param bandList:  Passband names included in the product.
    """
    # Query MergeLog for passband-specific metadata
    mergeLog = self._progSchema[self.programme.getMergeLogTable()]
    isPassband = False
    mfID = dbc.intDefault()
    mfIDs = []

    # filters = [col.name.replace("mfID", '') for col in mergeLog.columns
    #            if "mfID" in col.name]
    if 'MD' not in tableType:
        # Single pointing: restrict to this frame set.
        whereStr = "frameSetID=%s" % field.fileID
        fieldInfo = self.fieldInfoDict[field.fileID]
    else:
        whereStr = ""
        if self.fieldIDs:
            # @TODO: convert to correct list
            # Restrict to the requested fields, dropping frame sets that
            # were skipped during outgest.
            fSetIDs = [queries.getFrameSetID(self.releaseDB, fieldID, self.programme)
                       for fieldID in self.fieldIDs]
            frameSetIDs = ','.join([str(fSetID) for fSetID in fSetIDs
                                    if fSetID not in self.skipFieldList])
            self.nPointings = len(frameSetIDs.split(','))
            whereStr = "frameSetID in (%s)" % frameSetIDs
        fieldInfo = None

    # Walk the merge-log columns in order: everything before the first
    # *mfID column is frame-set level (goes to the primary header); after
    # that, columns come in per-passband (mfID, eNum) pairs.  NOTE(review):
    # this relies on the merge-log schema column order -- confirm if the
    # schema changes.
    for column, value in self.queryMetadata(mergeLog, whereStr):
        isPassband = isPassband or "mfid" in column.name.lower()
        if not isPassband:
            self.addKey(primary, column, value)

        elif "mfid" in column.name.lower():
            passband = column.name[:-len("mfid")]
            # @TODO: No longer necessary
            # NOTE(review): 'or' binds looser than 'and' here, so for
            # 'MMD' the mfID is always defaulted regardless of passband --
            # confirm that is intended.
            if (tableType == 'MMD' or tableType == 'MFSC' and passband != self.band.lower()):
                mfID = dbc.intDefault()
            else:
                mfID = value
            if self.esoProgReq.fstEpchSrc == 1 and 'MD' not in tableType:
                # First-epoch selection overrides the merge-log mfID with
                # the per-field choice made earlier.
                mfID = getattr(fieldInfo, "%smfID" % passband)
        elif "enum" in column.name.lower() and mfID != dbc.intDefault():
            # Extension-number column completes the (mfID, eNum) pair:
            # emit all per-passband metadata for this frame.
            eNum = value
            mfIDs.append(mfID)

            self.addPbMetadata(primary, passband,
                               self._metaSchema["Multiframe"], mfID)

            self.addPbMetadata(primary, passband,
                               self._metaSchema["MultiframeEsoKeys"], mfID)

            self.addPbMetadata(fitsTable, passband,
                               self._metaSchema["MultiframeDetector"], mfID, eNum)

            self.addPbMetadata(fitsTable, passband,
                               self._metaSchema["CurrentAstrometry"], mfID, eNum)
            # @TODO: temporary
            if 'MD' not in tableType:
                fitsTable.header.update("HIERARCH %s_ABMAGLIM" % passband.upper(),
                                        self.magLimSatDict[mfID][0], "AB magnitude limit")
                fitsTable.header.update("HIERARCH %s_ABSATMAG" % passband.upper(),
                                        self.magLimSatDict[mfID][1], "AB saturation limit")

    # if len(mfIDs) is 1 and tableType == 'SFSC':
    #     Logger.addMessage("<Info> Frame set contains only a single frame"
    #                       " for file %s. Skipping..." % os.path.basename(filePath))
    #     return
    if 'MD' in tableType:
        # select all mfIDs
        # Metadata products list every multiframe of every covered frame
        # set, not just the last row walked above.
        mfIDs = []
        for rowData in self.queryMetadata(mergeLog, whereStr, allRows=True):
            mfIDs += [value for column, value in rowData
                      if "mfid" in column.name.lower()]
    frames = utils.arbSort(self.queryFileProducts(mfIDs), mfIDs,
                           key=attrgetter('fileID'))
    self.addPrimaryFrameKeys(primary, frames[0])
    self.addStandardKeys(primary.header, fitsTable.header, frames, bandList,
                         tableType, field)
2282 2283
def updateMPhotHeaders(self, tableType, field, primary, fitsTable, bandList):
    """ Populate primary and table headers of a single-band multi-epoch
        photometry product ('MFSC'/'MMD') from the merge log and the
        epoch frames found via the synoptic best-match table.

        @param tableType: Product code; 'MD' codes cover all frame sets.
        @param field:     Frame-set row (None for 'MD' products).
        @param primary:   Primary HDU receiving frame-set level keys.
        @param fitsTable: Table HDU for the catalogue data.
        @param bandList:  Passband names; only the first is used here.
    """
    band = bandList[0]
    # @FIXME = get mfID list earlier when data is generated....

    # Query MergeLog for passband-specific metadata
    mergeLog = self._progSchema[self.programme.getMergeLogTable()]
    isCorrel = 'SynopticSource' in self.programme.getSynopticBestMatchTable()
    if 'MD' not in tableType:
        fieldStr = "frameSetID=%s" % field.fileID
        fieldInfo = self.fieldInfoDict[field.fileID]
    else:
        fieldStr = ""
        if self.fieldIDs:
            # @TODO: convert to correct list
            fSetIDs = [queries.getFrameSetID(self.releaseDB, fieldID, self.programme)
                       for fieldID in self.fieldIDs]
            frameSetIDs = ','.join([str(fSetID) for fSetID in fSetIDs
                                    if fSetID not in self.skipFieldList])
            self.nPointings = len(frameSetIDs.split(','))
            fieldStr = "frameSetID in (%s)" % frameSetIDs
        fieldInfo = None

    # Frame-set level keys go to the primary header; per-passband merge
    # log columns (from the first *mfID on) are handled via the epoch
    # frame query below.
    isPassband = False
    for column, value in self.queryMetadata(mergeLog, fieldStr):
        isPassband = isPassband or "mfid" in column.name.lower()
        if not isPassband:
            self.addKey(primary, column, value)

    # Get all epoch images from best-match table
    # "fileName fileID ra dec filterName frameType confID productID"
    selectStr = ("distinct fileName,m.multiframeID,%s as ra,%s as dec,m.filterName,"
                 "m.frameType,m.confID,%s as productID"
                 % (dbc.realDefault(), dbc.realDefault(), dbc.intDefault()))

    if isCorrel:
        fromStr = ("%s as b,%s as ml,Multiframe as m,%s as s,%s as l" %
                   (self.programme.getSynopticBestMatchTable(),
                    self.programme.getSynopticMergeLogTable(),
                    self.programme.getSourceTable(),
                    self.programme.getMergeLogTable()))
        whereStr = ("s.sourceID=b.sourceID and s.%s and (s.priOrSec=0 or "
                    "s.priOrSec=s.frameSetID) and b.synFrameSetID=ml.synFrameSetID and "
                    "b.synFrameSetID>0 and ml.%smfID=m.multiframeID and l.frameSetID="
                    "s.frameSetID and m.multiframeID>0" % (fieldStr, band))

        if self.isCutOut:
            fromStr += (",(select frameSetID,synFrameSetID from %s as l1,%s as sl1 where "
                        "dbo.fGreatCircleDist(l1.ra,l1.dec,sl1.ra,sl1.dec)<4) as mf" %
                        (self.programme.getMergeLogTable(),
                         self.programme.getSynopticMergeLogTable()))
            whereStr += (" and l.frameSetID=mf.frameSetID and mf.synFrameSetID=ml.synFrameSetID")

    else:
        fromStr = ("%s as b,Multiframe as m,Filter as f,%s as s, %s as l" %
                   (self.programme.getSynopticBestMatchTable(),
                    self.programme.getSourceTable(),
                    self.programme.getMergeLogTable()))
        whereStr = ("s.sourceID=b.sourceID and s.%s and (s.priOrSec=0 or "
                    "s.priOrSec=s.frameSetID) and b.multiframeID=m.multiframeID and "
                    "m.filterID=f.filterID and f.shortName='%s' and l.frameSetID="
                    "s.frameSetID" % (fieldStr, band))
        if self.isCutOut:
            # Fix: the format arguments were swapped relative to the format
            # string ("from %s as l1 ... programmeID=%s"), putting the
            # programme ID where the merge-log table name belongs.
            fromStr += (",(select frameSetID,c.multiframeID from %s as l1,CurrentAstrometry as c,"
                        "ProgrammeFrame as p,Multiframe as m where p.programmeID=%s and "
                        "p.multiframeID=m.multiframeID and m.frameType='tilestack' "
                        "and m.multiframeID=c.multiframeID and dbo.fGreatCircleDist(l1.ra,"
                        "l1.dec,c.centralRa,c.centralDec)<4) as mf" %
                        (self.programme.getMergeLogTable(), self.programmeID))
            # Fix: was "whereStr = (\"and ...\")", which discarded all the
            # source constraints built above and produced SQL starting
            # with "and"; append with a leading space, matching the
            # correlated branch.
            whereStr += (" and l.frameSetID=mf.frameSetID and mf.multiframeID=m.multiframeID")
    epochFrameList = self.releaseDB.query(selectStr, fromStr, whereStr)

    mfIDs = [epochFrame.multiframeID for epochFrame in epochFrameList]
    mfID = mfIDs[0]
    self.addPbMetadata(primary, band,
                       self._metaSchema["Multiframe"], mfID)
    fileProducts = [EsoRelease.ImageField(*epochFrame)
                    for epochFrame in epochFrameList]

    frames = utils.arbSort(fileProducts, mfIDs,
                           key=attrgetter('fileID'))
    self.addPrimaryFrameKeys(primary, frames[0])
    self.addStandardKeys(primary.header, fitsTable.header, frames, bandList,
                         tableType, field)
2377 2378 #-------------------------------------------------------------------------- 2379
2380 - def updateVarCatHeaders(self, tableType, field, primary, fitsTable, bandList):
2381 """ 2382 """ 2383 2384 2385 2386 # Query MergeLog for passband-specific metadata 2387 # For all synoptic passbands.... 2388 2389 2390 2391 mergeLog = self._progSchema[self.programme.getMergeLogTable()] 2392 isCorrel = 'SynopticSource' in self.programme.getSynopticBestMatchTable() 2393 # filters = [col.name.replace("mfID", '') for col in mergeLog.columns 2394 # if "mfID" in col.name] 2395 if 'MD' not in tableType: 2396 fieldStr = "frameSetID=%s" % field.fileID 2397 fieldInfo = self.fieldInfoDict[field.fileID] 2398 else: 2399 fieldStr = "" 2400 if self.fieldIDs: 2401 # @TODO: convert to correct list 2402 2403 # 2404 fSetIDs = [queries.getFrameSetID(self.releaseDB, fieldID, self.programme) 2405 for fieldID in self.fieldIDs] 2406 frameSetIDs = ','.join([str(fSetID) for fSetID in fSetIDs if fSetID not in self.skipFieldList]) 2407 self.nPointings = len(frameSetIDs.split(',')) 2408 fieldStr = "frameSetID in (%s)" % frameSetIDs 2409 fieldInfo = None 2410 2411 isPassband = False 2412 2413 2414 for column, value in self.queryMetadata(mergeLog, fieldStr): 2415 isPassband = isPassband or "mfid" in column.name.lower() 2416 if not isPassband: 2417 self.addKey(primary, column, value) 2418 2419 # Get all epoch images from best-match table 2420 # "fileName fileID ra dec filterName frameType confID productID" 2421 epochFrameList = [] 2422 for band in bandList: 2423 2424 selectStr = ("distinct fileName,m.multiframeID,%s as ra,%s as dec,m.filterName," 2425 "m.frameType,m.confID,%s as productID" % (dbc.realDefault(), dbc.realDefault(), 2426 dbc.intDefault())) 2427 2428 2429 if isCorrel: 2430 fromStr = ("%s as b,%s as ml,Multiframe as m,%s as s,%s as l" % 2431 (self.programme.getSynopticBestMatchTable(), 2432 self.programme.getSynopticMergeLogTable(), 2433 self.programme.getSourceTable(), self.programme.getMergeLogTable())) 2434 whereStr = ("s.sourceID=b.sourceID and s.%s and (s.priOrSec=0 or " 2435 "s.priOrSec=s.frameSetID) and b.synFrameSetID=ml.synFrameSetID and " 2436 
"b.synFrameSetID>0 and ml.%smfID=m.multiframeID and l.frameSetID=" 2437 "s.frameSetID and m.multiframeID>0" % (fieldStr, band)) 2438 2439 if self.isCutOut: 2440 fromStr += (",(select frameSetID,synFrameSetID from %s as l1,%s as sl1 where " 2441 "dbo.fGreatCircleDist(l1.ra,l1.dec,sl1.ra,sl1.dec)<4) as mf" % 2442 (self.programme.getMergeLogTable(), self.programme.getSynopticMergeLogTable())) 2443 whereStr += (" and l.frameSetID=mf.frameSetID and mf.synFrameSetID=ml.synFrameSetID") 2444 2445 else: 2446 fromStr = ("%s as b,Multiframe as m,Filter as f,%s as s, %s as l" % 2447 (self.programme.getSynopticBestMatchTable(), 2448 self.programme.getSourceTable(), self.programme.getMergeLogTable())) 2449 whereStr = ("s.sourceID=b.sourceID and s.%s and (s.priOrSec=0 or " 2450 "s.priOrSec=s.frameSetID) and b.multiframeID=m.multiframeID and " 2451 "m.filterID=f.filterID and f.shortName='%s' and l.frameSetID=" 2452 "s.frameSetID" % (fieldStr, band)) 2453 if self.isCutOut: 2454 fromStr += (",(select frameSetID,c.multiframeID from %s as l1,CurrentAstrometry as c," 2455 "ProgrammeFrame as p,Multiframe as m where p.programmeID=%s and " 2456 "p.multiframeID=m.multiframeID and m.frameType='tilestack' " 2457 "and m.multiframeID=c.multiframeID and dbo.fGreatCircleDist(l1.ra," 2458 "l1.dec,c.centralRa,c.centralDec)<4) as mf" % 2459 (self.programmeID, self.programme.getMergeLogTable())) 2460 whereStr = ("and l.frameSetID=mf.frameSetID and mf.multiframeID=m.multiframeID") 2461 epochFrameList += self.releaseDB.query(selectStr, fromStr, whereStr) 2462 2463 mfIDs = [epochFrame.multiframeID for epochFrame in epochFrameList] 2464 #mfID = mfIDs[0] 2465 #self.addPbMetadata(primary, self.band, 2466 # self._metaSchema["Multiframe"], mfID) 2467 #self.addPbMetadata(primary, self.band, 2468 # self._metaSchema["MultiframeEsoKeys"], mfID) 2469 #self.addPbMetadata(fitsTable, self.band, 2470 # self._metaSchema["MultiframeDetector"], mfID, 2) 2471 #self.addPbMetadata(fitsTable, self.band, 2472 # 
self._metaSchema["CurrentAstrometry"], mfID, 2) 2473 2474 fileProducts = [EsoRelease.ImageField(*epochFrame) for epochFrame in epochFrameList] 2475 2476 frames = utils.arbSort(fileProducts, mfIDs, 2477 key=attrgetter('fileID')) 2478 self.addPrimaryFrameKeys(primary, frames[0]) 2479 self.addStandardKeys(primary.header, fitsTable.header, frames, bandList, 2480 tableType, field)
2481 2482 2499 2500 2501 2502 #--------------------------------------------------------------------------
2503 - def checkEsoRelease(self, curEsoInfo):
2504 """ 2505 """ 2506 oldProductCode, oldFromDb = curEsoInfo 2507 if oldFromDb != self.releaseDB.database and not CLI.isConfirmed( 2508 "You are using a different release database %s that was used for " 2509 "an earlier phase of this EsoRelease - %s %s. The earlier " 2510 "database was %s. Please confirm this is correct before continuing" 2511 % (self.releaseDB.database, self.programme.getAcronym(), 2512 self.releaseNum, oldFromDb)): 2513 exit() 2514 2515 oldPCBits = set([bit for bit in range(max(self.prodBits)) if oldProductCode & (2 ** bit) > 0]) 2516 newPCBits = set([bit for bit in range(max(self.prodBits)) if self.esoProductTypes & (2 ** bit) > 0]) 2517 # Repeating bits 2518 # repeatedBits = newPCBits.intersection(oldPCBits) 2519 newBits = newPCBits.difference(oldPCBits) 2520 self.newProdTypes = oldProductCode + sum([2 ** bit for bit in newBits])
2521 2522 #-------------------------------------------------------------------------- 2523
    def outgestMultiEpoch(self):
        """ Outgests per-band multi-epoch photometry ("mPhot") catalogue FITS
            files, one metadata file per band plus one file per frame set.
            Each file outgest runs in a forked multiprocessing.Process to
            avoid memory leaks in long runs. Maintains a running row counter
            (self.currentRowNumber) used to assign globally unique PHOT_ID
            values via SQL ROW_NUMBER().
        """
        self.catType = 'pMultCat'
        # Which best-match table is used for the variability data: a
        # SynopticSource-based table means the correlated layout.
        isCorrel = 'SynopticSource' in self.programme.getSynopticBestMatchTable()
        rFilters = self.esoProgReq.mEpochFilters.split(',')
        # Synoptic filters required for this programme, optionally restricted
        # to the release's requested multi-epoch filters.
        reqFilters = self.releaseDB.query(
            selectStr="r.filterID,shortName",
            fromStr="RequiredFilters as r,Filter as f",
            whereStr="f.filterID=r.filterID and programmeID=%s and "
            "isSynoptic=1%s" % (self.programmeID,
            " AND shortName in (%s)" % ','.join("'%s'" % flt for flt in rFilters) if rFilters else ""))

        if self.fieldIDs:
            # Frame sets for the requested fields, excluding skipped ones.
            # NOTE(review): getFrameSetID is called twice per field here --
            # presumably cheap/cached; confirm before refactoring.
            frameSetIDs = ','.join(
                str(queries.getFrameSetID(self.releaseDB, fieldID, self.programme))
                for fieldID in self.fieldIDs
                if queries.getFrameSetID(self.releaseDB, fieldID, self.programme)
                not in self.skipFieldList)

        mergeLog = self._progSchema.get(self.programme.getMergeLogTable())
        fileIdStr = mergeLog.primaryKey()
        # Load databases keep detections split across Raw/Astrometry/Photometry
        # tables; merged databases have a single combined detection table.
        self.splitDetTables = self.releaseDB.sysc.loadDatabase in self.releaseDB.database
        detectionTable = self.programme.getDetectionTable().replace('Raw', '')
        detTable = self._progSchema.get(detectionTable)
        rTable = self.programme.getDetectionTable().replace('Raw', '')
        pTable = self.programme.getDetectionTable().replace('Raw', '')
        if self.splitDetTables:
            detectionTable = [self.programme.getDetectionTable(),
                              self.programme.getDetectionTable().replace('Raw', 'Astrometry'),
                              self.programme.getDetectionTable().replace('Raw', 'Photometry')]
            rTable = detectionTable[0]
            pTable = detectionTable[2]
            # Join the three split detection tables on their common key.
            joinDetTabStr = ''
            for attr in ['multiframeID', 'extNum', 'seqNum']:
                for ii, tableName in enumerate(detectionTable):
                    if ii > 0:
                        joinDetTabStr += ' AND %s.%s=%s.%s' % (tableName, attr,
                                                               detectionTable[0], attr)

        sourceTable = self._progSchema[self.programme.getSourceTable()]
        synSrcTable = self._progSchema.get(self.programme.getSynopticSourceTable()) if isCorrel else None
        bmTable = self._progSchema.get(self.programme.getSynopticBestMatchTable())
        synMlTable = self._progSchema.get(self.programme.getSynopticMergeLogTable()) if isCorrel else None
        # Non-deprecated frame sets to produce files for (ra converted to hours).
        fields = self.releaseDB.query(
            selectStr=fileIdStr + " AS fileID, ra*24/360 AS ra, dec",
            fromStr=mergeLog,
            whereStr=DepCodes.selectNonDeprecated
            + (" AND frameSetID IN (%s)" % frameSetIDs
               if self.fieldIDs else ""),
            orderBy=fileIdStr)
        # Resume the PHOT_ID sequence from any previous run of this programme.
        self.currentRowNumber = self.archive.query("currentRowNumber",
            "EsoReleaseMEpoch", "programmeID=%s" % self.programmeID,
            firstOnly=True, default=0)

        for filterID, band in reqFilters:
            addUcd = ';%s' % self.bandUcdDict[band.lower()]
            mPhotDir = os.path.join(self._fileDir, "mPhot_%s" % band.upper())
            columns = []
            utils.ensureDirExist(os.path.join(self._fileDir, "mPhot_%s" % band.upper()))
            fnFormat = FileNameFormat(self.programme, self.releaseNum,
                mPhotDir, fileType="mPhot", filters=band.lower())
            fnFormatMD = FileNameFormat(self.programme, self.releaseNum,
                mPhotDir, fileType="mPhotMetaData", filters=band.lower())

            # Get columns and selectStr.
            iauNameSel, iauNameAttr = self.getIAUNameColSel(sourceTable.name,
                                                            ucdTag='meta.id')

            # PHOT_ID select fragment: two %s placeholders (ORDER BY column
            # and row-number offset) are filled in per field, below.
            photSel = ('(ROW_NUMBER() OVER (ORDER BY %s) +%s) AS PHOT_ID ')
            photColAttr = schema.Attribute()
            photColAttr.name = 'PHOT_ID'
            photColAttr.dataType = 'bigint'
            photColAttr.tag = {'--/U':'', '--/C': 'meta.id;meta.main',
                '--/D':'UID for observation. Combination of source and detection UIDs',
                '--/B':''}
            columns = [photColAttr]
            selectStr = photSel
            columns.append(iauNameAttr)
            selectStr += ', %s' % iauNameSel
            columns.append(bmTable.attribute["sourceID"])
            selectStr += ', %s.sourceID' % bmTable.name
            # @TODO: Use all epochs, not just good measurements.
            # @TODO: Use running number, not objID
            # @TODO: Use mjdObs when no detection.
            if isCorrel:
                # Correlated layout: MJD and magnitudes come from the
                # SynopticSource table's per-band columns.
                columns.extend([synSrcTable.attribute["%sMjd" % band.lower()],
                                synSrcTable.attribute["%sAperMag3" % band.lower()],
                                synSrcTable.attribute["%sAperMag3Err" % band.lower()]])
                selectStr += ', %s.%smjd AS mjd, %s.%saperMag3 AS %sMAG, %s.%saperMag3Err AS %sERR' % (synSrcTable.name, band.upper(), synSrcTable.name, band.upper(), band.upper(), synSrcTable.name, band.upper(), band.upper())
                if self.programmeID == 130:
                    # Special case for programme 130: also release the
                    # per-band post-processing error bits.
                    columns.extend([synSrcTable.attribute["%sppErrBits" % band.lower()]])
                    selectStr += ', %s.%sppErrBits AS %sPPERRBITS' % (synSrcTable.name, band.upper(), band.upper())

                if self.splitDetTables:
                    fromStr = '%s, %s, %s, %s' % (self.programme.getSourceTable(), self.programme.getSynopticBestMatchTable(), self.programme.getSynopticMergeLogTable(), self.programme.getSynopticSourceTable())
                    fromStr += ', ' + ','.join(detectionTable)
                    whereStr = ("%s.sourceID = %s.sourceID AND %s.synFrameSetID="
                        "%s.synFrameSetID AND %s.synSeqNum=%s.synSeqNum AND "
                        "%s.synFrameSetID=%s.synFrameSetID AND "
                        "%s.%smfID=%s.multiframeID AND %s.%seNum=%s.extNum AND "
                        "%s.%sseqNum=%s.seqNum AND %s.%sseqNum>0 %s" % (sourceTable.name,
                        bmTable.name, bmTable.name, synSrcTable.name, bmTable.name,
                        synSrcTable.name, bmTable.name, synMlTable.name,
                        synMlTable.name, band, detectionTable[0], synMlTable.name,
                        band, detectionTable[0], synSrcTable.name, band,
                        detectionTable[0], synSrcTable.name, band, joinDetTabStr))
                else:
                    fromStr = '%s, %s, %s, %s, %s' % (self.programme.getSourceTable(), self.programme.getSynopticBestMatchTable(), self.programme.getSynopticMergeLogTable(), self.programme.getSynopticSourceTable(), detectionTable)
                    whereStr = ("%s.sourceID = %s.sourceID AND %s.synFrameSetID="
                        "%s.synFrameSetID AND %s.synSeqNum=%s.synSeqNum AND "
                        "%s.synFrameSetID=%s.synFrameSetID AND "
                        "%s.%smfID=%s.multiframeID AND %s.%seNum=%s.extNum AND "
                        "%s.%sseqNum=%s.seqNum AND %s.%sseqNum>0" % (sourceTable.name,
                        bmTable.name, bmTable.name, synSrcTable.name, bmTable.name,
                        synSrcTable.name, bmTable.name, synMlTable.name,
                        synMlTable.name, band, detTable.name, synMlTable.name,
                        band, detTable.name, synSrcTable.name, band,
                        detTable.name, synSrcTable.name, band))
                orderBy = "%s.sourceID,%s.%smjd" % (sourceTable.name,
                                                    synSrcTable.name, band)
            else:
                # Uncorrelated layout: MJD/magnitudes come straight from the
                # detection table(s).
                columns.extend([detTable.attribute["mjd"], detTable.attribute["aperMag3"], detTable.attribute["aperMag3err"]])
                selectStr += ', %s.mjd AS mjd, %s.aperMag3 AS %sMAG, %s.aperMag3err AS %sERR' % (rTable, pTable, band.upper(), pTable, band.upper())
                if self.splitDetTables:
                    fromStr = '%s, %s' % (self.programme.getSourceTable(), self.programme.getSynopticBestMatchTable())
                    fromStr += ', ' + ','.join(detectionTable)
                    whereStr = ("%s.sourceID = %s.sourceID AND %s.multiframeID="
                        "%s.multiframeID AND %s.extNum=%s.extNum AND %s.seqNum="
                        "%s.seqNum AND %s.filterID=%s AND %s.seqNum>0 %s" % (sourceTable.name,
                        bmTable.name, bmTable.name, detectionTable[0], bmTable.name,
                        detectionTable[0], bmTable.name, detectionTable[0], detectionTable[0],
                        filterID, detectionTable[0], joinDetTabStr))
                else:
                    fromStr = '%s, %s, %s' % (self.programme.getSourceTable(), self.programme.getSynopticBestMatchTable(), detectionTable)
                    whereStr = ("%s.sourceID = %s.sourceID AND %s.multiframeID="
                        "%s.multiframeID AND %s.extNum=%s.extNum AND %s.seqNum="
                        "%s.seqNum AND %s.filterID=%s AND %s.seqNum>0" % (sourceTable.name,
                        bmTable.name, bmTable.name, detTable.name, bmTable.name,
                        detTable.name, bmTable.name, detTable.name, detTable.name,
                        filterID, detTable.name))
                orderBy = "%s.sourceID,%s.mjd" % (sourceTable.name, detectionTable)

            if self.isQuickRun:
                selectStr = "TOP 10 " + selectStr

            # Append the band UCD to every column that is not an identifier.
            for column in columns:
                if '--/C' in column.tag and 'meta.id' not in column.tag['--/C']:
                    column.tag['--/C'] += addUcd
            # Rename columns to their released names. Positions 0-5 are fixed
            # by the construction above: PHOT_ID, iauName, sourceID, mjd,
            # aperMag3, aperMag3Err.
            columns[0].name = 'PHOT_ID'
            columns[3].name = 'MJD'
            columns[4].name = '%sMAG' % band.upper()
            columns[5].name = '%sERR' % band.upper()
            sIDIndex = [index for index, column in enumerate(columns)
                        if column.name == 'sourceID'][0]
            sIDtag = columns[sIDIndex].tag
            for key in sIDtag:
                if key == '--/C':
                    sIDtag[key] = 'meta.id'
            columns[sIDIndex].tag = sIDtag
            # Replace ra, dec with the ESO-required column names.
            raIndex = [index for index, column in enumerate(columns)
                       if column.name == 'ra']
            decIndex = [index for index, column in enumerate(columns)
                        if column.name == 'dec']
            if raIndex:
                columns[raIndex[0]].name = 'ra2000'
            if decIndex:
                columns[decIndex[0]].name = 'dec2000'

            if self.esoProgReq.photSys == 'AB':
                # Convert Vega magnitudes in the select string to AB.
                vegaToAB = self.releaseDB.query(
                    "vegaToAB", "Filter", "filterID=%s order by filterID"
                    % filterID)
                selectStr = self.modifyToAB(selectStr, vegaToAB, filters=[band])

            if not self.isRegion:
                # Band-level metadata file (no field restriction).
                query = SelectSQL(selectStr, fromStr)
                filePath = fnFormatMD.getFilePath(None)
                self.updateDirDict(filePath)
                outgest = Process(target=self.outgestSourceFile,
                    args=(filePath, columns, query, None, [band], 'MMD'))
                if not os.path.exists(filePath) or self.piOnly:
                    outgest.start()
                    outgest.join()
                    if outgest.exitcode:
                        raise EsoRelease.CuError("Forked outgest process failed."
                            " Please check stdout.")

            Logger.addMessage("Creating multi-epoch table files for %s %s-band frame sets..."
                              % (len(fields), band.upper()))
            fields = [] if self.piOnly else fields
            progress = ForLoopMonitor(fields)
            for field in fields:
                # Fill in the ROW_NUMBER() placeholders: ordering column and
                # the running offset, so PHOT_IDs are unique across files.
                finalSelectStr = selectStr % (orderBy, self.currentRowNumber)
                if self.isRegion:
                    mPhotDir = os.path.join(self._fileDir, "mPhot_%s" % band.upper(), self.regionDict[field.fileID][0])
                    fnFormat = FileNameFormat(self.programme, self.releaseNum,
                        mPhotDir, fileType="mPhot", filters=band.lower())
                    utils.ensureDirExist(mPhotDir)
                filePath = fnFormat.getFilePath(field)
                self.updateDirDict(filePath)
                query = SelectSQL(finalSelectStr, fromStr,
                    where=whereStr + " AND %s.%s=%s" % (sourceTable.name, fileIdStr, field.fileID))
                # Run each file outgest as a separate process to avoid memory leaks
                outgest = Process(target=self.outgestSourceFile,
                    args=(filePath, columns, query, field, [band],
                          'MFSC'))
                if not os.path.exists(filePath) and field.fileID not in self.skipFieldList:
                    outgest.start()
                    outgest.join()
                if field.fileID not in self.skipFieldList:
                    self.mPhotFilesDict[field.fileID].append(filePath)
                    if outgest.exitcode:
                        raise EsoRelease.CuError("Forked outgest process failed."
                            " Please check stdout.")
                    if os.path.exists(filePath):
                        # Advance the PHOT_ID offset by the number of rows
                        # just written (NAXIS2 of the table HDU).
                        hdulist = fits.open(filePath)
                        self.currentRowNumber += hdulist[1].header['NAXIS2']
                        hdulist.close()
                progress.testForOutput()
    def outgestVariables(self):
        """ Outgests the variability catalogue ("varCat") FITS files: one
            metadata file plus one file per non-deprecated frame set that
            contains sources passing the programme's variable-selection
            criterion. Each file outgest runs in a forked
            multiprocessing.Process to avoid memory leaks.
        """
        self.catType = 'varCat'
        mergeLog = self._progSchema.get(self.programme.getMergeLogTable())
        sourceTable = self._progSchema[self.programme.getSourceTable()]
        varTable = self._progSchema.get(self.programme.getVariabilityTable())

        fileIdStr = mergeLog.primaryKey()
        # Externally supplied variable tables are handled elsewhere.
        if self.checkifExternalVarTables():
            return

        if self.fieldIDs:
            frameSetIDs = ','.join(
                str(queries.getFrameSetID(self.releaseDB, fieldID, self.programme))
                for fieldID in self.fieldIDs)

        # Non-deprecated frame sets to produce files for (ra in hours).
        fields = self.releaseDB.query(
            selectStr=fileIdStr + " AS fileID, ra*24/360 AS ra, dec",
            fromStr=mergeLog,
            whereStr=DepCodes.selectNonDeprecated
            + (" AND frameSetID IN (%s)" % frameSetIDs
               if self.fieldIDs else ""),
            orderBy=fileIdStr)

        # Synoptic filters define which per-band variability columns exist.
        reqFilters = self.releaseDB.query(
            selectStr="r.filterID,shortName",
            fromStr="RequiredFilters as r,Filter as f",
            whereStr="f.filterID=r.filterID and programmeID=%s and "
                "isSynoptic=1" % self.programmeID)
        utils.ensureDirExist(os.path.join(self._fileDir, "varCat"))
        fnFormat = FileNameFormat(self.programme, self.releaseNum,
            os.path.join(self._fileDir, "varCat"), fileType="varCat", filters=''.join(
            [band.lower() for _fID, band in reqFilters]))
        fnFormatMD = FileNameFormat(self.programme, self.releaseNum,
            os.path.join(self._fileDir, "varCat"), fileType="varCatMetaData",
            filters=''.join([band.lower() for _fID, band in reqFilters]))

        # Build the released column list / select string: IAU name, sourceID,
        # then per band: mean magnitude, amplitude (max-min), variability
        # probability.
        iauNameSel, iauNameAttr = self.getIAUNameColSel(sourceTable.name)
        columns = [iauNameAttr]
        selectStr = iauNameSel
        columns.append(varTable.attribute["sourceID"])
        selectStr += ', %s.sourceID' % varTable.name
        bandList = [band for _fID, band in reqFilters]
        for band in bandList:
            columns.append(varTable.attribute["%smeanMag" % band.lower()])
            selectStr += ', %s.%s' % (varTable.name, columns[-1])
            # Amplitude is derived on the fly from the max/min magnitudes.
            ampNameSel = "(%sMaxMag-%sMinMag) as %sAmpl" % (band, band, band)
            ampNameAttr = schema.Attribute()
            ampNameAttr.name = '%sAmpl' % band
            ampNameAttr.dataType = 'real'
            bandSpec = (';%s;%s' % (self.bandUcdDict[band], self.bandUcdDict[band])
                        if band in self.bandUcdDict else ';em.IR.NIR;em.IR.NIR')
            ampNameAttr.tag = {'--/U':'Mag', '--/C':'src.var.amplitude' + bandSpec,
                '--/D':'Amplitude of variable in %s-band' % band}
            columns.append(ampNameAttr)
            selectStr += ', %s' % (ampNameSel)
            columns.append(varTable.attribute["%sprobVar" % band.lower()])
            selectStr += ', %s.%s' % (varTable.name, columns[-1])

        # varType attribute is prepared but currently not released (see the
        # commented-out lines below).
        varAttr = schema.Attribute()
        varAttr.name = "varType"
        varAttr.dataType = 'varchar(25)'
        varAttr.tag = {'--/U':'', '--/C':'meta.code.class;src.var', '--/D':'Variability type, e.g. Cepheid'}
        # columns = columns + [varAttr]
        # selectStr += ", 'NONE'"
        if selectStr and self.isQuickRun:
            selectStr = "TOP 10 " + selectStr

        if self.esoProgReq.photSys == 'AB':
            # Convert Vega magnitudes in the select string to AB.
            filters = [band for _filterID, band in reqFilters]
            vegaToAB = self.releaseDB.query(
                "vegaToAB", "Filter", "shortName in (%s) order by filterID"
                % ','.join(["'%s'" % shtName for shtName in filters]))
            selectStr = self.modifyToAB(selectStr, vegaToAB, filters=filters)

        fromStr = "%s,%s" % (self.programme.getSourceTable(),
                             self.programme.getVariabilityTable())
        # Restrict to sources passing the programme's variable selection.
        whereStr = "%s.sourceID=%s.sourceID AND %s.%s" % (sourceTable.name,
            varTable.name, varTable.name, self.esoProgReq.varSelString)

        if not self.isRegion:
            # Metadata file (no field restriction).
            # NOTE(review): unlike the per-field loop below, this join has no
            # outgest.exitcode check -- confirm whether that is intentional.
            query = SelectSQL(selectStr, fromStr) if selectStr else None
            filePath = fnFormatMD.getFilePath(None)
            self.updateDirDict(filePath)
            outgest = Process(target=self.outgestSourceFile,
                args=(filePath, columns, query, None, bandList, 'VMD'))
            if not os.path.exists(filePath) or self.piOnly:
                outgest.start()
                outgest.join()
        Logger.addMessage("Creating variability table files for %s frame sets..."
                          % len(fields))
        fields = [] if self.piOnly else fields
        progress = ForLoopMonitor(fields)
        for field in fields:
            if self.isRegion:
                fnFormat = FileNameFormat(self.programme, self.releaseNum,
                    os.path.join(self._fileDir, "varCat/%s" % self.regionDict[field][0]),
                    fileType="varCat", filters=''.join(
                    [band.lower() for _fID, band in reqFilters]))
                utils.ensureDirExist(os.path.join(self._fileDir, "varCat/%s" % self.regionDict[field][0]))
            filePath = fnFormat.getFilePath(field)
            self.updateDirDict(filePath)
            query = (SelectSQL(selectStr, fromStr,
                where=whereStr + " AND %s.%s=%s" % (sourceTable.name,
                fileIdStr, field.fileID)) if selectStr else None)
            # Only outgest fields that actually contain variables.
            varsExist = self.releaseDB.query("COUNT(*)", fromStr,
                whereStr + " AND %s.%s=%s" % (sourceTable.name,
                fileIdStr, field.fileID), firstOnly=True) > 0
            # Run each file outgest as a separate process to avoid memory leaks
            outgest = Process(target=self.outgestSourceFile,
                args=(filePath, columns, query, field, bandList,
                      'VFSC'))
            if varsExist and not os.path.exists(filePath) and field.fileID not in self.skipFieldList:
                outgest.start()
                outgest.join()
                if outgest.exitcode:
                    raise EsoRelease.CuError("Forked outgest process failed."
                        " Please check stdout.")

            progress.testForOutput()
2880 - def isExternalMainTable(self, externCat):
2881 """ 2882 """ 2883 2884 table = schema.parseTables(externCat.sqlSchemaFile, [externCat.tableName])[0] 2885 mainTableName = None 2886 for constraint in table.constraints: 2887 # e.g. fk_Provenance_combiframeID_to_Multiframe_multiframeID 2888 # # possibly have to make tougher constraint. 2889 if constraint.name.startswith("fk_"): 2890 # Parse 2891 mainTableName = constraint.name.split('_')[4] 2892 return mainTableName
2893 #-------------------------------------------------------------------------- 2894
    def outgestExternalCats(self):
        """ Outgests externally-provided (non-mosaic) catalogue products,
            matched to release sources via their neighbour tables where
            available. Produces one metadata file per product plus one file
            per overlapping field; each file outgest runs in a forked
            multiprocessing.Process to avoid memory leaks.
        """
        Field = namedtuple("Field", "fieldID fileID ra dec")

        externalCatInfo = self.archive.query(
            "*", "ExternalProduct", "programmeID=%s AND productType != 'mosaic'" %
            self.programmeID)
        reqFilters = self.releaseDB.query(
            selectStr="r.filterID,shortName",
            fromStr="RequiredFilters as r,Filter as f",
            whereStr="f.filterID=r.filterID and programmeID=%s" % self.programmeID)
        mergeLog = self._progSchema.get(self.programme.getMergeLogTable())
        sourceTable = self._progSchema[self.programme.getSourceTable()]
        fileIdStr = mergeLog.primaryKey()
        for externalCat in externalCatInfo:
            # Each has a neighbour table
            self.mainTableName = self.isExternalMainTable(externalCat)
            self.catType = externalCat.productType
            extTable = schema.parseTables(externalCat.sqlSchemaFile,
                                          ["%s" % externalCat.tableName])[0]
            bandList = self.findFiltersUsed(extTable)

            # Neighbour table name: SourceXExternal, with the programme
            # acronym stripped from the external table name.
            neighTableName = "%sX%s" % (self.programme.getSourceTable(),
                self.mainTableName.replace(self.programme.getAcronym(), '')
                if self.mainTableName else
                externalCat.tableName.replace(self.programme.getAcronym(), ''))
            neighTable = schema.parseTables(self.programme.getAttr("neighboursSchema"),
                                            [neighTableName])[0]
            self.hasNeighTable = self.releaseDB.existsTable(neighTableName) and not self.noExternNeigh
            self.isSourceMatched = externalCat.sourceMatched == 1
            isVariableTable = "variable" in externalCat.tableName.lower()
            # Table-type code prefix: V(ariable) or E(xternal).
            tableTypeRoot = "V" if isVariableTable else "E"
            matchDist = None
            self.reference = (externalCat.reference
                              if externalCat.reference != dbc.charDefault() else None)
            if self.hasNeighTable:
                # Match radius in arcmin (joinCriterion is stored in degrees).
                matchDist = self.releaseDB.query("60*joinCriterion", "RequiredNeighbours",
                    "neighbourTable='%s'" % neighTableName,
                    firstOnly=True)

            extSIDName = self.releaseDB.query("sourceIDName", "ProgrammeTable",
                "programmeID=%s and tableName='%s'" % (self.programmeID,
                self.mainTableName if self.mainTableName else externalCat.tableName),
                firstOnly=True)
            if self.fieldIDs:
                frameSetIDs = ','.join(
                    str(queries.getFrameSetID(self.releaseDB, fieldID, self.programme))
                    for fieldID in self.fieldIDs)

            # @TODO: do field selection on fieldID from ExternalProduct catalogues
            extFields = self.archive.query(
                selectStr="fieldID, centralRa, centralDec",
                fromStr="ExternalProductCatalogue",
                whereStr="productType='%s'" % externalCat.productType)

            availFields = self.releaseDB.query(
                selectStr="%s as fileIdStr, ra, dec" % fileIdStr,
                fromStr=mergeLog,
                whereStr=DepCodes.selectNonDeprecated
                + (" AND frameSetID IN (%s)" % frameSetIDs
                   if self.fieldIDs else ""),
                orderBy=fileIdStr)

            # Pair external catalogue fields with release frame sets whose
            # centres lie within 0.5 arcmin (ra stored in hours).
            fields = [Field(ef.fieldID, af.fileIdStr, af.ra * 24 / 360., af.dec)
                      for ef in extFields for af in availFields if
                      astro.angularSep([math.radians(ef.centralRa), math.radians(ef.centralDec)],
                                       [math.radians(af.ra), math.radians(af.dec)]) < math.radians(0.5 / 60.)]

            utils.ensureDirExist(os.path.join(self._fileDir, externalCat.productType))
            fnFormat = FileNameFormat(self.programme, self.releaseNum,
                os.path.join(self._fileDir, externalCat.productType),
                fileType=externalCat.productType, filters=''.join(
                [band.lower() for _fID, band in reqFilters]))
            fnFormatMD = FileNameFormat(self.programme, self.releaseNum,
                os.path.join(self._fileDir, externalCat.productType),
                fileType="%sMetaData" % externalCat.productType,
                filters=''.join([band.lower() for _fID, band in reqFilters]))

            # Get columns and selectStr. selectStr selects from the release
            # source side; selectStr2 is the external-only variant used in the
            # UNION branches (default-valued sourceID/distanceMins).
            iauNameSel, iauNameAttr = self.getIAUNameColSel(sourceTable.name)
            columns = [iauNameAttr]
            selectStr = "%s as iauName" % iauNameSel
            columns.append(sourceTable.attribute["sourceID"])
            selectStr += ', %s.sourceID' % sourceTable.name
            iauNameSel, iauNameAttr = self.getIAUNameColSel(extTable.name)
            selectStr2 = "%s as iauName, %s as sourceID" % (iauNameSel, dbc.intDefault())
            fromStr = "%s" % self.programme.getSourceTable()
            if self.isSourceMatched:
                # Source-matched products use a fixed 3 arcsec match radius.
                matchDist = 3. / 60.

            # Best (nearest, within matchDist) neighbour match per external
            # object.
            whereStr = ("%s.sourceID=%s.masterObjID AND %s.%s=%s.slaveObjID "
                "AND %s.distanceMins<%s AND %s.distanceMins in ("
                "SELECT min(distanceMins) FROM %s as x WHERE x.slaveObjID="
                "%s.slaveObjID)" % (sourceTable.name, neighTableName,
                extTable.name, extSIDName, neighTableName, neighTableName,
                matchDist, neighTableName, neighTableName, neighTableName)
                if self.hasNeighTable else "")

            # Neighbour entries beyond the match radius.
            whereStr2 = ("%s.sourceID=%s.masterObjID AND %s.%s=%s.slaveObjID "
                "AND %s.distanceMins>=%s" % (sourceTable.name, neighTableName,
                extTable.name, extSIDName, neighTableName, neighTableName,
                matchDist)
                if self.hasNeighTable else "")
            # External objects with no neighbour entry at all; @FIELD is
            # substituted with the field ID per field below.
            whereStr3 = ("%s.%s NOT IN (SELECT slaveObjID FROM %s as x,%s as e "
                "where e.fieldID='@FIELD' and e.%s=x.slaveObjID)"
                % (extTable.name, extSIDName, neighTableName, extTable.name, extSIDName))

            # Released external columns are those carrying a UCD tag.
            extTabCols = [column for column in extTable.columns
                          if '--/C' in column.tag]
            fromStr2 = extTable.name
            if self.hasNeighTable:
                if not self.isSourceMatched:
                    selectStr += ', distanceMins'
                    selectStr2 += ', %s as distanceMins' % dbc.realDefault()
                    columns.append(neighTable.attribute["distanceMins"])
                selectStr += ', ' + ', '.join(["%s.%s" % (extTable.name, column.name) for column in extTabCols])
                selectStr2 += ', ' + ', '.join(["%s.%s" % (extTable.name, column.name) for column in extTabCols])
                fromStr += ", %s, %s" % (externalCat.tableName,
                                         neighTableName)

            else:
                if not self.isSourceMatched:
                    columns.append(neighTable.attribute["distanceMins"])
                # @TODO: split later...
                # No neighbour table: the external columns are carried in a
                # separate "SPLIT" section of the select/from strings,
                # presumably resolved downstream by the outgester -- confirm.
                extSelStr = ', '.join([column.name for column in extTable.columns
                                       if column.tag['--/C']])
                fromStr += " SPLIT %s" % (externalCat.tableName)

            columns.extend(extTabCols)
            # Replace ra, dec with correct names for ESO
            raIndex = [index for index, column in enumerate(columns)
                       if column.name == 'ra'][0]
            decIndex = [index for index, column in enumerate(columns)
                        if column.name == 'dec'][0]
            columns[raIndex].name = 'ra2000'
            columns[decIndex].name = 'dec2000'
            srcIDIndex = [index for index, column in enumerate(columns)
                          if column.name == 'sourceID'][0]
            columns[srcIDIndex].tag['--/C'] = 'meta.id'
            if selectStr and self.isQuickRun:
                if self.hasNeighTable:
                    selectStr = "TOP 10 " + selectStr
                else:
                    selectStr = selectStr + " SPLIT TOP 10 " + extSelStr

            elif not self.hasNeighTable:
                selectStr = selectStr + " SPLIT " + extSelStr
                selectStr2 = selectStr2 + " SPLIT " + extSelStr
            if self.esoProgReq.photSys == 'AB':
                # Convert Vega magnitudes in both select strings to AB.
                filters = [band for _filterID, band in reqFilters]
                vegaToAB = self.releaseDB.query(
                    "vegaToAB", "Filter", "shortName in (%s) order by filterID"
                    % ','.join(["'%s'" % shtName for shtName in filters]))
                selectStr = self.modifyToAB(selectStr, vegaToAB, filters=filters)
                selectStr2 = self.modifyToAB(selectStr2, vegaToAB, filters=filters)

            if not self.isRegion:
                # Product-level metadata file (no field restriction).
                query = SelectSQL(selectStr, fromStr) if selectStr else None
                filePath = fnFormatMD.getFilePath(None)
                self.updateDirDict(filePath)
                outgest = Process(target=self.outgestSourceFile,
                    args=(filePath, columns, query, None, bandList,
                          tableTypeRoot + 'MD'))
                if not os.path.exists(filePath) or self.piOnly:
                    outgest.start()
                    outgest.join()
            Logger.addMessage("Creating %s table files for %s fields..."
                % (externalCat.productType, len(fields)))

            fields = [] if self.piOnly else fields
            progress = ForLoopMonitor(fields)
            for field in fields:
                whereStr3_fld = whereStr3.replace("@FIELD", "%s" % field.fieldID)
                if self.isRegion:
                    fnFormat = FileNameFormat(self.programme, self.releaseNum,
                        os.path.join(self._fileDir, "%s/%s" %
                        (externalCat.productType, self.regionDict[field][0])),
                        fileType=externalCat.productType, filters=''.join(
                        [band.lower() for _fID, band in reqFilters]))
                    utils.ensureDirExist(os.path.join(self._fileDir, "%s/%s" %
                        (externalCat.productType, self.regionDict[field][0])))
                filePath = fnFormat.getFilePath(field)
                self.updateDirDict(filePath)
                frmWhrStr = " AND %s.frameSetID=%s" % (sourceTable.name, field.fileID)
                fldWhrStr = ("%s.fieldID='%s'" %
                    (extTable.name, field.fieldID)
                    if self.hasNeighTable else "%s.frameSetID=%s SPLIT %s.fieldID='%s'"
                    % (sourceTable.name, field.fileID, extTable.name, field.fieldID))
                # Combine the base where-clauses (when non-empty) with the
                # per-field restrictions.
                finWhrStr2 = fldWhrStr if len(whereStr2.split()) < 1 else whereStr2 + frmWhrStr + " AND " + fldWhrStr
                finWhrStr3 = fldWhrStr if len(whereStr3_fld.split()) < 1 else whereStr3_fld + " AND " + fldWhrStr
                finWhrStr = fldWhrStr if len(whereStr.split()) < 1 else whereStr + frmWhrStr + " AND " + fldWhrStr

                # COMPOUND QUERY: matched sources UNION beyond-radius
                # neighbours UNION unmatched external objects.
                q2 = SelectSQL(selectStr2, fromStr, where=finWhrStr2)
                q3 = SelectSQL(selectStr2, fromStr2, where=finWhrStr3)
                if not self.isSourceMatched and self.hasNeighTable:
                    query = (SelectSQL(selectStr, fromStr, where=finWhrStr +
                        " UNION %s UNION %s ORDER BY %s.%s" %
                        (q2, q3, extTable.name, extSIDName))
                        if selectStr else None)
                else:
                    query = SelectSQL(selectStr, fromStr, where=finWhrStr)
                # Run each file outgest as a separate process to avoid memory leaks
                outgest = Process(target=self.outgestSourceFile,
                    args=(filePath, columns, query, field, bandList,
                          tableTypeRoot + 'FSC'))
                if not os.path.exists(filePath) and field.fileID not in self.skipFieldList:
                    outgest.start()
                    outgest.join()
                    if outgest.exitcode:
                        raise EsoRelease.CuError("Forked outgest process failed."
                            " Please check stdout.")

                progress.testForOutput()
3158 3159 3160 #-------------------------------------------------------------------------- 3161
3162 - def findFiltersUsed(self, extTable):
3163 """ Uses UCD information to find filters used 3164 """ 3165 3166 bands = set() 3167 for column in extTable.columns: 3168 if '--/C' in column.tag: 3169 if 'em' in column.tag['--/C']: 3170 filterTag = column.tag['--/C'].split(';')[-1] 3171 if filterTag in self.ucdBandDict: 3172 band = self.ucdBandDict[filterTag] 3173 if column.name.lower().startswith(band.lower()): 3174 bands.add(band) 3175 3176 3177 return bands
3178 3179 #-------------------------------------------------------------------------- 3180 3181
3182 - def getExternalColumns(self, fields):
3183 """ 3184 """ 3185 3186 3187 # @FIXME: Get rid of this. Why? 3188 extCatLoc = self.getExternalCatLocation() 3189 3190 externalCatHeader = os.path.join(self.sysc.esoExtDir, extCatLoc.directory, 3191 extCatLoc.fileNameRoot + 'header.csv') 3192 # READ in externalCatHeader and create columns 3193 columns = [] 3194 for colName, dataType, unitTag, ucdTag, descTag, defTag in csv.File(externalCatHeader): 3195 attr = schema.Attribute() 3196 attr.name = colName 3197 attr.dataType = dataType 3198 defTag = defTag if defTag else ' ' 3199 attr.tag = {'--/U':unitTag, '--/C':ucdTag, '--/D':descTag, '--/N':defTag} 3200 columns.append(attr) 3201 3202 # Now make dictionary of pointings. 3203 filesToProcess = os.listdir(os.path.join(self.sysc.esoExtDir, 3204 extCatLoc.directory)) 3205 self.externalPointingDict = dict() 3206 for fileName in filesToProcess: 3207 if os.path.join(self.sysc.esoExtDir, extCatLoc.directory, fileName) != externalCatHeader: 3208 if '%s_er%s_%s' % (self.programme.getAcronym().lower(), self.releaseNum, extCatLoc.productType) in fileName and fileName.endswith('.cat'): 3209 pointing = fileName.split(extCatLoc.fileNameRoot)[1].split('_full.')[0] 3210 fieldID = self.findField(fields, pointing) 3211 if fieldID: 3212 self.externalPointingDict[fieldID] = pointing 3213 return columns
3214 3215 #-------------------------------------------------------------------------- 3216
3217 - def findField(self, fields, pointing):
3218 """ 3219 """ 3220 for fileID, rah, dec in fields: 3221 raString, decString = astro.convertCoordEso(rah * 15., dec, outString=True) 3222 pointString = raString[:2] + 'h' + raString[2:4] + decString[0] + '0' + decString[1:3] + 'd' + decString[3:5] 3223 if pointing == pointString: 3224 return fileID
3225 #-------------------------------------------------------------------------- 3226
3227 - def getExternalCatLocation(self):
3228 """ 3229 """ 3230 return self.archive.query("directory,fileNameRoot", "ExternalProduct", 3231 "programmeID=%s AND releaseNum=%s AND productType='%s'" % 3232 (self.programmeID, self.releaseNum, self.catType), firstOnly=True)
3233 3234 #--------------------------------------------------------------------------
3235 - def getQueryLists(self, query):
3236 """ 3237 """ 3238 if 'UNION' in query.whereStr: 3239 parts = query.whereStr.split(' UNION ') 3240 queryList = [] 3241 queryList.append(SelectSQL(query.selectStr, query.fromStr, parts[0])) 3242 for ii in range(len(parts) - 1): 3243 querySql = parts[ii + 1] 3244 selectStr = querySql.split('FROM')[0].replace('SELECT', '') 3245 fromStr = querySql.split('FROM')[1].split('WHERE')[0] 3246 whereStr = querySql.split('WHERE')[1] 3247 queryList.append(SelectSQL(selectStr, fromStr, whereStr)) 3248 return queryList 3249 3250 else: 3251 return [query]
3252 3253 #--------------------------------------------------------------------------
3254 - def convertSelStr(self, vdfsSelStr, extFromStr, columns):
3255 """ 3256 """ 3257 params = vdfsSelStr.split(',') 3258 index = 0 3259 newParams = [] 3260 while index < len(params): 3261 if params[index].startswith("dbo.fIAUName"): 3262 vdfsTableName = params[index + 1].split('.ra')[0] 3263 param = ','.join(params[index:index + 5]) 3264 param = param.replace(vdfsTableName, extFromStr) 3265 index += 5 3266 else: 3267 param = params[index] 3268 param = param.split('.')[1] if '.' in param else param 3269 index += 1 3270 for column in columns: 3271 if param == column.name: 3272 default = column.tag['--/N'] if '--/N' in column.tag else (dbc.intDefault() if 'int' in column.dataType else dbc.realDefault()) 3273 param = "%s as %s" % (default, column.name) 3274 newParams.append(param) 3275 return ','.join(newParams)
3276
    def convertExternalCats(self, columns, field, tableType, query):
        """ Merge VDFS sources with the externally supplied catalogue for one
            field and convert the result into a binary-table HDU.

            For non-metadata table types the compound query is split on the
            ' SPLIT ' token into a VDFS half (run on releaseDB) and an
            external half (run on the VSA archive); rows are then joined via
            the cross-match dictionary from findVSAMatch().

            @return: Table HDU produced by createTableHdu() (empty data for
                     'MD' metadata table types).
        """
        dataNum = {}
        if 'MD' not in tableType:
            # @TODO redo.
            finalData = []

            # Each query part carries both halves separated by ' SPLIT '
            vdfsSelStr, extSelStr = query.selectStr.split(' SPLIT ')
            vdfsFromStr, extFromStr = query.fromStr.split(' SPLIT ')
            vdfsWhereStr, extWhereStr = (query.whereStr.split(' SPLIT ')
                                         if ' SPLIT ' in query.whereStr else
                                         ["", ""])
            # extID -> sourceID when source-matched, else
            # extID -> (sourceID, distance); see findVSAMatch()
            matchDict = self.findVSAMatch(query, field)
            # VDFS rows from the release database
            vdfsData = self.releaseDB.query(
                vdfsSelStr, vdfsFromStr, vdfsWhereStr)

            # sourceID -> row index into vdfsData
            indexDict = dict([(qr.sourceID, ii) for ii, qr in enumerate(vdfsData)])
            if not self.isSourceMatched:
                # Default-valued stand-in rows for unmatched external objects;
                # same ordering (dec) as extData so row ii lines up below.
                selectStr = self.convertSelStr(vdfsSelStr, extFromStr, columns)
                addExtData = self.archive.query(selectStr, extFromStr, extWhereStr, orderBy='dec')

            # External catalogue rows from the VSA
            extData = self.archive.query(extSelStr, extFromStr, extWhereStr, orderBy='dec')
            # merge - This is very inefficient
            # NOTE(review): assumes ed[0] is the external ID column — matches
            # the lookup keys returned by findVSAMatch(); confirm column order.
            for ii, ed in enumerate(extData):
                if ed[0] in matchDict:
                    if self.isSourceMatched:
                        vData = vdfsData[indexDict[matchDict[ed[0]]]]
                        finalData.append(list(vData) + list(ed))
                    else:
                        index, dist = matchDict[ed[0]]
                        vData = vdfsData[indexDict[index]]
                        finalData.append(list(vData) + [dist] + list(ed))
                elif not self.isSourceMatched:
                    # Unmatched: keep the external row, padded with defaults
                    # and a default match distance.
                    finalData.append(list(addExtData[ii]) + [dbc.realDefault()] + list(ed))

            # Now convert: column-wise arrays, with database default reals
            # (within 0.01% relative tolerance) mapped to NaN.
            # @FIXME: CephMode data in dec column
            data = dict(zip(columns, zip(*finalData)))
            for column in columns:
                if column.dataType == 'real' or column.dataType == 'float':
                    da = numpy.array(data[column])
                    dataNum[column] = numpy.where(numpy.abs(numpy.divide(numpy.subtract(da, dbc.realDefault()), dbc.realDefault())) < 0.0001, numpy.nan, da)
                else:
                    dataNum[column] = numpy.array(data[column])
        return self.createTableHdu(columns, tableType, dataNum)
3340 3341 #-------------------------------------------------------------------------- 3342
3343 - def createTableHdu(self, columns, tableType, dataNum):
3344 3345 descriptionDict = self.getDescriptions(columns) 3346 # @FIXME: no null if meta.main 3347 nullDict = self.getNulls(columns) 3348 tableHDU = pyfits.new_table([ 3349 pyfits.Column(name=column.name.upper(), 3350 format=EsoRelease.sqlToFitsDataType[column.dataType], 3351 unit=column.tag.get("--/U", ' '), 3352 null=nullDict[column.name], 3353 disp=column.tag.get("--/C", ' '), 3354 dim=descriptionDict[column.name], 3355 array=dataNum[column] if 'MD' not in tableType else None) 3356 for column in columns]) 3357 3358 # PyFITS limitation hack to get TUCD & TCOMM into the header 3359 ucdKeys = (key for key in tableHDU.header if key.startswith('TDISP')) 3360 for key in ucdKeys: 3361 tableHDU.header.rename_key(key, key.replace('TDISP', 'TUCD')) 3362 3363 commKeys = (key for key in tableHDU.header if key.startswith('TDIM')) 3364 for key in commKeys: 3365 tableHDU.header.rename_key(key, key.replace('TDIM', 'TCOMM')) 3366 [tableHDU.header.update('TINDX%d' % (ii + 1), True, 'Column is indexed', after='TCOMM%d' % (ii + 1)) 3367 for ii, column in enumerate(columns) if '--/B' in column.tag] 3368 3369 return tableHDU
3370 3371 #-------------------------------------------------------------------------- 3372
3373 - def getNulls(self, columns):
3374 """ 3375 """ 3376 excludNames = ["sourceid", "framesetid"] 3377 nullDict = {} 3378 for column in columns: 3379 isMain = ('--/C' in column.tag and 'meta.main' in column.tag['--/C'] or 3380 column.name.lower() in excludNames) 3381 nullDict[column.name] = (column.getDefaultValue() if 3382 EsoRelease.sqlToFitsDataType[column.dataType] in ['B', 'I', 'J', 'K'] 3383 and not isMain else None) 3384 return nullDict
3385 3386 #-------------------------------------------------------------------------- 3387
3388 - def getDescriptions(self, columns):
3389 """ 3390 """ 3391 descDict = {} 3392 for ii, column in enumerate(columns): 3393 if column.tag.get("--/W") and "&ESOtype&" in column.tag.get("--/W"): 3394 desc = column.tag.get("--/W").replace("&ESOtype&", descDict[columns[ii - 1].name].lower()) 3395 else: 3396 desc = column.tag.get("--/W") or column.tag.get("--/D", ' ') 3397 descDict[column.name] = textwrap.wrap(desc, 68)[0] 3398 return descDict
3399
3400 - def findVSAMatch(self, queryString, field):
3401 """ 3402 """ 3403 3404 3405 table = self.programme.getSourceTable() 3406 outgester = Outgester(self.releaseDB, tag=self.shareFileID) 3407 tableName = outgester.tablePath(table) 3408 query = SelectSQL("sourceID,ra,dec", tableName, 3409 where="frameSetID=%s" % field.fileID, orderBy='dec') 3410 # indexDict = dict([(qr, ii) for ii, qr in enumerate( 3411 # self.archive.query("sourceID", table, orderBy='dec'))]) 3412 3413 outPathName = os.path.join(self.sysc.sourcePosTablesPath(), 3414 table + self.shareFileID + ".dat") 3415 utils.ensureDirExist(self.sysc.sourcePosTablesPath()) 3416 outgester.outgestQuery(query, table, outPathName, 3417 createDtdFile=True) 3418 Logger.addMessage("Outgested source table...") 3419 # Get info from VSA 3420 table = self.mainTableName if self.mainTableName else queryString.fromStr.split('SPLIT')[1].split()[0] 3421 outgester = Outgester(self.archive, tag=self.shareFileID) 3422 tableName = outgester.tablePath(table) 3423 # mainID name from ProgrammeTable 3424 ptInfo = self.archive.query("tableID,sourceIDName", "ProgrammeTable", 3425 "programmeID=%s and tableName='%s'" 3426 % (self.programmeID, table), firstOnly=True) 3427 # radius from RequiredNeighbours 3428 matchRadius = 3600.*self.archive.query("joinCriterion", "RequiredNeighbours", 3429 "programmeID=%s AND extTableID=%s AND extProgID=%s and tableID=2" 3430 % (self.programmeID, ptInfo.tableID, self.programmeID), firstOnly=True, 3431 default=0.) 
3432 query = SelectSQL("%s,ra,dec" % ptInfo.sourceIDName, tableName, 3433 orderBy='dec') 3434 extOutPathName = os.path.join(self.sysc.sourcePosTablesPath(), 3435 table + self.shareFileID + ".dat") 3436 3437 outgester.outgestQuery(query, table, extOutPathName, 3438 createDtdFile=True) 3439 Logger.addMessage("Outgested external table...") 3440 3441 # Use crosscat_neighbours 3442 outputCatalogue = os.path.join(self.sysc.sourcePosTablesPath(), 3443 "output_%s.cat" % field.fileID) 3444 command = ("crosscat_neighbours %s %s binary large %s binary large %s " 3445 " index filter %s ascii swap" % 3446 (self.sysc.sourcePosTablesPath(), 3447 os.path.basename(outPathName), 3448 os.path.basename(extOutPathName), matchRadius, 3449 os.path.basename(outputCatalogue))) 3450 extp.run(command) # Use extp? 3451 Logger.addMessage("Matched tables...") 3452 # @TODO: if 1-1 match - only extID that is nearest match. 3453 if self.isSourceMatched: 3454 # Get uniqueIDs 3455 matchData = [(long(sourceID), long(extID), float(dist)) 3456 for sourceID, extID, dist in csv.File(outputCatalogue)] 3457 uniqueList = set([extID for _sID, extID, _dist in matchData]) 3458 sourceIDLookUpDict = {} 3459 for extID in uniqueList: 3460 minDist = min([dist for _sID, eID, dist in matchData if eID == extID]) 3461 sourceID = [sID for sID, eID, dist in matchData if eID == extID and dist == minDist][0] 3462 sourceIDLookUpDict[extID] = sourceID 3463 else: 3464 sourceIDLookUpDict = dict([(long(extID), (long(sourceID), float(dist))) 3465 for sourceID, extID, dist in csv.File(outputCatalogue)]) 3466 os.remove(outPathName) 3467 os.remove(extOutPathName) 3468 os.remove(outputCatalogue) 3469 return sourceIDLookUpDict
3470 #-------------------------------------------------------------------------- 3471
3472 - def addKey(self, hdu, column, value, passband=None):
3473 """ Adds a key card to the FITS header properly formatted. 3474 """ 3475 # First determine what the keyword is 3476 reqKeyword = column.tag.get("--/E") or column.tag.get("--/K") 3477 if reqKeyword: 3478 reqKeyword = reqKeyword.split('.')[-1] #: Remove IMAGE. etc. 3479 3480 keyword = "hierarch " 3481 if reqKeyword and not passband: 3482 keyword = reqKeyword 3483 elif not passband: 3484 keyword += column.name 3485 else: 3486 keyword += "%s_%s" % (passband, 3487 column.tag.get("--/E") or column.name) 3488 3489 if "$Id" in str(value) or reqKeyword and not passband \ 3490 and any(key in keyword.upper() for key in EsoRelease.reservedKeys): 3491 3492 return # These will break the FITS file otherwise 3493 3494 # Then parse the value to the correct format 3495 if column.name in ["fileName", "compFile", "catName"]: 3496 value = os.path.basename(value) 3497 3498 elif keyword == "OBJECT" \ 3499 and self.programmeID == self.sysc.scienceProgs.get("VMC"): 3500 3501 value = value.split('-')[0] 3502 3503 elif "mx.DateTime.DateTime" in str(type(value)): 3504 value = str(value) 3505 3506 elif isinstance(value, str): # Parsing HTML codes 3507 value = value.replace("&#044;", ',') 3508 3509 # Determine how much room with have for a description in the key card. 3510 # This method underestimates cases where values are floats. 3511 desc = column.tag.get("--/D") 3512 self.addCard(hdu.header, keyword, value, desc)
3513 3514 3515 #-------------------------------------------------------------------------- 3516
3517 - def addCard(self, header, keyword, value, desc):
3518 """ 3519 """ 3520 cardWidthMax = (73 if not keyword.startswith("hierarch") else 72) 3521 valWidthMin = (20 if not keyword.startswith("hierarch") else 3522 {str: 8, float: 20}.get(type(value), 0)) 3523 3524 valWidth = max(len(str(value)), valWidthMin) 3525 descWidth = max(cardWidthMax - max(len(keyword), 8) - valWidth, 0) 3526 desc = (textwrap.wrap(desc, descWidth)[0] if desc and descWidth else '') 3527 header.update(keyword.upper(), value, desc)
3528 #-------------------------------------------------------------------------- 3529
3530 - def addPbMetadata(self, hdu, passband, table, mfID, extNum=None):
3531 """ Adds FITS file metadata from contents of given table for the 3532 current passband and row. 3533 """ 3534 where = "multiframeID=%s" % mfID 3535 if extNum: 3536 where += " AND extNum=%s" % extNum 3537 for column, value in self.queryMetadata(table, where): 3538 self.addKey(hdu, column, value, passband)
3539 3540 #-------------------------------------------------------------------------- 3541
3542 - def addPrimaryFrameKeys(self, hdu, frame):
3543 """ Adds keys to given FITS header from just this frame. 3544 """ 3545 for metadataTable in ["Multiframe", "MultiframeEsoKeys"]: 3546 for column, value in self.queryMetadata( 3547 table=self._metaSchema[metadataTable], 3548 where="multiframeID=%s" % frame.fileID, 3549 esoOnly=True): 3550 3551 self.addKey(hdu, column, value)
3552 3553 #-------------------------------------------------------------------------- 3554
    def addStandardKeys(self, header, tableHdr, imageDetails, bandList,
                        tableType='NONE', field=None):
        """
        Add standard, fixed, ESO keys to the given FITS headers. This should be
        done last to override any existing values.

        @param header:       Primary HDU header to update.
        @param tableHdr:     Table-extension header to update.
        @param imageDetails: Image records (fileID, fileName, frameType,
                             filterName, ...) contributing to this product.
        @param bandList:     Passbands included in the product.
        @param tableType:    Product type code; types starting with 'S' are
                             band-merged, 'MD' metadata, 'FSC'/'VFSC'/'EFSC'
                             field source catalogues, 'CAT' catalogues.
        @param field:        Field record (with fileID), where relevant.
        """
        # Fixed observatory / instrument identification
        self.addCard(header, "ORIGIN", "ESO-PARANAL",
                     "European Southern Observatory")
        self.addCard(header, "DATE", utils.makeMssqlTimeStamp().split('.')[0],
                     "Date the file was written")
        self.addCard(header, "TELESCOP", "ESO-VISTA",
                     "ESO Telescope Designation")
        self.addCard(header, "INSTRUME", "VIRCAM", "Instrument name")

        # ESO expect FK5 rather than ICRS here
        if 'RADECSYS' in header and header['RADECSYS'] == "ICRS":
            header['RADECSYS'] = 'FK5'

        # Table types starting with 'S' are band-merged products
        isMultiband = tableType.startswith('S')
        # @TODO: Will this work in the case of metadata
        if isMultiband:
            # FILTER=MULTI plus one FILTERn / MAGLIMn pair per required filter
            filters = self.releaseDB.query(
                "shortName", "RequiredFilters as r,Filter as f",
                "r.filterID=f.filterID AND programmeID=%s ORDER BY r.filterID"
                % self.programmeID)
            self.addCard(header, "FILTER", "MULTI", "Band-merged data product")
            for index, filterName in enumerate(filters):
                self.addCard(header, "FILTER%s" % (index + 1), filterName.title(),
                             "Filter name")
                if 'MD' in tableType:
                    averageMagLim = numpy.median(self.magLim[(index + 1)])
                    self.addCard(header, "MAGLIM%s" % (index + 1), averageMagLim,
                                 "Median 5-sigma point source limiting AB mag")
                else:
                    # Copy the per-band AB mag limit from the table header;
                    # target header depends on product type.
                    isFilter = False
                    for image in imageDetails:
                        if image.filterName.lower() == filterName.lower():
                            if 'FSC' not in tableType and 'MD' not in tableType:
                                self.addCard(tableHdr, "MAGLIM%s" % (index + 1),
                                             tableHdr.get("%s_ABMAGLIM" % image.filterName.upper(),
                                                          dbc.realDefault()),
                                             "5-sigma point source limiting AB mag")
                            elif 'FSC' in tableType:
                                self.addCard(header, "MAGLIM%s" % (index + 1),
                                             tableHdr.get("%s_ABMAGLIM" % image.filterName.upper(),
                                                          dbc.realDefault()),
                                             "5-sigma point source limiting AB mag")
                            isFilter = True
                    if not isFilter:
                        # Filter required by the programme but absent here
                        self.addCard(header, "MAGLIM%s" % (index + 1), dbc.realDefault(),
                                     "5-sigma point source limiting AB mag")

        fileType = ("deepimage" if "deep" in imageDetails[0].frameType else
                    "image")
        frameType = imageDetails[0].frameType
        if 'MD' not in tableType:
            imageFormat = FileNameFormat(self.programme, self.releaseNum,
                                         os.path.join(self._fileDir, 'images'),
                                         fileType=fileType)
            # Deep-tile header supplies several values below; None for the
            # multi-band source table from shallow tiles case.
            tileHdr = (fits.open(imageFormat.getFilePath(imageDetails[0]))[1].header
                       if ("deep" in imageDetails[0].frameType or self.useOBdata)
                       and not self.skipDeepData else None)

            if not isMultiband:
                self.addCard(header, "EXPTIME",
                             round(tableHdr.get("EXPTIME", dbc.realDefault()), 1),
                             "Total integration time per pixel (s)")
                self.addCard(tableHdr, "EXPTIME",
                             round(tableHdr.get("EXPTIME", dbc.realDefault()), 1),
                             "Total integration time per pixel (s)")
            if tileHdr and 'FSC' not in tableType:
                self.addCard(header, "TEXPTIME",
                             tileHdr.get("TEXPTIME", dbc.realDefault()),
                             "Total integration time of all exposures (s)")

            # Accumulate observation time span and OB IDs across the images
            mjdObs = numpy.inf
            mjdEnd = 0
            obIDs = []
            pointArea = 1.5 if "tile" in imageDetails[0].frameType else 0.6
            pointArea = (self.calcPointArea(imageDetails)
                         if "mosaic" in imageDetails[0].frameType else
                         pointArea)
            for image in imageDetails:
                if "deep" not in image.frameType or self.skipDeepData:
                    # Need to do provenance calculations for shallows
                    # @TODO: replace with lookups
                    provInfo = Provenance(self.releaseDB, image.fileName)
                    mjdObs = min(provInfo.getMjdObs(), mjdObs)
                    mjdEnd = max(provInfo.getMjdEnd(), mjdEnd)
                    obIDs += provInfo.getObsIDList()
                else:  # get info from deep tile
                    imageFormat = FileNameFormat(self.programme, self.releaseNum,
                        os.path.join(self._fileDir, 'images'), fileType="deepimage")
                    deepHdr = fits.open(imageFormat.getFilePath(image))[1].header
                    mjdObs = min(deepHdr.get("MJD-OBS", numpy.inf), mjdObs)
                    mjdEnd = max(deepHdr.get("MJD-END", 0), mjdEnd)
                    for key, value in deepHdr.items():
                        if key.startswith("OBID"):
                            obIDs.append(value)
            self.addCard(header, "MJD-OBS", mjdObs, "Start of observations (days)")
            self.addCard(header, "MJD-END", mjdEnd, "End of observations (days)")
            if 'FSC' in tableType:
                self.addCard(header, "SKYSQDEG", pointArea, "Sky coverage in units of square degrees")
                # NOTE(review): MJD-END is written a second time here — looks
                # redundant; confirm before removing.
                self.addCard(header, "MJD-END", mjdEnd, "End of observations (days)")
            for index, obID in enumerate(utils.orderedSet(obIDs)):
                self.addCard(header, "OBID%s" % (index + 1), obID, "Observation block ID")
            self.addCard(header, "M_EPOCH", "deep" in frameType,
                         "TRUE if resulting from multiple epochs")
            # Provenance (PROVn) cards
            index = 0
            isCat = False

            if tableType == 'VFSC':  # Or?
                # Add in MPHOT table names....
                for mPhotFile in self.mPhotFilesDict[field.fileID]:
                    fName = os.path.basename(mPhotFile)
                    desc = self.getStdShortDesc(fName,
                                                "Originating MPHOT file")
                    # NOTE: 'filter' shadows the builtin; field 3 of the file
                    # name is the passband
                    filter = fName.split('_')[3]
                    if filter in bandList:
                        self.addCard(header, "PROV%s" % (index + 1), fName, desc)
                        index += 1

            for imageDetail in imageDetails:
                fileType = ("deepimage" if "deep" in imageDetail.frameType else
                            "image")
                # @TODO Use arcfile if not image.
                # self.arcFileOBDict()
                if 'FSC' in tableType and tableType != 'EFSC':
                    # Provenance is the originating catalogue file
                    if (fileType == "deepimage" or self.esoProgReq.incOBData == 1):
                        catFormat = FileNameFormat(self.programme, self.releaseNum,
                                                   os.path.join(self._fileDir, 'images'),
                                                   fileType="cat")
                        fName = catFormat.getFilePath(imageDetail)
                    else:
                        fName = fits.getCatalogue(imageDetail.fileName)
                    isCat = True
                    # Translate to the ESO archive (ADP) name
                    fName = self.getAdpName(os.path.basename(fName))
                elif tableType == 'EFSC':
                    # Provenance is the originating image file
                    if (fileType == "deepimage" or self.esoProgReq.incOBData == 1):
                        imageFormat = FileNameFormat(self.programme, self.releaseNum,
                                                     os.path.join(self._fileDir, 'images'),
                                                     fileType=fileType)
                        fName = imageFormat.getFilePath(imageDetail)
                    else:
                        fName = imageDetail.fileName
                    isCat = False
                    # Translate to the ESO archive (ADP) name
                    fName = self.getAdpName(os.path.basename(fName))
                else:
                    imageFormat = FileNameFormat(self.programme, self.releaseNum,
                                                 os.path.join(self._fileDir, 'images'),
                                                 fileType=fileType)
                    fName = imageFormat.getFilePath(imageDetail)
                    # Check to see if fileName exists
                    if not os.path.exists(fName):
                        # @FIXME: Get ADP name
                        #fNameList = self.arcFileOBDict[imageDetail.fileName]
                        fName = self.getAdpName(fName)
                # getAdpName() returns "" when no ADP entry is known; bail out
                # and flag the release as having missing ADP files.
                if fName == "":
                    self.missingAdpFiles = True
                    return

                fName = os.path.basename(fName)
                desc = self.getStdShortDesc(fName,
                                            "Originating source file" if isCat else
                                            "Originating image file")
                self.addCard(header, "PROV%s" % (index + 1), fName, desc)
                index += 1

        else:
            # 'MD' metadata table: query the overall time span directly
            mfIDs = [image.fileID for image in imageDetails]
            pointArea = 1.5 if "tile" in imageDetails[0].frameType else 0.6

            startEnd = self.releaseDB.query("min(mjdObs),max(mjdEnd)",
                "Multiframe", "multiframeID in (%s)" %
                ','.join(map(str, mfIDs)), firstOnly=True)
            self.addCard(header, "MJD-OBS", startEnd[0], "Start of observations (days)")
            self.addCard(header, "MJD-END", startEnd[1], "End of observations (days)")

            self.addCard(header, "SKYSQDEG", (self.nPointings * pointArea),
                         "Sky coverage in units of square degrees")

        if not isMultiband:
            self.addCard(header, "SINGLEXP", False,
                         "Does this image result from a single exposure")

            obsTech = ('IMAGE,JITTER' if header["OBSTECH"] == 'NONE' and 'mosaic' in imageDetails[0].frameType
                       else header["OBSTECH"])
            self.addCard(header, "OBSTECH", obsTech, "Technique of observation")

        # ESO Phase 3 data product category
        if 'MD' in tableType:
            self.addCard(header, "PRODCATG", "SCIENCE.MCATALOG".upper(), "Data product category")
        elif 'FSC' in tableType and not self.isRegion:
            self.addCard(header, "PRODCATG", "SCIENCE.CATALOGTILE".upper(), "Data product category")
        elif 'FSC' in tableType and self.isRegion:
            self.addCard(header, "PRODCATG", "SCIENCE.CATALOG".upper(), "Data product category")
        else:
            self.addCard(header, "PRODCATG", "science.srctbl".upper(), "Data product category")

        if isMultiband:
            self.addCard(header, "APMATCHD", True,
                         "TRUE if fluxes are aperture-matched")

        self.addCard(header, "IMATYPE", "TILE" if "tile" in frameType or "mosaic" in frameType else "PAWPRINT",
                     "Specific image type")

        self.addCard(header, "ISAMP", False,
                     "TRUE if image represents partially sampled sky")

        reference = (EsoRelease.refForProg[self.programme.getAcronym().upper()]
                     if not self.reference else self.reference)
        self.addCard(header, "REFERENC", reference, "Bibliographic reference")
        if 'MD' not in tableType and tileHdr:  # Only need to copy from deeps, shallows are already OK
            # @TODO: This can be removed once CU13 calculates it.
            self.addCard(header, "TL_RA", tileHdr.get("TL_RA", dbc.realDefault()),
                         "Tile RA [HHMMSS.TTT]")

            # @TODO: This can be removed once CU13 calculates it.
            self.addCard(header, "TL_DEC", tileHdr.get("TL_DEC", dbc.realDefault()),
                         "Tile Declination [DDMMSS.TTT]")

            # @TODO: This can be removed once CU13 calculates it.
            self.addCard(header, "TL_OFFAN", tileHdr.get("TL_OFFAN", dbc.realDefault()),
                         "Tile rotator offset angle [deg]")

            # @TODO: Remove once sorted out in CU13:
            self.addCard(header, "RA", tileHdr.get("RA", dbc.realDefault()),
                         "RA (J2000) at device centre")
            # @TODO: Remove once sorted out in CU13:
            self.addCard(header, "DEC", tileHdr.get("DEC", dbc.realDefault()),
                         "DEC (J2000) at device centre")

        if tableType == 'CAT':
            # Update primary header RA and DEC:
            # if stack - use CRVAL from extNum = 2;
            # if tile or mosaic use centralRA / centralDec
            raDec = self.primaryHeaderPositionDict[field.fileID]
            self.addCard(header, "RA", raDec[0],
                         "RA (J2000) at image centre")
            self.addCard(header, "DEC", raDec[1],
                         "DEC (J2000) at image centre")
            progIDList = self.progIDListDict[field.fileID]
            if len(progIDList) > 1:
                self.addCard(header, "PROG_ID", "MULTI", "ESO program identification")
                for idx, esoProgID in enumerate(progIDList):
                    self.addCard(header, "PROGID%s" % (idx + 1),
                                 esoProgID, "ESO program identification")
            self.addCard(header, "OBJECT", self.tileObjectDict[field.fileID],
                         "OB name")
        else:
            # Catalogue-style products carry no TEXPTIME
            if 'TEXPTIME' in header:
                del header['TEXPTIME']
            if 'TEXPTIME' in tableHdr:
                del tableHdr['TEXPTIME']

        if 'FSC' in tableType:
            # @FIXME tileObjectDict - larger for VVV - ones in SynopticSource...
            self.addCard(header, "OBJECT", self.tileObjectDict[imageDetails[0].fileID],
                         "OB name")

            progIDList = self.progIDListDict[field.fileID]

            if len(progIDList) > 1:
                self.addCard(header, "PROG_ID", "MULTI", "ESO program identification")
                for idx, esoProgID in enumerate(progIDList):
                    self.addCard(header, "PROGID%s" % (idx + 1),
                                 esoProgID, "ESO program identification")
        if 'MD' in tableType:
            # Union of programme IDs across all of the release's frame sets
            frameSetIDs = (queries.getFrameSetID(self.releaseDB, fieldID, self.programme)
                           for fieldID in self.fieldIDs)
            progIDList = set()
            for frameSetID in frameSetIDs:
                progIDList = progIDList.union(self.progIDListDict[frameSetID])
            if len(progIDList) > 1:
                self.addCard(header, "PROG_ID", "MULTI", "ESO program identification")
                for idx, esoProgID in enumerate(progIDList):
                    self.addCard(header, "PROGID%s" % (idx + 1),
                                 esoProgID, "ESO program identification")

        # Survey region naming
        if self.programmeID == self.sysc.scienceProgs.get("VMC"):
            self.addCard(header, "EPS_REG", "%s-%s" % (header.get("EPS_REG"),
                                                       header.get("OBJECT")),
                         "ESO public survey region name")
        elif field and self.isRegion and self.regionDict[field.fileID]:
            self.addCard(header, "EPS_REG", "%s/%s" % (header.get("EPS_REG"),
                                                       self.regionDict[field.fileID][0]),
                         "ESO public survey region name")
        else:
            self.addCard(header, "EPS_REG", header.get("EPS_REG"),
                         "ESO public survey region name")
        header.add_comment("Catalogue created by WFAU")

        ##### TABLE HEADER #####
        if 'MD' in tableType or 'FSC' in tableType:
            self.addCard(tableHdr, "EXTNAME", "PHASE3CATALOG", "FITS Extension name")
            self.addCard(header, "PHOTSYS", self.esoProgReq.photSys, "Photometric system")
        else:
            self.addCard(tableHdr, "EXTNAME",
                         ("source table" if isMultiband else "detection table"),
                         "FITS Extension name")
            self.addCard(tableHdr, "PHOTSYS", self.esoProgReq.photSys, "Photometric system")

        self.addCard(tableHdr, "INHERIT", True,
                     "Primary header keywords are inherited")

        if 'FSC' in tableType or tableType == 'SL':
            # Footprint corners (FPRAn/FPDEn): on the primary header for
            # FSC products, on the table header otherwise
            mfID = imageDetails[0].fileID
            footPrintInfo = astro.findCorners(mfID, extNum=2, db=self.releaseDB)
            for index, (ra, dec) in enumerate(footPrintInfo):
                if 'FSC' not in tableType:
                    self.addCard(tableHdr, "FPRA%s" % (index + 1), ra,
                                 "Footprint (J2000.0)")

                    self.addCard(tableHdr, "FPDE%s" % (index + 1), dec,
                                 "Footprint (J2000.0)")
                else:
                    self.addCard(header, "FPRA%s" % (index + 1), ra,
                                 "Footprint (J2000.0)")

                    self.addCard(header, "FPDE%s" % (index + 1), dec,
                                 "Footprint (J2000.0)")
        elif 'MD' not in tableType:
            # Depth / image-quality keys for detection & source tables
            if tileHdr:
                self.addCard(tableHdr, "ABMAGLIM",
                             tileHdr.get("ABMAGLIM", dbc.realDefault()),
                             "5-sigma point source limiting AB mag")
                self.addCard(tableHdr, "ABMAGSAT",
                             tileHdr.get("ABMAGSAT", dbc.realDefault()),
                             "Point source saturation limit (AB mags)")

                seeing = tileHdr.get("PSF_FWHM", dbc.realDefault())
            else:
                # Seeing in pixels times the pixel scale
                seeing = tableHdr.get("SEEING", 0) * tableHdr.get("xPixSize", 0)
            self.addCard(tableHdr, "PSF_FWHM", round(seeing, 4) or dbc.realDefault(),
                         "Spatial resolution (arcsec)")

            self.addCard(tableHdr, "ELLIPTIC",
                         round(tableHdr.get("ELLIPTIC", dbc.realDefault()), 4),
                         "Average ellipticity of point sources")
3912 # Delete any unnecessary ones 3913 3914 3915 #-------------------------------------------------------------------------- 3916
def getAdpName(self, fName):
    """ @return: The ESO Archive Data Product (ADP) name corresponding to
                 the given release file name, falling back to earlier
                 release numbers for "_er<N>"-tagged files, or an empty
                 string if no match is found in the ESO archive list.
        @rtype:  str
    """
    def findKey(name):
        """ Key under which name is stored, allowing for fpack (.fz)
            compressed copies in the ESO archive, else None.
        """
        for key in (name, name + '.fz'):
            if key in self.esoArchFileDict:
                return key
        return None

    # Check list of Archive Data Products from ESO Archive.
    key = findKey(fName)
    if key:
        return self.esoArchFileDict[key]

    # File may have been released under an earlier release number.
    if "_er" in fName:
        for relNo in range(self.releaseNum):
            key = findKey(
                fName.replace("_er%s" % self.releaseNum, "_er%s" % relNo))
            if key:
                return self.esoArchFileDict[key]

    return ""
3934 3935 #-------------------------------------------------------------------------- 3936 3937
def calcPointArea(self, imageDetails):
    """ Calculates the area of the pointing mosaic.

        @param imageDetails: Sequence of image records, each carrying a
                             fileID (multiframeID) attribute.

        @return: Largest detector area amongst the given images, in
                 square degrees (0. if the list is empty).
        @rtype:  float
    """
    if not imageDetails:
        return 0.

    # Area = pixel scale (arcsec) squared times the two axis lengths,
    # converted from square arcsec to square degrees.
    return max(self.releaseDB.query(
        "xPixSize*yPixSize*axis1Length*axis2Length/(3600.*3600.)",
        "MultiframeDetector as d,CurrentAstrometry as c",
        "d.multiframeID=c.multiframeID and d.extNum=c.extNum and "
        "d.multiframeID=%s" % image.fileID, firstOnly=True, default=0.)
        for image in imageDetails)
3949 3950 #--------------------------------------------------------------------------
def getStdShortDesc(self, value, description):
    """ @return: Abbreviated version of given description depending upon
                 the length of the value, for the case of standard FITS
                 header keywords.
        @rtype: str
    """
    # @TODO: Move into FitsUtils?
    # A FITS card is 80 characters; leave room for keyword and value.
    descWidth = max(65 - len(value), 0)
    if not descWidth:
        return ''

    # BUG FIX: textwrap.wrap() returns an empty list for a blank
    # description, so indexing [0] unconditionally raised IndexError.
    lines = textwrap.wrap(description, descWidth)

    return lines[0] if lines else ''
3961 3962 #-------------------------------------------------------------------------- 3963
def queryFileProducts(self, mfIDs=None, fileName=None, isPawPrint=False, fieldID=None):
    """ @return: By default a list of image details, including file paths,
                 to WFAU-produced deep tiles for the current programme.
                 If mfIDs are supplied then it returns image details for
                 the given mfIDs which could be of any frameType.
        @rtype: list(Query)
    """
    # Remember whether the caller fixed the mfID list; OB-level frames
    # are only appended below when they did not.
    areMfIDsFixed = mfIDs is not None

    fieldIDs = [fieldID] if fieldID else self.fieldIDs
    areMosaics = self.releaseDB.queryEntriesExist("RequiredMosaic",
        "programmeID=%s" % self.programmeID)
    # Build the frame-type filter: mosaics, OB tiles or deep tiles for
    # whole-tile products; non-tile stacks for paw-prints.
    if not isPawPrint:
        frameTypeSel = (("frameType LIKE 'mosaic%deep%stack%'") if areMosaics else
                        ("frameType LIKE 'tile%stack%'" if self.useOBdata else
                         "frameType LIKE 'tiledeep%stack%'"))
    else:
        frameTypeSel = ("frameType NOT LIKE 'tile%' AND frameType like '%stack%'" if self.useOBdata else
                        "frameType NOT LIKE 'tile%' AND frameType LIKE 'deep%stack%'")
    if mfIDs:
        # Caller-supplied multiframeIDs; negative (default) IDs dropped.
        fromStr = "Multiframe"
        whereStr = "multiframeID IN (%s)" % ','.join(map(str, [mfID for mfID in mfIDs if mfID > 0]))

    elif fileName:
        # Read in data - get fields
        # VIKING csv: frameSetID,multiframeID,rabase,decbase,dateobs,
        #             filterName,obsname,obsid,esoGrade,obStatus
        # Use multiframeID - get
        mfIDs = [mfID for _fsID, mfID, _rab, _deb, _dateO, _filtN, _obsN,
                 _obsID, _esoG, _obSt in csv.File(fileName)]
        # Need confidence frames too.
        mfIDs.extend(list(self.releaseDB.query(
            "confID", "Multiframe", "multiframeID IN (%s)"
            % ','.join(map(str, mfIDs)))))
        fromStr = Join(["Multiframe", "ProgrammeFrame"], "multiframeID")
        whereStr = frameTypeSel
        whereStr += " AND programmeID=%s AND %s" \
            % (self.programmeID, DepCodes.selectNonDeprecated +
               " AND Multiframe.multiframeID IN (%s)" % ','.join(map(str, mfIDs)))
        # set fieldIDs
        frameSetIDs = set(fsID for fsID, _mfID, _rab, _deb, _dateO, _filtN, _obsN,
                          _obsID, _esoG, _obSt in csv.File(fileName))
        fieldIDs = []
        for fsID in frameSetIDs:
            fieldIDs.append(
                queries.getFieldID(self.releaseDB, fsID, self.programme))
    else:
        # Default path: derive frames from the programme's required
        # products, optionally restricted to the requested fields.
        if fieldIDs:
            products = SelectSQL("productID",
                "RequiredMosaic" if areMosaics else "RequiredTile",
                where="fieldID IN (%s) AND programmeID=%s"
                % (','.join(map(str, fieldIDs)), self.programmeID))
        # ProductID selection only gets %tile%stack or deep%tile%stackconf
        # @TODO: replace with getMultiframeID......
        fromStr = Join(["Multiframe", "ProgrammeFrame"], "multiframeID")
        whereStr = (("frameType LIKE 'mosaic%deep%stack'") if areMosaics else
                    ("frameType LIKE 'tile%stack'" if self.useOBdata else
                     "frameType LIKE 'tiledeep%stack'"))
        whereStr += " AND programmeID=%s AND %s" \
            % (self.programmeID, DepCodes.selectNonDeprecated +
               (" AND productID IN (%s)" % products
                if fieldIDs else ""))
        # Re-query including each image's confidence frame partner.
        frames = self.releaseDB.query("Multiframe.multiframeID,confID",
                                      fromStr, whereStr)
        mfIDs = ','.join(["%s,%s" % (mfID, cfID) for mfID, cfID in frames])
        fromStr = Join(["Multiframe", "ProgrammeFrame"], "multiframeID")
        whereStr = "programmeID=%s AND Multiframe.multiframeID in (%s) AND %s" \
            % (self.programmeID, mfIDs, DepCodes.selectNonDeprecated)
    # Get ra,dec from productID - RequiredTile.
    frameType = 'stack' if isPawPrint else ('mosaic' if areMosaics else 'tile')
    # @TODO: Does not use fromStr, whereStr....

    if isPawPrint:
        if mfIDs:
            # If given confidence frames, select on confID instead.
            selAttr = "confID" if 'conf' in self.releaseDB.query("frameType", "Multiframe",
                "multiframeID in (%s)" % ','.join(map(str, mfIDs)), firstOnly=True) else "multiframeID"
            productIDs = self.releaseDB.query(
                "productID", "ProgrammeFrame as p,Multiframe as m",
                "m.multiframeID=p.multiframeID and m.%s in (%s) and programmeID=%s" %
                (selAttr, ','.join(map(str, mfIDs)), self.programmeID))
        else:
            # Stacks that are components of the required tiles.
            productIDs = self.releaseDB.query("s.productID", "RequiredTile as t,"
                "ProductLinks as l,RequiredStack as s",
                "t.programmeID=%s AND t.programmeID=l.programmeID AND "
                "t.productID=l.combiProductID AND l.combiProdType='tile' AND "
                "l.intProdType='stack' AND l.intProductID=s.productID AND "
                "s.programmeID=l.programmeID AND %s" % (self.programmeID,
                "t.fieldID in (%s)" % ','.join(map(str, fieldIDs))
                if fieldIDs else ""))
        if areMosaics:
            # NOTE(review): paw-prints + mosaics unimplemented; exits hard.
            Logger.addMessage("This does not work yet!")
            exit()

    else:
        productIDs = self.releaseDB.query("productID",
            "RequiredMosaic" if areMosaics else "RequiredTile",
            "programmeID=%s %s" % (self.programmeID,
            " AND fieldID in (%s)" % ','.join(map(str, fieldIDs))
            if fieldIDs else ""))
    vc = VistaCal()
    dateRange = vc.dateRange()
    reqFrames = self.releaseDB.query(EsoRelease.imageFields, fromStr, whereStr,
                                     orderBy="confID")
    # Map multiframeID -> productID (including confidence frames).
    dictMfIDProdID = queries.getIntStacks(self.releaseDB, self.programmeID,
        productIDs, frameType, dateRange, intFrameType=frameType,
        returnAsProdIDDict=True, getConfToo=True)
    # dictMfIDProdID.update(queries.getIntStacks(self.releaseDB, self.programmeID,
    #     productIDs, "tile", dateRange, intFrameType='tilestackconf',
    #     returnAsProdIDDict=True))
    # Add in products
    dictMfIDProdID.update(dict(self.releaseDB.query("p.multiframeID,productID",
        "ProgrammeFrame as p,Multiframe as m",
        "m.multiframeID=p.multiframeID AND programmeID=%s and productID>0 "
        "AND frameType like '%%stack'" % self.programmeID)))
    # Confidence don't always have correct productID, but images do
    dictMfIDProdID.update(dict(self.releaseDB.query("m.confID,productID",
        "ProgrammeFrame as p,Multiframe as m",
        "m.multiframeID=p.multiframeID AND programmeID=%s and productID>0 "
        "AND frameType like '%%stack'" % self.programmeID)))
    # productID -> (ra, dec) pointing centres from the requirements table.
    dictPosProdIDs = dict([(pID, (ra, dec)) for pID, ra, dec in
        self.releaseDB.query("productID,ra,dec", "Required%s" % frameType,
        "programmeID=%s" % self.programmeID)])
    # Replace frame positions with the required product centres; ra is
    # converted from degrees to hours (/15.) here.
    tiles = [EsoRelease.ImageField(*[fileName, mfID, (
        dictPosProdIDs[dictMfIDProdID[mfID]][0] / 15.),
        dictPosProdIDs[dictMfIDProdID[mfID]][1], filterName, frameType,
        confID, dictMfIDProdID[mfID]]) for fileName, mfID, _oldRa, _oldDec, filterName, frameType,
        confID in reqFrames]
    # Non product OB frames
    if not isPawPrint and self.useOBdata and not areMosaics and not areMfIDsFixed:
        OBframes = self.releaseDB.query(
            selectStr="m.fileName,m.multiframeID as fileID,(t.ra/15.) as ra,t.dec,"
                      "m.filterName, m.frameType, m.confID, %s as productID"
                      % dbc.intDefault(),
            fromStr="RequiredTile as t,ProgrammeFrame as p,"
                    "Multiframe as m,CurrentAstrometry as c",
            whereStr="t.fieldID in (%s) AND t.programmeID=%s AND "
            "t.programmeID=p.programmeID AND p.productID<0 AND "
            "p.multiframeID=m.multiframeID AND m.frameType like 'tile%%stack' "
            "AND m.frameType not like '%%deep%%' AND m.deprecated<128 AND "
            "m.multiframeID=c.multiframeID AND m.filterID=t.filterID AND "
            "dbo.fGreatCircleDist(c.centralRa,c.centralDec,t.ra,t.dec)"
            "<(60.*t.stackRadius) AND %s" %
            (','.join(map(str, fieldIDs)), self.programmeID,
             DepCodes.selectNonDeprecated))
        tiles += OBframes
        # Confidence frames of OB tiles inherit the tile's position.
        cnfIDPosDict = dict([(frame.confID, (frame.ra, frame.dec)) for frame in OBframes])
        cnfIDs = [frame.confID for frame in OBframes]
        if OBframes:
            OBConfFrames = self.releaseDB.query(
                selectStr="m.fileName,m.multiframeID,"
                          "m.filterName, m.frameType, m.confID, %s as prodID"
                          % dbc.intDefault(),
                fromStr="Multiframe as m",
                whereStr="multiframeID in (%s)" % (','.join(map(str, cnfIDs))))
            tiles += [EsoRelease.ImageField(*[frame.fileName, frame.multiframeID,
                cnfIDPosDict[frame.multiframeID][0], cnfIDPosDict[frame.multiframeID][1],
                frame.filterName, frame.frameType, frame.confID, frame.prodID])
                for frame in OBConfFrames]
    return tiles
4124 4125 #-------------------------------------------------------------------------- 4126
def queryJpegFiles(self, field, isConf=False):
    """ @return: List of jpeg files associated with this field.
        @rtype: list(tuple(int, str))
    """
    jpegFiles = self.releaseDB.query(
        selectStr="extNum, compFile",
        fromStr="MultiframeDetector",
        whereStr="multiframeID=%s" %
                 (field.confID if isConf else field.fileID),
        orderBy="extNum")

    # BUG FIX: was "len(jpegFiles) is 1" -- identity comparison with an
    # int literal is a CPython implementation detail; use equality.
    if len(jpegFiles) == 1:  # Strip unnecessary extNum data
        jpegFiles[0] = jpegFiles[0]._replace(extNum=None)

    return jpegFiles
4142 4143 #-------------------------------------------------------------------------- 4144
def queryMetadata(self, table, where, esoOnly=False, allRows=False):
    """ @return: A generator of pairs, column name, value, for the given
                 row of the given table (a list of such generators, one
                 per row, if allRows is set).
        @rtype: Generator(tuple(str, PyValue))
    """
    # Restrict to ESO-tagged columns when requested; MultiframeEsoKeys
    # is always restricted that way.
    if esoOnly or table.name == "MultiframeEsoKeys":
        columns = [col for col in table.columns if col.tag.get("--/E")]
    else:
        columns = table.columns

    selectStr = ', '.join(map(str, columns))
    if allRows:
        return [zip(columns, row)
                for row in self.releaseDB.query(selectStr, table, where)]

    return zip(columns,
               self.releaseDB.query(selectStr, table, where, firstOnly=True))
4160 #-------------------------------------------------------------------------- 4161
def setRegions(self):
    """ Populate the survey-region lookup tables for this programme:
        regionInfo plus per-fieldID region name and part name dicts.
    """
    self.regionInfo = self.archive.query("*", "SurveyRegion",
        "programmeID=%s" % self.programmeID)
    self.regionPartsFieldIDDict = {}
    self.regionFieldIDDict = {}

    # Match required products to survey region parts by sky proximity.
    prodTable = "Mosaic" if self.areMosaics else "Tile"
    regionPartInfo = self.archive.query(
        "distinct regionName,partName,fieldID",
        "SurveyRegionParts as srp,Required%s as r" % prodTable,
        "dbo.fGreatCircleDist(srp.centralRa,srp.centralDec,r.ra,r.dec)<(60.*%s)"
        % (self.sysc.maxRaDecExtentStackDeg))

    # fieldID -> part name, and fieldID -> region name respectively.
    self.regionPartsFieldIDDict = dict(
        (fieldID, partName) for _regName, partName, fieldID in regionPartInfo)
    self.regionFieldIDDict = dict(
        (fieldID, regName) for regName, _partName, fieldID in regionPartInfo)
4177 4178 #-------------------------------------------------------------------------- 4179
def calcTilePos(self, mfID, isPawPrint=False):
    """ @return: Tile position information for given multiframeID when ESO
                 information is missing (None for non-tiled surveys or
                 when no parent tile can be found).
        @rtype: TilePosition
    """
    # @TODO: also do mosaics
    sourceProdType = self.programme.getAttr("sourceProdType")
    if sourceProdType not in ('tile', 'mosaic'):
        return

    if isPawPrint:
        # A paw-print's position comes from the tile it was combined into.
        tileMfID = self.releaseDB.query("combiframeID", "Provenance",
            "multiframeID=%s" % mfID, firstOnly=True)
    else:
        tileMfID = mfID
    if not tileMfID:
        return

    posInfo = self.releaseDB.query(
        selectStr="centralRa AS ra, centralDec AS dec, posAngle",
        fromStr="CurrentAstrometry",
        whereStr="multiframeID=%d and extNum=2" % tileMfID,
        firstOnly=True)
    # Convert to the coordinate convention required by ESO.
    tileRa, tileDec = astro.convertCoordEso(posInfo.ra, posInfo.dec)

    TilePos = namedtuple("TilePosition",
                         "multiframeID tileRa tileDec tileOffAngle")

    return TilePos(mfID, tileRa, tileDec, posInfo.posAngle)
4212 4213 #-------------------------------------------------------------------------- 4214
def getSatLimit(self, mfID, extNum):
    """ @return: Saturation limit for given detector.
        @rtype: float
    """
    # On the load-server database the photometry lives in a separate
    # table that must be joined to the detection table.
    if self.releaseDB.isLoadDb:
        detTable = "%s AS p, %s" % (self.programme.getPhotometryTable(),
                                    self.programme.getDetectionTable())
        joinStr = (" AND p.multiframeID=d.multiframeID"
                   " AND p.extNum=d.extNum"
                   " AND p.seqNum=d.seqNum")
    else:
        detTable = self.programme.getAttr('detectionTable')
        joinStr = ''

    # Mean AB magnitude of detections whose peak height sits close to
    # the detector's average saturation level.
    return self.releaseDB.query(
        selectStr="AVG(aperMag3+vegaToAB)",
        fromStr=detTable + " AS d, Filter AS f, MultiframeDetector AS m",
        whereStr="ABS(pHeight + sky - 0.9*avSaturLevel) < 0.3*avSaturLevel"
                 " AND m.multiframeID=d.multiframeID AND m.multiframeID=%d"
                 " AND d.extNum=m.extNum AND m.extNum=%d" % (mfID, extNum)
                 + " AND f.filterID=d.filterID AND aperMag3>0" + joinStr,
        firstOnly=True,
        default=dbc.realDefault())
4240 4241 #-------------------------------------------------------------------------- 4242
def getAbMagLim(self, mfID, extNum):
    """ @return: AB magnitude limit for given detector.
        @rtype: float
    """
    # On the load-server database the photometry lives in a separate
    # table that must be joined to the detection table.
    if self.releaseDB.isLoadDb:
        detTable = self.programme.getPhotometryTable() + " AS p"
        detTable += ", " + self.programme.getDetectionTable()
        joinStr = " AND p.multiframeID=d.multiframeID"
        joinStr += " AND p.extNum=d.extNum"
        joinStr += " AND p.seqNum=d.seqNum"
    else:
        detTable = self.programme.getAttr('detectionTable')
        joinStr = ''
    # NOTE(review): joinStr is built here but never appended to the
    # magData query below (unlike getSatLimit) -- on a load-server DB
    # that looks like an unconstrained join; confirm intended.
    # Mosaics are extracted with SExtractor ('SEX'), everything else
    # with CASU software; their classification codes differ below.
    extractor = ('SEX'
                 if 'mosaic' in self.releaseDB.query("frameType", "Multiframe",
                 "multiframeID=%s" % mfID, firstOnly=True) else 'CASU')

    # Magnitudes and errors of stellar/galaxy-class detections on this
    # detector (class codes depend on the extractor used).
    magData = numpy.array(self.releaseDB.query(
        selectStr="aperMag3,aperMag3Err",
        fromStr=detTable + " AS d",
        whereStr="d.multiframeID=%s and d.extNum=%s and aperFlux3>0. and "
        "class in (%s)" % (mfID, extNum, ('-1,-9,-2' if extractor == 'CASU' else '-1,-2,-3'))))
    # Expected magnitude limit scaled from total exposure time, plus the
    # Vega-to-AB offset for this detector's filter.
    expML, vegaToAB = self.releaseDB.query(
        "(1.25*log10(totalExpTime)+oneSecMlVg) as expML,vegaToAB",
        "MultiframeDetector as d,Filter as f",
        "multiframeID=%s and extNum=%s and d.filterID=f.filterID" %
        (mfID, extNum), firstOnly=True)
    if magData.size > 0:
        # Robust estimate of the limiting magnitude from the data itself.
        mLimit = cu13.getRobustLimit(magData[:, 0], magData[:, 1], expML)
    else:
        # No usable detections: fall back to the database default value.
        mLimit = dbc.realDefault()
    return mLimit + vegaToAB
4275 #-------------------------------------------------------------------------- 4276
def checkifExternal(self):
    """ @return: Whether external products of the current catalogue type
                 exist for this programme up to this release number.
        @rtype: bool
    """
    whereStr = ("programmeID=%s AND releaseNum<=%s AND productType='%s'"
                % (self.programmeID, self.releaseNum, self.catType))

    return self.archive.queryEntriesExist("ExternalProduct", whereStr)
4283 #-------------------------------------------------------------------------- 4284
def checkifExternalVarTables(self):
    """ @return: Whether external variability tables exist for this
                 programme up to this release number.
        @rtype: bool
    """
    whereStr = ("programmeID=%s AND releaseNum<=%s AND tableName like "
                "'%%variables%%'" % (self.programmeID, self.releaseNum))

    return self.archive.queryEntriesExist("ExternalProduct", whereStr)
4291 #-------------------------------------------------------------------------- 4292
def updateDatabase(self):
    """ Update VSA to keep a record of what we are doing.
    """
    # Gather every file currently present in the release directories.
    # NOTE(review): the path is joined from the dict KEY but the listing
    # comes from the dict VALUE -- presumably keys are release-relative
    # paths and values the actual disk directories; confirm.
    allFilesInReleaseList = []
    for directory in self.directoryDict:
        dirName = self.directoryDict[directory]
        allFilesInReleaseList.extend(
            [(os.path.join(directory, fName))
             for fName in os.listdir(dirName)])
    allFilesInReleaseSet = set(allFilesInReleaseList)
    entriesExistER = self.archive.queryEntriesExist("EsoRelease",
        "programmeID=%s AND releaseNum=%s" %
        (self.programmeID, self.releaseNum))
    entriesExistERF = self.archive.queryEntriesExist("EsoReleaseFiles",
        "programmeID=%s AND releaseNum=%s" %
        (self.programmeID, self.releaseNum))
    if entriesExistERF:
        # Only insert files not already recorded for this release.
        curEsoFiles = set(self.archive.query("fileName", "EsoReleaseFiles",
            "programmeID=%s AND releaseNum=%s" %
            (self.programmeID, self.releaseNum)))
        newFiles = allFilesInReleaseSet.difference(curEsoFiles)
        # missFiles: recorded files no longer on disk; warning disabled.
        missFiles = curEsoFiles.difference(allFilesInReleaseSet)

        #if missFiles:
        #    Logger.addExceptionWarning("There are files in the database "
        #        "that are not in the release. Is this correct?")
        #    Logger.addMessage("Missing files...")
        #    for fileName in missFiles:
        #        Logger.addMessage("..." + fileName)
    else:
        newFiles = allFilesInReleaseSet
    # Insert or update the per-release summary row.
    # NOTE(review): inserts use self.esoProductTypes but updates use
    # self.newProdTypes -- confirm the asymmetry is intentional.
    esoReleaseRow = [self.programmeID, self.cuEventID, self.releaseNum,
                     self.esoProductTypes, self.releaseDB.database]
    if not entriesExistER:
        self.archive.insertData("EsoRelease", esoReleaseRow, enforcePKC=True)
    else:
        self.archive.update("EsoRelease", [("cuEventID", self.cuEventID), ("productTypes", self.newProdTypes),
            ("fromDb", "'" + self.releaseDB.database + "'")],
            where='programmeID=%s AND releaseNum=%s' % (self.programmeID, self.releaseNum))

    # Record each new file of this release.
    for filePath in newFiles:
        esoReleaseFilesRow = [self.programmeID, self.cuEventID,
                              self.releaseNum, filePath]
        self.archive.insertData("EsoReleaseFiles", esoReleaseFilesRow,
                                enforcePKC=True)
    # Track multi-epoch progress (row counter) per programme.
    if self.currentRowNumber:
        if self.archive.queryEntriesExist("EsoReleaseMEpoch",
                where="programmeID=%s" % self.programmeID):
            self.archive.update("EsoReleaseMEpoch", [("cuEventID", self.cuEventID),
                ("currentRowNumber", self.currentRowNumber)],
                where='programmeID=%s' % (self.programmeID))
        else:
            esoReleaseMEpochRow = [self.programmeID, self.cuEventID,
                                   self.currentRowNumber]
            self.archive.insertData("EsoReleaseMEpoch", esoReleaseMEpochRow,
                                    enforcePKC=True)
4350 #------------------------------------------------------------------------------ 4351 4352 4353 #------------------------------------------------------------------------------ 4354
def getPosition(IAUNameString):
    """ @return: (ra, dec) in decimal degrees parsed from an IAU-style
                 designation, e.g. "VMC J005311.25-723213.2".
        @rtype:  tuple(float, float)
    """
    coordStr = IAUNameString.split('J')[1]
    rah = coordStr[:6]
    if '-' in coordStr:
        sign = -1
        rahFrac, decStr = coordStr[7:].split('-')
    else:
        sign = 1
        # BUG FIX: the '+' branch previously omitted the [0] index, so
        # rahFrac was a list and int(rahFrac) raised a TypeError.
        rahFrac, decStr = coordStr[7:].split('+')
    decd = decStr[:6]
    nDPra = len(rahFrac)
    decFrac = decStr[7:]
    nDPdec = len(decFrac)
    # BUG FIX: previously only the fractional part was divided by 3600;
    # the whole seconds/arcseconds now sit inside the /3600. term too.
    ra = 15. * (int(rah[:2]) + int(rah[2:4]) / 60.
                + (int(rah[4:]) + int(rahFrac) / (10 ** nDPra)) / 3600.)
    dec = sign * (int(decd[:2]) + int(decd[2:4]) / 60.
                  + (int(decd[4:]) + int(decFrac) / (10 ** nDPdec)) / 3600.)
    return ra, dec
4375 #------------------------------------------------------------------------------ 4376
def getFiltIndex(obFilters, obIndex, index):
    """ @return: Index into the flattened per-OB filter list for the
                 filter at position index within OB number obIndex.
        @rtype:  int
    """
    # Offset by the number of filters in every preceding OB; for the
    # first OB (obIndex 0) the offset is zero.
    return index + sum(len(filters) for filters in obFilters[:obIndex])
#------------------------------------------------------------------------------

if __name__ == "__main__":
    # Command-line interface specification
    CLI.progArgs += [
        CLI.Argument("programmeID", "VVV"),
        CLI.Argument("esoReleaseNum", str(EsoRelease.releaseNum),
                     isValOK=lambda val: val.isdigit()),
        CLI.Argument("esoProductTypes", str(EsoRelease.esoProductTypes),
                     isValOK=lambda val: val.isdigit()),
        CLI.Argument("releaseDBName", "VMCv20120126")]
    CLI.progOpts += [
        CLI.Option('C', "isCutOut",
                   "Is cutout of larger release"),
        CLI.Option('N', "noExternNeighbour",
                   "Force release to rematch external tables"),
        CLI.Option('e', "exterProv",
                   "Add in external provenance"),
        CLI.Option('f', "fileName",
                   "Filename of CSV or similar file with list of framesets, multiframes",
                   "FILE"),
        CLI.Option('k', "skipDeeps", "Do not include deeps"),
        CLI.Option('l', "fields",
                   "comma separated list of highest layer of fieldIDs e.g. mosaic "
                   "fieldIDs if stacks and mosaics, tile fieldIDs if stacks and tiles or "
                   "stacks if just stacks",
                   "LIST"),
        CLI.Option('j', "jpegs", "include JPEGS for pawprints"),
        CLI.Option('p', "piInfo", "quickly get PI info only"),
        CLI.Option('q', "quick_run", "create small test files"),
        CLI.Option('r', "release",
                   "Don't create a new release but continue with this release number",
                   "NUMBER", isValOK=lambda val: val.isdigit())]

    # Parse the command line and announce the run.
    cli = CLI(EsoRelease, "$Revision: 10246 $")
    Logger.isVerbose = False
    Logger.addMessage(cli.getProgDetails())
    CLI.check64bitServer()

    # Configure the curation task from command-line arguments/options.
    cu = EsoRelease(cli.getArg("programmeID"), cli=cli)
    cu.copyJpegsPaws = cli.getOpt("jpegs")
    cu.skipDeepData = cli.getOpt("skipDeeps")
    cu.addExtProv = cli.getOpt("exterProv")
    if cli.getOpt("fields"):
        # @TODO:
        # Expand e.g. "1-3,7" into an explicit fieldID list.
        cu.fieldIDs = csv.values(utils.expandNumberRange(cli.getOpt("fields")))
    if cli.getOpt("fileName"):
        cu.fileName = cli.getOpt("fileName")
    if cli.getOpt("piInfo"):
        cu.piOnly = cli.getOpt("piInfo")
    cu.isQuickRun = cli.getOpt("quick_run")
    cu.releaseNum = int(cli.getArg("esoReleaseNum"))
    cu.esoProductTypes = int(cli.getArg("esoProductTypes"))
    cu.releaseDBName = cli.getArg("releaseDBName")
    cu.isTrialRun = cli.getOpt("test")
    cu.isCutOut = cli.getOpt("isCutOut")
    cu.noExternNeigh = cli.getOpt("noExternNeighbour")
    cu.run()

#------------------------------------------------------------------------------