Package helpers :: Module EsoRelease
[hide private]

Source Code for Module helpers.EsoRelease

   1  #! /usr/bin/env python 
   2  #------------------------------------------------------------------------------ 
   3  # $Id: EsoRelease.py 10246 2014-03-13 12:29:55Z NicholasCross $ 
   4  """ 
   5     Creates the FITS files necessary for the current ESO-SAF data release 
   6     requirements. These are currently copies of the deep tile images and 
   7     confidence maps; catalogues of these via detection table outgests; passband 
   8     merged source tables also via source table outgests and light curves from 
   9     a database query. 
  10   
  11     @author: R.S. Collins 
  12     @org:    WFAU, IfA, University of Edinburgh 
  13  """ 
  14  #------------------------------------------------------------------------------ 
  15  from __future__      import division, print_function 
  16  from future_builtins import map, zip 
  17   
  18  from   collections     import defaultdict, namedtuple 
  19  import hashlib 
  20  import math 
  21  from   multiprocessing import Process 
  22  import numpy 
  23  from   operator        import attrgetter, itemgetter 
  24  import os 
  25  import pyfits 
  26  import shutil 
  27  import textwrap 
  28  import copy 
  29   
  30  from   wsatools.CLI                 import CLI 
  31  import wsatools.CSV                     as csv 
  32  import wsatools.DbConnect.CommonQueries as queries 
  33  from   wsatools.DbConnect.CuSession import CuSession 
  34  import wsatools.DbConnect.DbConstants   as dbc 
  35  from   wsatools.DbConnect.DbSession import Join, PkLeftJoin, SelectSQL, Outgester, DbSession 
  36  import wsatools.DbConnect.Schema        as schema 
  37  import wsatools.FitsUtils               as fits 
  38  from   wsatools.Logger              import ForLoopMonitor, Logger 
  39  from   wsatools.ProvenanceUtils     import Provenance 
  40  from   wsatools.SystemConstants     import DepCodes 
  41  import wsatools.Utilities               as utils 
  42  import wsatools.Astrometry              as astro 
  43  from   wsatools.ObsCalendar         import VistaCal 
  44  import wsatools.ExternalProcess         as extp 
  45  import invocations.cu13.cu13            as cu13 
  46  #------------------------------------------------------------------------------ 
  47   
class FileNameFormat(object):
    """ Defines the ESO-SAF products file naming format.

        File names look like
        ``<prog>_er<relNum>_<pointing>_<filter>_<fileType>_<fileID>.<ext>``,
        with a simpler ``<prog>_er<relNum>_<filters>_<fileType>.<ext>`` form
        for metadata files.
    """
    fileType = "%s"                        #: File type string.
    formatStr = "%s_er%s_%%s_%s_%s_%%s.%s" #: File name format string.

    def __init__(self, programme, releaseNum, fileDir=None, fileType=fileType,
                 filters="%s"):
        """ Initialises file path for given programme / release number.

            @param programme:  Programme object; its acronym becomes the file
                               name prefix.
            @param releaseNum: ESO release number.
            @param fileDir:    Optional directory prefixed to every path.
            @param fileType:   File type string; "%s" leaves the type to be
                               filled in per file by getFilePath().
            @param filters:    Filter list string (metadata files only).
        """
        self.fileType = fileType
        progName = programme.getAcronym().lower()
        fileExt = self._getFileExt(fileType)
        # Metadata files have no per-field pointing/fileID components
        if 'MetaData' in fileType:
            self.formatStr = "%s_er%s_%s_%s.%s"
        self.formatStr %= (progName, releaseNum, filters, fileType, fileExt)
        self.fileDir = fileDir
        if fileDir:
            self.formatStr = os.path.join(fileDir, self.formatStr)

    #--------------------------------------------------------------------------

    def _getFileExt(self, fileType):
        """ @returns: File name extension for the current file type.
            @rtype: str
        """
        # Type not yet determined - keep the placeholder
        if fileType == "%s":
            return fileType

        # Images and confidence maps are Rice-compressed FITS
        if fileType in ["image", "conf", "deepimage", "deepconf"]:
            return "fits.fz"

        if fileType in ["jpeg"]:
            return "jpg"

        return "fits"

    #--------------------------------------------------------------------------

    def getFilePath(self, field, fileType=None, filterName=None,
                    offsetPos=None, extNum=None, useConf=False):
        """ @returns: Full path to file of given field.
            @rtype: str

            @param field:      Named tuple describing the file (may be None
                               for metadata formats).
            @param fileType:   Override file type; derived from the field's
                               frameType when omitted.
            @param filterName: Filter name; overridden by the field's own
                               filterName attribute when present.
            @param offsetPos:  Paw-print offset position suffix.
            @param extNum:     FITS extension number suffix.
            @param useConf:    Use the field's confID instead of its fileID.
        """
        # Metadata formats are fully resolved at construction time
        if not field and 'MetaData' in self.formatStr:
            return self.formatStr

        pointing = self._getPointing(field.ra, field.dec)
        if offsetPos is not None:
            pointing += "_off%s" % offsetPos
        elif "frameType" in field._fields and 'tile' in field.frameType:
            pointing += "_tile"
        elif "frameType" in field._fields and 'mosaic' in field.frameType:
            pointing += "_mosaic"
        if "filterName" in field._fields:
            filterName = field.filterName.lower()

        formatTuple = (pointing,)
        if filterName:
            formatTuple += (filterName,)

        # File type still a placeholder: derive it from the frame type
        if self.fileType == "%s":
            fileType = fileType or ("conf"
                if field.frameType.endswith("conf") else "image")
            if "deep" in field.frameType and fileType in ["conf", "image"]:
                fileType = "deep" + fileType
            formatTuple += (fileType,)

        # Unique identifier: image fileID or confidence-map confID,
        # optionally qualified by the extension number
        fileID = field.confID if useConf else field.fileID
        if extNum:
            formatTuple += ("%s_%s" % (fileID, extNum),)
        else:
            formatTuple += (fileID,)
        if self.fileType == "%s":
            formatTuple += (self._getFileExt(fileType),)
        return self.formatStr % formatTuple

    #--------------------------------------------------------------------------

    def _getPointing(self, ra, dec):
        """ Converts given ra and dec position, in decimal hours/degrees, to a
            sexagesimal string, e.g. (10.5, -30.5) -> "10h30-030d30".
        """
        # FIX: dec == 0 previously produced a "-" sign; zero is now "+"
        sign = "+" if dec >= 0 else "-"
        dec = abs(dec)
        formatStr = "%02dh%02d%s%03dd%02d"

        # %d truncates the fractional part, giving whole hours/minutes/degrees
        return formatStr % (ra, 60 * (ra - int(ra)), sign, dec, 60 * (dec - int(dec)))
class EsoRelease(CuSession):
    """ Creates the ESO-SAF release FITS files for the given programme.
    """
    #--------------------------------------------------------------------------
    # Class constants (access as EsoRelease.varName)

    #: Number of decimal places in ra
    radp = 2

    #: Number of decimal places in dec
    decdp = 2

    #: Curation Unit number
    cuNum = 30

    #: Fields to query for deep tile image details.
    imageFields = "fileName, Multiframe.multiframeID AS fileID, " \
                  "raBase as ra, decBase as dec, filterName, frameType, confID"

    #: Named tuple matching the columns selected by imageFields (+ productID).
    ImageField = namedtuple('ImageField',
        'fileName fileID ra dec filterName frameType confID productID')

    #: Bibliographic references for each programme.
    # @TODO: Add in VVV and VIKING
    refForProg = defaultdict(lambda : "2012A&A...548A.119C",
                             VMC="2011A&A...527A.116C",
                             VIDEO="2013MNRAS.428.1281J")

    #: These FITS header keywords should not be messed with.
    reservedKeys = ["BITPIX", "NAXIS", "XTENSION", "DATE", "PCOUNT", "GCOUNT",
                    "TFIELDS", "TCTYP", "TCRVL", "TCRPX", "BSCALE", "BZERO",
                    "CTYPE", "CRPIX", "CRVAL", "CD1_", "CD2_", "PV2_"]
    #: Dictionary of regions
    regionDict = dict()

    #: Translation for SQL type to PyFITS type.
    sqlToFitsDataType = {'tinyint': 'B', 'smallint': 'I', 'int': 'J',
                         'bigint': 'K', 'real': 'E', 'float': 'D'}
    # varchar(n) maps to an n-character FITS string column, for n = 1..256
    for ii in range(256):
        sqlToFitsDataType['varchar(%s)' % (ii + 1)] = '%sA' % (ii + 1)

    #: Progress messages keyed by product-type bit position.
    prodBits = {0:"Processing deep images",
                1:"Processing first calibration merged-band catalogues",
                2:"Processing final calibration phase 3 catalogues"}
    currentRowNumber = None
    numberRows = 0
    #--------------------------------------------------------------------------
    # Public member variable default values - set from command-line options
    piOnly = False          #: Create PI info only
    isQuickRun = False      #: Just a test run for the FITS headers?
    releaseNum = 1          #: ESO release number.
    fieldIDs = None         #: List of fields to release (if None, release all)
    fileName = None         #: FileName to derive fieldID list from
    addExtProv = False      #: Add in external provenance
    # useOBdata = False     #: Use OB data.
    copyJpegsPaws = False   #: Copy JEGS for pawprints
    skipDeepData = False    #: Skip deep data
    esoProductTypes = 1     #: Eso product types: deeps 1, deeps + srclists 2,
                            # deeps catalogues 3
    releaseDBName = "NONE"  #: Static release DB that release is done from
    completeFilters = False #: Only framesets with complete set of filters
    istTrialRun = False     #:
    newBits = set()         #:
    directoryDict = {}
    regionInfo = None
    delKeysDict = defaultdict(list)
    skipFieldList = []
    skipSources = False
    tileObjectDict = {}
    isCutOut = False
    mPhotFilesDict = defaultdict(list)
    hasNeighTable = False
    missingAdpFiles = False
    reference = None
    noExternNeigh = False
    bandUcdDict = {}
    ucdBandDict = {}
    #--------------------------------------------------------------------------
    # Private class parameters - should not be altered

    _autoCommit = True  # Overrides CuSession default

    #--------------------------------------------------------------------------
    # Private member variables

    _fileDir = ''
    """ Common directory path for the ESO product files.
    """
    _metaSchema = ''
    """ Dictionary of metadata table schema by name.
    """
    _newNameOfFile = None
    """ Dictionary of ESO-style file names of images referenced by
        their original archive names. These are the components of other images.
    """
    _progSchema = ''
    """ Dictionary of programme table schema by name.
    """

    #--------------------------------------------------------------------------
249 - def _onRun(self):
250 """ Prepares each product in turn required by the ESO-SAF for the 251 current programme. 252 """ 253 # @TODO: Get minimum ID values for all tables 254 # Get release requirements - from VSA 255 256 if self.sysc.isVSA(): 257 self.bandUcdDict = { 258 'z':'em.opt.I', 259 'y':'em.IR.NIR', 260 'j':'em.IR.J', 261 'h':'em.IR.H', 262 'ks':'em.IR.K'} 263 self.ucdBandDict = dict([(self.bandUcdDict[key], key) 264 for key in self.bandUcdDict]) 265 266 267 268 self.releaseDB = DbSession(self.releaseDBName, userName="ldservrw") 269 if not self.fieldIDs: 270 self.fieldIDs = [str(fieldID) for fieldID in self.releaseDB.query( 271 "distinct fieldID", "Required%s" % 272 self.programme.getAttr("sourceProdType"), orderBy="fieldID")] 273 entriesExistER = self.archive.queryEntriesExist("EsoRelease", 274 "programmeID=%s AND releaseNum=%s" % 275 (self.programmeID, self.releaseNum)) 276 self.newProdTypes = self.esoProductTypes 277 if entriesExistER: 278 curEsoInfo = self.archive.query("productTypes,fromDb", "EsoRelease", 279 "programmeID=%s AND releaseNum=%s" % 280 (self.programmeID, self.releaseNum), firstOnly=True) 281 self.checkEsoRelease(curEsoInfo) 282 self.esoProgReq = self.archive.query("*", "RequiredEsoRelease", 283 "programmeID=%s" % self.programmeID, firstOnly=True) 284 self.useOBdata = self.esoProgReq.incOBData == 1 285 286 self.areMosaics = self.archive.queryEntriesExist( 287 "RequiredMosaic", "programmeID=%s" % self.programmeID) 288 289 self.isRegion = self.esoProgReq.grouping == 'region' 290 if self.isRegion: 291 # @TODO: Add in region parts info for object... 
292 self.setRegions() 293 self._newNameOfFile = {} 294 295 self.setUpApdList() 296 # @TODO: 297 self.setTileObjDictNonRegions() 298 299 300 301 Logger.addMessage("Parsing schema...") 302 303 self._metaSchema = dict((table.name, table) 304 for table in schema.parseTables(self.sysc.metadataSchema())) 305 306 self._progSchema = dict((table.name, table) 307 for table in schema.parseTables(self.programme.getSchemaScript())) 308 309 # Check database against schema 310 Logger.addMessage("Checking that the database schema is correct...") 311 try: 312 self.releaseDB.checkSchema( 313 tableSchema=self._metaSchema.values() + self._progSchema.values(), 314 releasedOnly=not self.releaseDB.isLoadDb) 315 316 except schema.MismatchError as error: 317 raise EsoRelease.CuError(error) 318 # @TODO: ETWS to declare appropriate SystemConstant for operations use 319 self._fileDir = ("/disk14/www-data/%s-eso/" 320 % self.programme.getAcronym().lower() 321 if (os.getenv('USER') == 'scos' and 'test' not in self.releaseDB.database.lower()) else 322 os.path.join(self.sysc.testOutputPath("eso_" + os.getenv("USER") + "2"), 323 self.programme.getAcronym().lower())) 324 utils.ensureDirExist(self._fileDir) 325 utils.ensureDirExist(os.path.join(self._fileDir, 'images')) 326 Logger.addMessage("ESO DR%s files will be created in: %s" 327 % (self.releaseNum, self._fileDir)) 328 self.directoryDict['images'] = os.path.join(self._fileDir, 'images') 329 330 # self.tilePawPrintDict = self.getTilePawPrintDict() 331 332 self.getArcFileOBDict() 333 if self.esoProductTypes & 1 > 0 and not self.skipDeepData: 334 Logger.addMessage("Querying database for WFAU-products...") 335 # Get productTiles and OB tiles if needed. 
336 tiles = self.queryFileProducts(fileName=self.fileName) 337 self.setPrimaryHeaderPosition(tiles) 338 self.getProgIDListDict(tiles) 339 # @TODO: If mosaics get TL_RA, TL_DEC from Provenance - 340 if self.areMosaics: 341 #self.getTilePosDict(tiles) 342 # @FIXME: This needs to be changed 343 self.sadtTilePosDict = {} 344 for tile in tiles: 345 if tile.productID <= 5: 346 self.sadtTilePosDict[tile.fileID] = (22618.000, -44358.440, -90.000) 347 else: 348 self.sadtTilePosDict[tile.fileID] = (3748.984, -432755.080, -0.000) 349 Logger.addMessage("Selected tiles") 350 # Modified copies of deep tiles + paw-prints (images + maps + jpegs) 351 self.copyDeepTiles(tiles) 352 353 # Detection catalogue files for each of these tiles 354 self.outgestCatalogues(tiles) 355 356 # Initial Merged source catalogue files for each frame set of all tiles 357 # @TODO: temporary fix - get rid off asap 358 self.magLimSatDict = self.getMagLimSatDict() 359 if self.esoProductTypes & 2 > 0: 360 self.outgestSources(isSourceList=True) 361 362 # Final Merged source catalogue files for each frame set of all tiles 363 if self.esoProductTypes & 4 > 0: 364 self.srcCatSourceIDName = 'sourceID' 365 # @NOTE: just in rare cases for testing self.skipSources = True 366 self.outgestSources(isSourceList=False) 367 # Source light curve files for each frame set of all tiles 368 if self.esoProgReq.incVariables == 1: 369 self.outgestMultiEpoch() 370 self.outgestVariables() 371 if self.areExternalCats: 372 self.outgestExternalCats() 373 self.updateDatabase()
374 #-------------------------------------------------------------------------- 375
376 - def setUpApdList(self):
377 """ 378 """ 379 directory = "/disk47/sys/eso_adp_lists" 380 inputFileListRoot = self.programme.getAcronym().upper() 381 possFiles = [fName for fName in os.listdir(directory) 382 if inputFileListRoot in fName] 383 if possFiles: 384 latestDS = max([fName.split('.')[0].split('_')[1] for fName in possFiles]) 385 fileName = os.path.join(directory, "%s_%s.txt" % (inputFileListRoot, latestDS)) 386 self.esoArchFileDict = dict(line.split() for line in file(fileName).readlines()) 387 else: 388 raise EsoRelease.CuError("No ADP file list. Query the ESO archive " 389 "http://archive.eso.org/wdb/wdb/adp/phase3_vircam/form")
390 391 392 #-------------------------------------------------------------------------- 393
394 - def setTileObjDictNonRegions(self):
395 """ 396 """ 397 # VMC / non VMC 398 tileObjList = self.archive.query("m.multiframeID,obsName", 399 "ProgrammeFrame as p,Multiframe as m,MultiframeEsoKeys as e", 400 "p.programmeID=%s and p.multiframeID=m.multiframeID and " 401 "m.frameType like 'tile%%stack' and m.multiframeID=e.multiframeID" 402 % self.programmeID) 403 self.tileObjectDict = dict([(mfID, (obsName.split('-')[0].upper() 404 if self.programmeID == self.sysc.scienceProgs.get("VMC") else 405 obsName)) for mfID, obsName in tileObjList])
406 407 #-------------------------------------------------------------------------- 408 409 410 #-------------------------------------------------------------------------- 411
412 - def setPrimaryHeaderPosition(self, tiles):
413 """ Produces dictionary of primary header positions. 414 """ 415 mfIDs = [tile.fileID for tile in tiles] 416 frameType = self.releaseDB.query("frameType", "Multiframe", 417 "multiframeID=%s" % mfIDs[0], firstOnly=True) 418 selectStr = "multiframeID," 419 if 'tile' in frameType or 'mosaic' in frameType: 420 selectStr += "centralRa,centralDec" 421 else: 422 selectStr += "crvalx,crvaly" 423 424 raDecs = self.releaseDB.query(selectStr, "CurrentAstrometry", 425 "multiframeID in (%s) and extNum=2" % ','.join(map(str, mfIDs))) 426 self.primaryHeaderPositionDict = dict([(mfID, (ra, dec)) 427 for mfID, ra, dec in raDecs])
428 429 #-------------------------------------------------------------------------- 430 431
432 - def getTilePosDict(self, tiles):
433 """ Dictionary of tile ra/dec positions 434 @TODO: Position angle? 435 """ 436 # @FIXME: Some horrible cockup in positions 437 438 439 440 tileMfIDs = [tile.fileID for tile in tiles if 'conf' not in tile.fileName] 441 sadtTilePos = self.releaseDB.query( 442 "distinct combiframeID,tileRa,tileDec,tileOffAngle", 443 "Multiframe as m,Provenance as v", 444 "combiframeID in (%s) and v.multiframeID=m.multiframeID" 445 " and m.tileRa>0. and tileDec>=-900000" % ','.join(map(str, tileMfIDs)) 446 ) 447 self.sadtTilePosDict = {} 448 for multID in tileMfIDs: 449 tpList = [(tlRa, tlDec, tloa) 450 for mfID, tlRa, tlDec, tloa in sadtTilePos if mfID == multID] 451 if len(tpList) == 1: 452 453 self.sadtTilePosDict[multID] = tpList.pop() 454 # Sort out conf 455 tileMfIDs = [tile.fileID for tile in tiles if 'conf' in tile.fileName] 456 sadtTilePos = self.releaseDB.query( 457 "distinct md.confID,m.tileRa,m.tileDec,m.tileOffAngle", 458 "Multiframe as md,Provenance as v,Multiframe as m", 459 "v.combiframeID=md.multiframeID and md.confID in (%s) and v.multiframeID=m.multiframeID" 460 " and m.tileRa>0. and m.tileDec>=-900000" % ','.join(map(str, tileMfIDs))) 461 for multID in tileMfIDs: 462 tpList = [(tlRa, tlDec, tloa) 463 for mfID, tlRa, tlDec, tloa in sadtTilePos if mfID == multID] 464 if len(tpList) == 1: 465 self.sadtTilePosDict[multID] = tpList.pop()
466 467 468 #-------------------------------------------------------------------------- 469
470 - def getTilePawPrintDict(self):
471 """ Dictionary of tiles for each pawprint in releaseDB 472 """ 473 # Do not include deeps! Deep tiles can have shallow pawprints in them 474 # as well as deep. 475 frameTypeSelTile = queries.getFrameSelection('tile', noDeeps=True, 476 alias='mt', selType='%stack') 477 frameTypeSelStack = queries.getFrameSelection('stack', noDeeps=True, 478 alias='mp', selType='%stack') 479 480 return dict(self.releaseDB.query( 481 "mp.fileName,mt.fileName", 482 "Provenance as v,Multiframe as mp,Multiframe as mt,ProgrammeFrame as p", 483 "mp.multiframeID=v.multiframeID and mt.multiframeID=v.combiframeID " 484 "and mt.multiframeID=p.multiframeID and p.programmeID=%s and " 485 "%s AND %s" % (self.programmeID, frameTypeSelTile, frameTypeSelStack)))
486 487 #-------------------------------------------------------------------------- 488
489 - def getArcFileOBDict(self):
490 """ Dictionary of arcFiles for each OB frame 491 """ 492 # Do not include deeps! Deep tiles can have shallow pawprints in them 493 # as well as deep. 494 # @FIXME: Get all normal arcFiles 495 496 # Do tiles and stacks separately 497 # Confidence more difficult too. 498 499 self.arcFileOBDict = defaultdict(list) 500 501 frameTypeSel = queries.getFrameSelection('stack', noDeeps=True, 502 alias='m', selType='%stack') 503 504 arcFileList = self.releaseDB.query( 505 "m.fileName,m2.arcFile,mc.arcFile", 506 "Multiframe as m,ProgrammeFrame as p,Provenance as v," 507 "Multiframe as m2, Multiframe as mc", 508 "m.multiframeID=p.multiframeID and p.programmeID=%s and %s and " 509 "v.combiframeID=m.multiframeID and v.multiframeID=m2.multiframeID " 510 "and mc.multiframeID=m2.confID" % 511 (self.programmeID, frameTypeSel)) 512 uniqueFileNames = set([fName for fName, _aFile, _aFileC in arcFileList]) 513 for fileName in uniqueFileNames: 514 self.arcFileOBDict[fileName] = [aFile for fName, aFile, _aFileC in arcFileList 515 if fName == fileName] 516 self.arcFileOBDict[fits.getConfMap(fileName)] = [aFileC for fName, _aFile, aFileC in arcFileList 517 if fName == fileName] 518 frameTypeSel = queries.getFrameSelection('tile', noDeeps=True, 519 alias='m', selType='%stack') 520 521 arcFileList = self.releaseDB.query( 522 "m.fileName,m2.arcFile,mc.arcFile", 523 "Multiframe as m,ProgrammeFrame as p,Provenance as v," 524 "Provenance as v2,Multiframe as m2, Multiframe as mc", 525 "m.multiframeID=p.multiframeID and p.programmeID=%s and %s and " 526 "v.combiframeID=m.multiframeID and v.multiframeID=v2.combiframeID " 527 "and v2.multiframeID=m2.multiframeID and mc.multiframeID=m2.confID" % 528 (self.programmeID, frameTypeSel)) 529 uniqueFileNames = set([fName for fName, _aFile, _aFileC in arcFileList]) 530 for fileName in uniqueFileNames: 531 self.arcFileOBDict[fileName] = [aFile for fName, aFile, _aFileC in arcFileList 532 if fName == fileName] 533 
self.arcFileOBDict[fits.getConfMap(fileName)] = [aFileC for fName, _aFile, aFileC in arcFileList 534 if fName == fileName]
535 536 537 #-------------------------------------------------------------------------- 538 539
540 - def getMagLimSatDict(self):
541 """ 542 """ 543 544 productType = 'mosaic' if self.areMosaics else 'tile' 545 fieldIDStr = (" and r.fieldID in (%s)" % ','.join(map(str, self.fieldIDs)) 546 if self.fieldIDs else "") 547 548 mfIDs = self.releaseDB.query( 549 "m.multiframeID", 550 "ProgrammeFrame as p,Required%s as r,Multiframe as m" % productType, 551 "r.programmeID=%s and r.programmeID=p.programmeID and " 552 "r.productId=p.productID and p.multiframeID=m.multiframeID and " 553 "m.deprecated=0 and m.frameType like '%%%s%%stack'%s" 554 % (self.programmeID, productType, fieldIDStr)) 555 # For VVV, more complicated - want colours from single-epoch... 556 # @TODO: Best get all mfIDs for now and sort out in next release 557 if self.programmeID == 120: 558 mfIDs = self.releaseDB.query( 559 "m.multiframeID", 560 "ProgrammeFrame as p,Multiframe as m", 561 "p.programmeID=%s and p.multiframeID=m.multiframeID and " 562 "m.deprecated in (0,68) and m.frameType like '%%%s%%stack'" 563 % (self.programmeID, productType)) 564 # Check releaseDB 565 fileNameList = self.releaseDB.query( 566 selectStr="fileName+'s.fz',catName", 567 fromStr="Multiframe", 568 whereStr="multiframeID in (%s)" % ','.join(map(str, mfIDs))) 569 missingAdpFiles = [] 570 for fileName, catName in fileNameList: 571 origName = os.path.basename(fileName) 572 if not origName in self.esoArchFileDict: 573 missingAdpFiles.append(origName + '\n') 574 origName = os.path.basename(catName) 575 if not origName in self.esoArchFileDict: 576 missingAdpFiles.append(origName + '\n') 577 if missingAdpFiles: 578 outFileName = "/disk47/sys/eso_adp_lists/missing%s.list" % self.programme.getAcronym() 579 file(outFileName, "w").writelines(missingAdpFiles) 580 # raise EsoRelease.CuError("Missing ADP Files: %s " % outFileName) 581 releaseDBRes = self.releaseDB.query( 582 selectStr="multiframeID,abMagLim,abSatMag", 583 fromStr="MultiframeDetector", 584 whereStr="multiframeID in (%s) and abMagLim>0 and abSatMag>0" 585 % ','.join(map(str, mfIDs))) 586 587 missMfIDs = 
set(mfIDs).difference(set(rdb.multiframeID for rdb in releaseDBRes)) 588 # Check mainDB 589 mainDBRes = self.archive.query( 590 selectStr="multiframeID,abMagLim,abSatMag", 591 fromStr="MultiframeDetector", 592 whereStr="multiframeID in (%s) and abMagLim>0 and abSatMag>0" 593 % ','.join(map(str, missMfIDs))) 594 missMfIDs = missMfIDs.difference(set(mdb.multiframeID for mdb in mainDBRes)) 595 # Calc 596 calcRes = [] 597 progress = ForLoopMonitor(missMfIDs) 598 Logger.addMessage("Calculating new magnitude limits and saturation limits...") 599 for mfID in missMfIDs: 600 abMagLim = self.getAbMagLim(mfID, 2) 601 abSatMag = self.getSatLimit(mfID, 2) 602 # update VSA... 603 self.archive.update("MultiframeDetector", 604 [("abMagLim", abMagLim), ("abSatMag", abSatMag)], 605 where="multiframeID=%s and extNum=2" % mfID) 606 calcRes.append((mfID, abMagLim, abSatMag)) 607 progress.testForOutput("") 608 allRes = releaseDBRes + mainDBRes + calcRes 609 return dict([(mfID, (abMagLim, abSatMag)) 610 for mfID, abMagLim, abSatMag in allRes])
611 612 #-------------------------------------------------------------------------- 613
614 - def areExternalCats(self):
615 """ Are there external catalogues? 616 """ 617 # archive or releaseDB? Dangers both ways... 618 619 return self.archive.queryEntriesExist( 620 "ExternalProduct", "programmeID=%s AND productType != 'mosaic'" % 621 self.programmeID)
622 #-------------------------------------------------------------------------- 623
    def copyDeepTiles(self, tiles):
        """
        Copy WFAU-produced deep tile image and confidence map FITS files to the
        ESO-SAF transfer staging area. Modifying headers to meet the ESO-SAF
        specifications.

        @param tiles: Sequence of ImageField-style named tuples describing the
                      tile images and confidence maps to release.
        """
        # Look-up tables: tile details by multiframeID, and the image
        # multiframeID owning each confidence map
        fieldOfID = dict((tile.fileID, tile) for tile in tiles)

        imageOfConf = \
            dict((tile.confID, tile.fileID) for tile in tiles
                 if 'conf' not in tile.frameType)
        fnFormat = \
            FileNameFormat(self.programme, self.releaseNum,
                           os.path.join(self._fileDir, 'images'))

        filesToCopy = []
        Logger.addMessage("%s tiles " % len(tiles))
        newFilePathDict = dict([(tile, fnFormat.getFilePath(tile)) for tile in tiles])
        for tile in tiles:
            self._newNameOfFile[tile.fileName] = newFilePathDict[tile]

            if self.isRegion:
                # NOTE(review): getRegion is called with an explicit `self`
                # argument in addition to the bound instance - confirm the
                # helper's signature expects this.
                self.regionDict[tile.fileID] = self.getRegion(self,
                    raDec=[tile.ra, tile.dec], useFrameSetID=False)
                self.tileObjectDict[tile.fileID] = self.programme.getAcronym().upper() + "/" + self.regionDict[tile.fileID][1]

        for tile in tiles:
            newFilePath = newFilePathDict[tile]
            isDeep = 'deep' in newFilePath
            # isProduct = tile.productID > 0
            self.updateDirDict(newFilePath)
            # Pair each DB-registered JPEG with its ESO-style destination path
            jpegFiles = [(jpegFile.compFile,
                          fnFormat.getFilePath(tile, fileType="jpeg",
                                               extNum=jpegFile.extNum))
                         for jpegFile in self.queryJpegFiles(tile)]
            jpegFilesConf = []
            if tile.confID > 0:
                jpegFilesConf = [(jpegFile.compFile,
                                  fnFormat.getFilePath(tile, fileType="jpeg",
                                                       extNum=jpegFile.extNum,
                                                       useConf=True))
                                 for jpegFile in self.queryJpegFiles(tile,
                                                                     isConf=True)]
                # Replace the numeric confID with the ESO file name of the
                # confidence map, for use when writing headers
                tile = tile._replace(
                    confID=fnFormat.getFilePath(fieldOfID[tile.confID]))

            # Skip files already staged from a previous (partial) run
            if not os.path.exists(newFilePath):
                filesToCopy.append((tile, newFilePath, jpegFiles,
                                    jpegFilesConf))

            # Query database for deep paw-print components of deep tiles
            # IF IS PRODUCT...
            ppOffsets = None
            if isDeep:
                # Maps pawprint multiframeID (or confID) -> offset position
                ppOffsets = dict(self.releaseDB.query(
                    selectStr="m2.%s, offsetPos" % ("confID"
                        if 'conf' in tile.frameType else "multiframeID"),
                    fromStr="Provenance AS pv, Multiframe AS m1, Multiframe AS m2"
                            ", RequiredStack AS s, ProgrammeFrame AS p",
                    whereStr=("m1.confID=%s" % tile.fileID
                              if 'conf' in tile.frameType else
                              "m1.multiframeID=%s" % tile.fileID) +
                             " AND m1.multiframeID=pv.combiframeID"
                             " AND pv.multiframeID=m2.multiframeID"
                             " AND m2.frameType LIKE '%%stack'"
                             " AND p.multiframeID=m2.multiframeID"
                             " AND p.programmeID=%s AND p.programmeID=s.programmeID"
                             " AND p.productID=s.productID"
                             % (self.programmeID)))
            # elif not isDeep and self.useOBdata:
            #     # @TODO: Don't use OB pawprints
            #     # @TODO: How many layers?
            #     # OB confs do not have productIDs
            #     # Use Provenance - multiframeID, use offsetX,offsetY to get offPos
            #     offsets = self.releaseDB.query(
            #         selectStr="m.multiframeID, m.offsetX, m.offsetY",
            #         fromStr="Provenance AS pv,Multiframe AS m",
            #         whereStr="pv.multiframeID=m.multiframeID AND "
            #                  "pv.combiframeID = %s" % tile.fileID)
            #     ppOffsets = dict((mfID, astro.getOffSetPos(offX, offY))
            #                      for mfID, offX, offY in offsets)
            if ppOffsets:
                # @TODO: Move this to a single query earlier - more efficient
                mfIDs = [key for key in ppOffsets]
                for deepPawPrint in self.queryFileProducts(mfIDs=mfIDs, isPawPrint=True):
                    newFilePath = fnFormat.getFilePath(deepPawPrint,
                        offsetPos=ppOffsets[deepPawPrint.fileID])
                    self.updateDirDict(newFilePath)
                    jpegFilesConf = []
                    # Pawprint JPEGs are only copied on request
                    jpegFiles = [(jpegFile.compFile,
                                  fnFormat.getFilePath(deepPawPrint,
                                      fileType="jpeg",
                                      offsetPos=ppOffsets[deepPawPrint.fileID],
                                      extNum=jpegFile.extNum))
                                 for jpegFile in self.queryJpegFiles(deepPawPrint)
                                 if self.copyJpegsPaws]
                    self._newNameOfFile[deepPawPrint.fileName] = newFilePath
                    fieldOfID[deepPawPrint.fileID] = deepPawPrint

                    if 'conf' not in deepPawPrint.frameType:
                        imageOfConf[deepPawPrint.confID] = deepPawPrint.fileID
                    # Assign name of confidence map file to image file details
                    if deepPawPrint.confID > 0:
                        jpegFilesConf = [(jpegFile.compFile,
                                          fnFormat.getFilePath(deepPawPrint,
                                              fileType="jpeg",
                                              offsetPos=ppOffsets[deepPawPrint.fileID],
                                              extNum=jpegFile.extNum, useConf=True))
                                         for jpegFile in self.queryJpegFiles(deepPawPrint,
                                                                             isConf=True)
                                         if self.copyJpegsPaws]
                    if deepPawPrint.fileID in ppOffsets and deepPawPrint.confID in fieldOfID:
                        deepPawPrint = deepPawPrint._replace(
                            confID=fnFormat.getFilePath(
                                field=fieldOfID[deepPawPrint.confID],
                                offsetPos=ppOffsets[deepPawPrint.fileID]))
                    else:
                        deepPawPrint = deepPawPrint._replace(
                            confID=None)

                    if not os.path.exists(newFilePath):
                        filesToCopy.append((deepPawPrint, newFilePath, jpegFiles,
                                            jpegFilesConf))

        Logger.addMessage("Copying %s WFAU-products..." % len(filesToCopy))
        filesToCopy.sort(key=itemgetter(1))  # sort by file name
        progress = ForLoopMonitor(filesToCopy)
        for field, newFilePath, jpegFiles, jpegFilesConf in filesToCopy:
            # Copy JPEGs first in case of error
            newJpegFiles = self.copyJpegFiles(jpegFiles, field)
            newJpegFilesConf = [newjp for _oldjp, newjp in jpegFilesConf]
            # Copy WFAU-deep-product
            shutil.copy2(field.fileName, newFilePath)

            # Modify header in an exception trap to ensure completion
            try:
                # 25% overhead but I/O limited, so parallelisation doesn't help
                fits.uncompressFits([newFilePath])
                if not os.path.exists(newFilePath):
                    os.rename(newFilePath.split('.fz')[0], newFilePath)

                # Take header metadata from image for confidence files
                if field.frameType.endswith("conf"):
                    field = field._replace(confID=field.fileName)
                    field = field._replace(fileID=imageOfConf[field.fileID])
                    field = field._replace(
                        fileName=fieldOfID[field.fileID].fileName)

                self.modHeaders(field, newFilePath, newJpegFiles, newJpegFilesConf)

            except:
                # If incomplete, then remove, so will be recreated next time
                if os.path.exists(newFilePath):
                    os.remove(newFilePath)

                raise

            progress.testForOutput()
787 788 #-------------------------------------------------------------------------- 789
790 - def copyJpegFiles(self, jpegFiles, field):
791 """ Copies the given list of JPEG files to the ESO release staging 792 area. 793 """ 794 newJpegFiles = [] 795 for oldJpegPath, newJpegPath in jpegFiles: 796 newJpegFiles.append(newJpegPath) 797 if not os.path.exists(newJpegPath): 798 if oldJpegPath == dbc.charDefault(): 799 raise EsoRelease.CuError("JPEG entries in the database are" 800 " incomplete for multiframeID %s (filename: %s)" 801 % (field.fileID, field.fileName)) 802 803 shutil.copy2(oldJpegPath, newJpegPath) 804 return newJpegFiles
805 806 #-------------------------------------------------------------------------- 807
808 - def jpegCards(self, jpegFile, asNum):
809 """ @return: A list of FITS header card details for the given JPEG file. 810 @rtype: list(tuple(str, PyValue, str)) 811 """ 812 md5Sum = hashlib.md5(open(jpegFile, 'rb').read()).hexdigest() 813 jpegFile = os.path.basename(jpegFile) 814 815 return [('ASSON%d' % asNum, jpegFile, 816 self.getStdShortDesc(jpegFile, 'Name of associated file')), 817 818 ('ASSOC%d' % asNum, 'ancillary.preview'.upper(), 819 'Category of associated file'), 820 821 ('ASSOM%d' % asNum, md5Sum, self.getStdShortDesc(md5Sum, 822 'MD5 checksum of associated file'))]
823 824 #-------------------------------------------------------------------------- 825
826 - def getProgIDListDict(self, tiles):
827 """ 828 For each product, get a list of PROG_IDs from inputs 829 830 MultiframeEsokeys.obsProgID 831 """ 832 self.progIDListDict = defaultdict(list) 833 if self.areMosaics: 834 prodMfIDList = [tile.fileID for tile in tiles if 'conf' not in tile.fileName] 835 progIDList = self.releaseDB.query( 836 "distinct v.combiframeID,obsProgID", 837 "Provenance as v,MultiframeEsoKeys as e", 838 "combiframeID in (%s) and v.multiframeID=e.multiframeID" 839 % ','.join(map(str, prodMfIDList))) 840 841 for multiframeID in prodMfIDList: 842 self.progIDListDict[multiframeID] = [opi for mfID, opi in progIDList 843 if mfID == multiframeID] 844 # Sort out conf 845 prodMfIDList = [tile.fileID for tile in tiles if 'conf' in tile.fileName] 846 progIDList = self.releaseDB.query( 847 "distinct m.confID,obsProgID", 848 "Multiframe as m,Provenance as v,MultiframeEsoKeys as e", 849 "v.combiframeID=m.multiframeID and m.confID in (%s) and v.multiframeID=e.multiframeID" 850 % ','.join(map(str, prodMfIDList))) 851 for multiframeID in prodMfIDList: 852 self.progIDListDict[multiframeID] = [opi for mfID, opi in progIDList 853 if mfID == multiframeID] 854 else: 855 # Go down to lower level... 856 prodMfIDList = [tile.fileID for tile in tiles 857 if 'conf' not in tile.fileName and '_dp' in tile.fileName] 858 # deep tiles 859 if len(prodMfIDList) > 0: 860 progIDList = self.releaseDB.query( 861 "distinct v.combiframeID,obsProgID", 862 "Provenance as v,Provenance as v2,Multiframe as m,MultiframeEsoKeys as e", 863 "v.combiframeID in (%s) and v.multiframeID=v2.combiframeID and " 864 "m.multiframeID=v2.combiframeID and m.frameType='deepstack' and " 865 "v2.multiframeID=e.multiframeID" % ','.join(map(str, prodMfIDList))) 866 for multiframeID in prodMfIDList: 867 self.progIDListDict[multiframeID] = [opi for mfID, opi in progIDList 868 if mfID == multiframeID] 869 # deep stacks. 
870 progIDList = self.releaseDB.query( 871 "distinct v2.combiframeID,obsProgID", 872 "Provenance as v,Provenance as v2,Multiframe as m,MultiframeEsoKeys as e", 873 "v.combiframeID in (%s) and v.multiframeID=v2.combiframeID and " 874 "m.multiframeID=v2.combiframeID and m.frameType='deepstack' and " 875 "v2.multiframeID=e.multiframeID" % ','.join(map(str, prodMfIDList))) 876 deepStackMfIDs = set([mfID for mfID, _opi in progIDList]) 877 for multiframeID in deepStackMfIDs: 878 self.progIDListDict[multiframeID] = [opi for mfID, opi in progIDList 879 if mfID == multiframeID] 880 # tiles. 881 prodMfIDList = [tile.fileID for tile in tiles 882 if 'conf' not in tile.fileName and '_dp' not in tile.fileName] 883 if len(prodMfIDList) > 0: 884 progIDList = self.releaseDB.query( 885 "distinct v.combiframeID,obsProgID", 886 "Provenance as v,MultiframeEsoKeys as e", 887 "combiframeID in (%s) and v.multiframeID=e.multiframeID" 888 % ','.join(map(str, prodMfIDList))) 889 for multiframeID in prodMfIDList: 890 self.progIDListDict[multiframeID] = [opi for mfID, opi in progIDList 891 if mfID == multiframeID] 892 # deep tile conf 893 894 prodMfIDList = [tile.fileID for tile in tiles 895 if 'conf' in tile.fileName and '_dp' in tile.fileName] 896 if len(prodMfIDList) > 0: 897 progIDList = self.releaseDB.query( 898 "distinct m.confID,obsProgID", 899 "Provenance as v,Provenance as v2,Multiframe as m,Multiframe as m2," 900 "MultiframeEsoKeys as e", 901 "v.combiframeID=m.multiframeID and m.confID in (%s) and " 902 "v.multiframeID=v2.combiframeID and " 903 "m2.multiframeID=v2.combiframeID and m2.frameType='deepstack' and " 904 "v2.multiframeID=e.multiframeID" % ','.join(map(str, prodMfIDList))) 905 for multiframeID in prodMfIDList: 906 self.progIDListDict[multiframeID] = [opi for mfID, opi in progIDList 907 if mfID == multiframeID] 908 909 # deep stack conf 910 prodMfIDList = [tile.fileID for tile in tiles 911 if 'conf' not in tile.fileName and '_dp' in tile.fileName] 912 if len(prodMfIDList) > 
0: 913 progIDList = self.releaseDB.query( 914 "distinct m2.confID,obsProgID", 915 "Provenance as v,Provenance as v2,Multiframe as m2," 916 "MultiframeEsoKeys as e", 917 "v.combiframeID in (%s) and " 918 "v.multiframeID=v2.combiframeID and " 919 "m2.multiframeID=v2.combiframeID and m2.frameType='deepstack' and " 920 "v2.multiframeID=e.multiframeID" % ','.join(map(str, prodMfIDList))) 921 deepConfMfIDs = set([mfID for mfID, _opi in progIDList]) 922 for multiframeID in deepConfMfIDs: 923 self.progIDListDict[multiframeID] = [opi for mfID, opi in progIDList 924 if mfID == multiframeID] 925 # tile conf 926 prodMfIDList = [tile.fileID for tile in tiles 927 if 'conf' in tile.fileName and '_dp' not in tile.fileName] 928 if len(prodMfIDList) > 0: 929 progIDList = self.releaseDB.query( 930 "distinct m.co3nfID,obsProgID", 931 "Multiframe as m,Provenance as v,MultiframeEsoKeys as e", 932 "v.combiframeID=m.multiframeID and m.confID in (%s) and v.multiframeID=e.multiframeID" 933 % ','.join(map(str, prodMfIDList))) 934 for multiframeID in prodMfIDList: 935 self.progIDListDict[multiframeID] = [opi for mfID, opi in progIDList 936 if mfID == multiframeID] 937 # ANd use frameset - mergelog 938 frameSetIDs = ','.join( 939 str(queries.getFrameSetID(self.releaseDB, fieldID, self.programme)) 940 for fieldID in self.fieldIDs) 941 mergeLog = self._progSchema[self.programme.getMergeLogTable()] 942 mfIDCols = ','.join([column.name for column in mergeLog.columns if "mfID" in column.name]) 943 mergeLogInfo = self.releaseDB.query("frameSetID,%s" % mfIDCols, 944 self.programme.getMergeLogTable(), "frameSetID in (%s)" % frameSetIDs) 945 for mlInfo in mergeLogInfo: 946 frameSetID = mlInfo[0] 947 progIDList = set() 948 for mfID in mlInfo[1:]: 949 progIDList = progIDList.union(self.progIDListDict[mfID]) 950 self.progIDListDict[frameSetID] = list(progIDList)
951 952 953 #-------------------------------------------------------------------------- 954 955
    def modHeaders(self, field, filePath, jpegFiles, jpegFilesConf):
        """
        Modifies headers of the given WFAU-produced FITS file to meet the
        ESO-SAF specifications.

        @param field:         Product details; attributes fileID, fileName,
                              frameType and confID are read here.
        @param filePath:      Path to the staged FITS file to modify in place.
        @param jpegFiles:     Staged preview JPEG paths for the image.
        @param jpegFilesConf: Staged preview JPEG paths for the confidence map.
        """
        delKeysList = []
        # Product category is inferred from frame type / file-name conventions
        isConf = field.frameType.endswith("conf")
        isPawPrint = '_off' in filePath
        isMosaic = '_mosaic' in filePath
        isDeep = '_deep' in filePath
        # isTile = '_tile' in filePath
        Logger.addMessage("Modifying header for " + filePath, alwaysLog=False)

        # Keys to delete from the primary HDU / extension HDUs respectively
        delPrimKeys = set([] if isPawPrint else ["MJD-OBS", "MJD-END"])
        delExtKeys = set([])

        # Get data from archive
        primaryInfo = self.releaseDB.query(
            selectStr="instrument, filterName, object, creationDate, equinox"
                      ", raDecSys, expTime, obsProgID, dprTech, njitter, noffsets"
                      ", nustep, sadtSurveyID, tileRa, tileDec, tileOffAngle"
                      ", project, obsName, detNdit, casuVers",
            fromStr=Join(["Multiframe", "MultiframeEsoKeys"], "multiframeID"),
            whereStr="Multiframe.multiframeID=%s" % field.fileID,
            firstOnly=True)

        extensInfo = self.releaseDB.query(
            selectStr="MultiframeDetector.extNum, photZPCat, photZPErrCat"
                      ", totalExpTime, abMagLim, seeing, avStellarEll"
                      ", xPixSize, yPixSize, deprecated",
            fromStr=Join(["MultiframeDetector", "CurrentAstrometry"],
                         ["multiframeID", "extNum"]),
            whereStr="MultiframeDetector.multiframeID=%s" % field.fileID,
            orderBy="MultiframeDetector.extNum")

        # @TODO: is this necessary now?
        provInfo = Provenance(self.releaseDB, field.fileName)

        # EPS_REG / OBJECT values: VMC uses the OB name prefix, regional
        # programmes append the region name from self.regionDict
        epsReg = primaryInfo.project
        objectStr = (primaryInfo.obsName.split('-')[0].upper()
                     if self.programmeID == self.sysc.scienceProgs.get("VMC") else
                     primaryInfo.obsName)
        if self.programmeID == self.sysc.scienceProgs.get("VMC"):
            epsReg += '-' + objectStr

        elif field and self.isRegion and self.regionDict[field.fileID]:
            epsReg += '/' + self.regionDict[field.fileID][0]
            objectStr = primaryInfo.project + "/" + self.regionDict[field.fileID][1]

        self.tileObjectDict[field.fileID] = objectStr
        # @FIXME: Remove code duplication and tidy from this point on.

        # Median per-pixel integration time over non-deprecated extensions
        tIntpPix = numpy.median([ext.totalExpTime for ext in extensInfo
                                 if ext.deprecated == 0])

        primaryCards = [
            ("ORIGIN", "ESO-PARANAL", "European Southern Observatory")
            , ("DATE",
               fits.formatDateTime(primaryInfo.creationDate, isDiffSec=False),
               "Date the file was written")

            , ('TELESCOP', "ESO-VISTA", 'ESO Telescope designation')
            , ('INSTRUME', primaryInfo.instrument, 'Instrument name')
            , ('FILTER', primaryInfo.filterName, 'Filter name')
            , ('OBJECT', objectStr, 'Target designation')
            , ('EQUINOX', primaryInfo.equinox, 'Standard FK5 (years)')
            , ('RADECSYS', 'FK5', 'Coordinate reference frame')
            , ('EXPTIME', round(tIntpPix, 1),
               'Total integration time per pixel (s)')

            , ('TEXPTIME', float(provInfo.getSumTotalExpTime() or tIntpPix),
               'Total integration time of all exposures (s)')

            , ('MJD-OBS', provInfo.getMjdObs(), 'Start of observations (days)')
            , ('MJD-END', provInfo.getMjdEnd(), 'End of observations (days)')
            , ("DATE-OBS",
               fits.formatDateTime(provInfo.getDateObs()),
               "Date the observation was started (UTC)")]

        # PROG_ID is 'MULTI' plus PROGIDn cards when several programmes fed in
        if len(self.progIDListDict[field.fileID]) > 1:
            primaryCards += [('PROG_ID', 'MULTI', 'ESO programme identification')]
            for ii, opi in enumerate(self.progIDListDict[field.fileID]):
                primaryCards += [('PROGID%s' % (ii + 1), opi,
                                  'ESO programme identification')]
        else:
            primaryCards += [('PROG_ID', self.progIDListDict[field.fileID][0],
                              'ESO programme identification')]

        for ind, oblock in enumerate(provInfo.getObsIDList()):
            primaryCards.append(
                ('OBID%d' % (ind + 1), oblock, 'Observation block ID'))

        primaryCards += [
            ('M_EPOCH', True, 'TRUE if resulting from multiple epochs'),
            ('SINGLEXP', False, 'TRUE if resulting from single exposure'),
            ('NCOMBINE', provInfo.getNoRawFrames(),
             '# of combined raw science data files')]

        # index = 1
        # PROVn cards: raw frames for shallow products, previous-layer
        # products (renamed to their ESO names) for deep products
        provFileList = provInfo.getPrevLayer() if isDeep else self.arcFileOBDict[field.fileName]
        finalProvList = []
        for provFile in provFileList:
            # Use origFile
            if not isDeep:
                pvFileName = provFile
                desc = self.getStdShortDesc(pvFileName, 'Originating raw science file')
                finalProvList.append((pvFileName, desc))

            else:
                pvFileName = provFile.name
                if isConf:
                    pvFileName = fits.getConfMap(pvFileName)
                if (self.sysc.deepSuffix not in pvFileName and
                        self.sysc.tileSuffix not in pvFileName and self.sysc.isVSA()):
                    # Intermediate product: expand to its raw constituents
                    pvFileNameList = self.arcFileOBDict[pvFileName]
                    for pvFileName in pvFileNameList:
                        desc = self.getStdShortDesc(pvFileName, 'Originating raw science file')
                        finalProvList.append((pvFileName, desc))

                else:
                    # Released product: use its new (ESO) file name
                    pvFileName = self._newNameOfFile[pvFileName]
                    pvFileName = os.path.basename(pvFileName)
                    desc = self.getStdShortDesc(pvFileName, 'Originating science product file')
                    pvFileName = pvFileName + "s.fz" if "s.fz" not in pvFileName else pvFileName
                    finalProvList.append((pvFileName, desc))

        # De-duplicate and order the provenance cards
        finalProvList = sorted(list(set(finalProvList)))
        for index, (pvFileName, desc) in enumerate(finalProvList):
            primaryCards.append(('PROV%d' % (index + 1), pvFileName, desc))

        obsTech = ('IMAGE,JITTER' if isMosaic and primaryInfo.dprTech == 'NONE'
                   else primaryInfo.dprTech.replace('&#044;', ','))
        primaryCards.append(('OBSTECH', obsTech, "Technique of observation"))

        if not isConf:
            primaryCards.append(('PRODCATG',
                ('science.image'.upper() if not isPawPrint else 'science.MEFimage'.upper()),
                'Data product category'))

        primaryCards += [
            ("IMATYPE", "TILE" if not isPawPrint else "PAWPRINT",
             "Specific image type"),

            ("ISAMP", isPawPrint,
             "TRUE if image represents partially sampled sky"),

            ("FLUXCAL", "ABSOLUTE", 'Certifies the validity of PHOTZP')]

        # primaryCards.append(('PROCSOFT', softVers, 'Reduction software version'))
        reference = EsoRelease.refForProg[self.programme.getAcronym().upper()]
        primaryCards.append(('REFERENC', reference, 'Bibliographic reference'))

        # Images carry pointers to their confidence map and preview JPEGs
        if not isConf:
            asson = os.path.basename(field.confID)
            desc = self.getStdShortDesc(asson, 'Name of associated file')
            primaryCards += [('ASSON1', asson, desc),
                ('ASSOC1', 'ancillary.weightmap'.upper(), 'Category of associated file')]

        if not isPawPrint and not isConf:
            primaryCards += \
                self.jpegCards(jpegFiles[0], asNum=2)
            primaryCards += \
                self.jpegCards(jpegFilesConf[0], asNum=3)

        # Tile position: take metadata values if valid, otherwise recompute;
        # mosaics always use the pre-computed SADT positions
        tilePos = self.calcTilePos(field.fileID, isPawPrint)
        tileRa = (primaryInfo.tileRa if (primaryInfo.tileRa > 0. and not isMosaic) else
                  tilePos.tileRa)

        tileDec = (primaryInfo.tileDec if (primaryInfo.tileDec >= -900000. and not isMosaic) else
                   tilePos.tileDec)

        tileOffAngle = round(primaryInfo.tileOffAngle
                             if primaryInfo.tileOffAngle >= -900000. else
                             tilePos.tileOffAngle, 3)
        if isMosaic:
            tileRa, tileDec, tileOffAngle = self.sadtTilePosDict[field.fileID]

        # @FIXME: Why are we rounding the precision? Is this a requirement of
        #         ESO or is it just to save space? If the latter it won't work
        #         because Python always allocates maximum precision for floats.
        primaryCards += [
            ("TL_RA", round(tileRa, 3), "Tile RA [HHMMSS.TTT]"),
            ("TL_DEC", round(tileDec, 3), "Tile Declination [DDMMSS.TTT]"),
            ("TL_OFFAN", tileOffAngle, "Tile rotator offset angle [deg]")]

        # @FIXME: regional set up...
        primaryCards.append(
            ('EPS_REG', epsReg, 'ESO public survey region name'))

        # Jitter/offset cards only make sense for a homogeneous jitter pattern
        if provInfo.isSameJitterPattern() and primaryInfo.njitter > 0:
            primaryCards += [
                ("NJITTER", primaryInfo.njitter, "Number of jitter positions"),
                ("NOFFSETS", primaryInfo.noffsets, "Number of offset positions"),
                ("DIT", primaryInfo.expTime, "Integration Time")]
        else:
            delPrimKeys.update(["NJITTER", "NOFFSETS", "DIT", "NDIT"])
            delKeysList.extend(["NJITTER", "NOFFSETS", "DIT", "NDIT"])

        primaryCards += [
            ("NUSTEP", primaryInfo.nustep, "Number of microstep positions"),
            ("ORIGFILE",
             os.path.basename(field.confID if isConf else field.fileName),
             "Original WFAU filename")]

        if "deep" not in field.frameType:
            primaryCards += [("NDIT", primaryInfo.detNdit, "Number of dits")]

        # Per-extension cards: photometric calibration in the requested system
        jpegAsNum = 1 if isConf else 2
        extensionCards = {}
        for extNo, extInfo in enumerate(extensInfo):  # , jpegFile in zip(extensInfo, jpegFiles):
            # Astrometry should be automatic
            photZPEso = extInfo.photZPCat
            photZPEso += 2.5 * math.log10(primaryInfo.expTime)
            if self.esoProgReq.photSys == 'AB':
                vegaToAB = self.releaseDB.query("vegaToAB", "Filter", "shortName='%s'"
                                                % primaryInfo.filterName, firstOnly=True)
                photZPEso += vegaToAB

            abSatLimit = self.getSatLimit(field.fileID, extInfo.extNum)
            # Convert seeing from pixels to arcsec via the pixel scale
            seeing = extInfo.seeing
            seeing *= math.sqrt(extInfo.xPixSize * extInfo.yPixSize)
            abMagLim = extInfo.abMagLim if extInfo.abMagLim > 0. else self.getAbMagLim(field.fileID, extInfo.extNum)

            extCards = [
                ("EXTVER", extInfo.extNum - 1, "FITS Extension version"),
                ("INHERIT", True, "Primary header keywords are inherited"),
                ("BUNIT", "ADU", "Physical unit of array values"),
                ("PHOTZP", round(photZPEso, 3), "Photometric zeropoint"),
                ("PHOTZPER", round(extInfo.photZPErrCat, 3),
                 "Uncertainty on PHOTZP"),

                ("PHOTSYS", self.esoProgReq.photSys, "Photometric system"),
                ("ABMAGLIM", round(abMagLim, 3),
                 "5-sigma limiting magnitude for point sources"),

                ("ABMAGSAT", round(abSatLimit, 3),
                 "Saturation limit for point sources (AB mags)"),

                ("PSF_FWHM", round(seeing, 4), "Spatial resolution (arcsec)"),
                ("ELLIPTIC", round(extInfo.avStellarEll, 4),
                 "Average ellipticity of point sources")]

            # Update VSA with abMagLim and abSatLim
            self.archive.update(
                "MultiframeDetector", [("abMagLim", abMagLim), ("abSatMag", abSatLimit)],
                where="multiframeID=%s and extNum=%s" % (field.fileID, extInfo.extNum))

            if isPawPrint and not isConf and self.copyJpegsPaws:
                primaryCards += self.jpegCards(jpegFiles[extNo], jpegAsNum)
                jpegAsNum += 1

            extensionCards[extInfo.extNum] = extCards

        # Remove unnecessary cards: extension keys must not shadow primary
        # keys and vice versa
        for ext in extensionCards:
            delPrimKeys.update(card[0] for card in extensionCards[ext])

        delExtKeys.update(card[0] for card in primaryCards)
        delExtKeys.update(["MAGZPT", "MAGZRR"])

        if isMosaic:
            # Flatten the MEF into a simple FITS file, merging the headers of
            # the primary HDU and first extension into a new primary HDU
            fitsFile = fits.open(filePath)
            hdulist = pyfits.HDUList(pyfits.PrimaryHDU(fitsFile[1].data))
            for ext in range(2):
                for card in fitsFile[ext].header.ascardlist():
                    if card.key != 'ORIGNAME' and card.key != 'CONTINUE':
                        self.addCard(hdulist[0].header, card.key
                            if not card.key.startswith('ESO') else 'HIERARCH ' + card.key,
                            card.value, card.comment)

                    # Update with image centre.....
                    raDec = self.primaryHeaderPositionDict[field.fileID]

                    if card.key == 'RA':
                        comment = 'RA (J2000) image centre (deg)'
                        self.addCard(hdulist[0].header, card.key, raDec[0], comment)
                    if card.key == 'DEC':
                        comment = 'DEC (J2000) image centre (deg)'
                        self.addCard(hdulist[0].header, card.key, raDec[1], comment)

            hdulist.writeto(filePath.replace('.fz', ''))
            os.remove(filePath)
            os.rename(filePath.replace('.fz', ''), filePath)

        fitsFile = fits.open(filePath, 'update')
        for ii, hdu in enumerate(fitsFile):
            # Strip inherited provenance / ESO hierarchical keys first
            removeKeys = \
                [key for key in hdu.header if 'PROV' in key or 'ESO ' in key]

            for key in removeKeys:
                del hdu.header[key]

            # (A pre-cfitsio-3280 variant of the card-update logic was kept
            # here as commented-out code; removed for clarity.)

            # Comment out if using cfitsio<3280
            # NOTE(review): 'ii is 0' relies on CPython small-int interning;
            # should be 'ii == 0' (behaviour is the same on CPython).
            if isPawPrint:
                if ii is 0:
                    for key, value, desc in primaryCards:
                        self.addCard(hdu.header, key, value, desc)

                    for key in delPrimKeys:
                        if key in hdu.header:
                            del hdu.header[key]

                    hdu.header.add_comment("Image created by WFAU")
                else:
                    extNum = ii + 1
                    for key, value, desc in extensionCards[extNum]:
                        self.addCard(hdu.header, key, value, desc)

                    for key in delExtKeys:
                        if key in hdu.header:
                            del hdu.header[key]
            else:
                # Tiles/mosaics: all cards go into every HDU; extension cards
                # are always those of extension 2
                for key in delPrimKeys:
                    if key in hdu.header:
                        del hdu.header[key]

                for key, value, desc in primaryCards:
                    self.addCard(hdu.header, key, value, desc)

                for key, value, desc in extensionCards[2]:
                    self.addCard(hdu.header, key, value, desc)

                hdu.header.add_comment("Image created by WFAU")

        # delete unwanted headers
        fitsFile.close()
        fits.removeDuplicateKeywords(filePath)

        # Make sure VSA_MFID,CASUVERS are correct
        fitsFile = fits.open(filePath, 'update')
        del fitsFile[0].header['VSA_TIME']
        self.addCard(fitsFile[0].header, 'VSA_MFID', field.fileID,
                     'VSA multiframeID')
        self.addCard(fitsFile[0].header, 'CASUVERS', primaryInfo.casuVers,
                     'CASU software version')

        # 55% of total processing cost, but I/O limited - cannot parallelise
        fits.checksum([filePath])
        os.rename(filePath, filePath.replace('.fz', ''))
        fits.compressFits([filePath.replace('.fz', '')])
        os.rename(filePath.replace('.fz', ''), filePath)

        # @TODO: Eventually remove this - hack for mosaics
        if isMosaic:
            hdulist = fits.open(filePath, 'update')
            hdulist[1].header['ZEXTEND'] = False
            del hdulist[1].header['ZTENSION']
            del hdulist[1].header['ZPCOUNT']
            del hdulist[1].header['ZGCOUNT']
            hdulist.close()
            # @FIXME: Inefficient
            fits.checksum([filePath])
            if os.path.exists(filePath):
                os.rename(filePath, filePath.replace('.fz', ''))
                fits.compressFits([filePath.replace('.fz', '')])
                os.rename(filePath.replace('.fz', ''), filePath)

        self.delKeysDict[field.fileID] = delKeysList
1330 1331 #-------------------------------------------------------------------------- 1332
    def outgestCatalogues(self, deepTiles):
        """ Outgest FITS files from detection table for each deep tile.

            @param deepTiles: Sequence of deep-tile product descriptions
                              (fileID, fileName, filterName attributes used).
        """
        fnFormat = FileNameFormat(self.programme, self.releaseNum,
                                  os.path.join(self._fileDir, 'images'),
                                  fileType="cat")

        detTable = self._progSchema[self.programme.getAttr('detectionTable')]
        columns = detTable.columns
        fileIdStr = "multiframeID"

        # Columns never released; and per-programme explicit column selections
        # (with SQL aliases) that override the default "all but excluded" set
        excludedColumns = ['cx', 'cy', 'cz', 'htmid']
        includedColumns = {'VIDEO': ['objid', 'filterid', 'x', 'y', 'ra', 'dec',
            'apermagnoapercorr3', 'apermag3err as apermagnoapercorr3err',
            'apermag3', 'apermag3err', 'apermagnoapercorr6',
            'apermag6err as apermagnoapercorr6err', 'apermag6', 'apermag6err',
            'kronmag as automag', 'kronmagerr as automagerr', 'halfrad',
            'petromag', 'petromagerr', 'errbits', 'classstat']}

        if self.programme.getAcronym().upper() in includedColumns:
            # Order by
            # Map list position -> alias name ('' when the entry has no alias)
            aliasesDict = dict([(ii, (ic.lower().split(' as ')[1]
                                      if ' as ' in ic.lower() else ''))
                for ii, ic in enumerate(includedColumns[self.programme.getAcronym().upper()])])
            incColumns = []
            selectStrColNames = includedColumns[self.programme.getAcronym().upper()]
            for ii, ic in enumerate(includedColumns[self.programme.getAcronym().upper()]):
                # Deep-copy the schema column so renaming it to the alias
                # does not corrupt the shared schema definition
                col = [copy.deepcopy(column) for column in columns
                       if column.name.lower() == ic.split()[0]].pop()
                if ii in aliasesDict and len(aliasesDict[ii]) > 0:
                    col.name = aliasesDict[ii]
                incColumns.append(col)

            columns = incColumns
        else:
            columns = [column for column in columns
                       if column.name.lower() not in excludedColumns]
            selectStrColNames = [col.name for col in columns]

        if not self.releaseDB.isLoadDb:
            selectStr = ', '.join(map(str, selectStrColNames))
            fromStr = detTable.name
            fileIdAlias = detTable.name
        else:
            # Load-server layout splits the detection table into Raw /
            # Astrometry / Photometry parts that must be rejoined
            fileIdAlias = 'R'
            joinList = [(self._progSchema[detTable.name + "Raw"], fileIdAlias),
                        (self._progSchema[detTable.name + "Astrometry"], 'A'),
                        (self._progSchema[detTable.name + "Photometry"], 'P')]

            commonCols = utils.getDuplicates(utils.unpackList(
                map(str, table.columns) for table, _alias in joinList))

            # @TODO: More complicated, but is this ever used - needs fixing if so?
            selectStr = ', '.join(
                ('%s.%s' % (fileIdAlias, column) if str(column) in commonCols else
                 str(column))
                for column in self._progSchema[detTable.name].columns)

            fromStr = Join([(self.releaseDB.tablePath(table.name), alias)
                            for table, alias in joinList],
                           detTable.primaryKey())

        if self.isQuickRun:
            selectStr = "TOP 10 " + selectStr

        # Skip confidence maps and catalogues already produced
        deepTiles = [deepTile for deepTile in deepTiles
                     if self.sysc.confSuffix not in deepTile.fileName
                     and not os.path.exists(fnFormat.getFilePath(deepTile))]
        Logger.addMessage("Creating catalogue files for %s deep tiles..."
                          % len(deepTiles))

        progress = ForLoopMonitor(deepTiles)
        for deepTile in deepTiles:
            if self.esoProgReq.photSys == 'AB':
                # Apply the per-filter Vega->AB conversion in the SQL itself
                vegaToAB = self.releaseDB.query(
                    "vegaToAB", "Filter", "shortName='%s'" % deepTile.filterName,
                    firstOnly=True)
                selectStrFinal = self.modifyToAB(selectStr, vegaToAB)
            else:
                selectStrFinal = selectStr

            filePath = fnFormat.getFilePath(deepTile)
            self.updateDirDict(filePath)
            query = SelectSQL(selectStrFinal, fromStr,
                where="%s.%s=%s AND %s.seqNum>0"
                      % (fileIdAlias, fileIdStr, deepTile.fileID, fileIdAlias))

            # Run each file outgest as a separate process to avoid memory leaks
            outgest = Process(target=self.outgestCatalogueFile,
                              args=(filePath, columns, query, deepTile))
            outgest.start()
            outgest.join()
            if outgest.exitcode:
                raise EsoRelease.CuError("Forked outgest process failed."
                                         " Please check stdout.")

            progress.testForOutput()
1430 1431 #-------------------------------------------------------------------------- 1432
1433 - def modifyToAB(self, selectStr, vegaToAB, filters=None):
1434 """ 1435 """ 1436 if not filters: 1437 parts = [] 1438 for part in selectStr.split(','): 1439 if 'mag' in part.lower() and 'err' not in part.lower(): 1440 if ' as ' in part.lower(): 1441 origName, alias = part.split(' as ') 1442 part = '(%s + %s) as %s' % (origName, vegaToAB, alias) 1443 else: 1444 part = '(%s + %s) as %s' % (part, vegaToAB, part) 1445 else: 1446 part = part 1447 parts.append(part) 1448 return ','.join(parts) 1449 else: 1450 # Source table 1451 parts = [] 1452 for part in selectStr.split(','): 1453 if 'mag' in part.lower() and 'err' not in part.lower(): 1454 # Mags 1455 for index, band in enumerate(filters): 1456 if band in part: 1457 if ' as ' in part.lower(): 1458 origName, alias = part.split(' as ') 1459 part = '(%s + %s) as %s' % (origName, vegaToAB[index], alias) 1460 else: 1461 part = '(%s + %s) as %s' % (part, vegaToAB[index], part) 1462 elif ('pnt' in part.lower() or 'ext'in part.lower()) and 'err' not in part.lower(): 1463 # cols 1464 primVtoAB = [vegaToAB[index] for index, band in enumerate(filters) if band + 'm' in part][0] 1465 secVtoAB = [vegaToAB[index] for index, band in enumerate(filters) if 'm' + band in part][0] 1466 part = '(%s + %s - %s) as %s' % (part, primVtoAB, secVtoAB, part) 1467 else: 1468 part = part 1469 parts.append(part) 1470 return ','.join(parts)
1471 1472 #-------------------------------------------------------------------------- 1473
    def outgestCatalogueFile(self, filePath, columns, query, deepTile):
        """ Outgests a single ESO catalogue file product.

            Designed to run in a forked child process (see outgestCatalogues)
            so that memory used by the table data is returned on exit.

            @param filePath: Destination path of the catalogue FITS file.
            @param columns:  Schema columns describing the outgested data.
            @param query:    SelectSQL object supplying the catalogue rows.
            @param deepTile: Product description of the parent deep tile.
        """
        # Prepare catalogue data
        fitsTable = self.queryFitsTable(columns, query)

        # Metadata: the first AND-clause of the query restricts to this
        # multiframeID; reuse it (alias stripped) for the metadata queries
        whereStr = query.whereStr.split(" AND ")[0].split('.', 1)[-1]
        primary = pyfits.PrimaryHDU()
        band = ''
        for hdu, metadataTables \
          in [(primary, ["Multiframe", "MultiframeEsoKeys"]),
              (fitsTable, ["MultiframeDetector", "CurrentAstrometry"])]:

            for tableName in metadataTables:
                for column, value \
                  in self.queryMetadata(self._metaSchema[tableName], whereStr):
                    self.addKey(hdu, column, value)
                if "FILTER" in hdu.header:
                    band = hdu.header["FILTER"]

        self.addStandardKeys(primary.header, fitsTable.header, [deepTile],
                             [band], tableType='CAT', field=deepTile)

        # Remove keys already flagged for deletion when the parent image
        # headers were modified (see modHeaders)
        for key in self.delKeysDict[deepTile.fileID]:
            if key in primary.header:
                del primary.header[key]
            if key in fitsTable.header:
                del fitsTable.header[key]

        # Write the FITS file
        pyfits.HDUList([primary, fitsTable]).writeto(filePath, checksum=False)
        fits.checksum([filePath])
1504 1505 #-------------------------------------------------------------------------- 1506
    def queryFitsTable(self, columns, sql, tableType='NONE'):
        """
        Outgest catalogue data corresponding to given field.

        Floating-point columns are cleaned before output: database default
        values and C++ type-limit sentinels are replaced by NaN, and known
        problem columns have out-of-range values reset to the default first.

        @param columns:   Schema columns of the outgested data.
        @param sql:       SelectSQL object to run (ignored for 'MD' types).
        @param tableType: Table category; if it contains 'MD' no data is
                          queried and an empty table HDU is created.

        @return: Binary table HDU containing the cleaned data.
        """
        # Largest representable values of the corresponding C++ types; values
        # at these limits are treated as "no data" sentinels
        cppLimitDict = {'real':3.40282e+038, 'float':1.79769e+308}
        # Known bad columns mapped to their valid [min, max] data range
        # (None means unbounded on that side)
        problemDataColumns = {"averageconf":[0, None], "meanmag":[0, None]}

        dataNum = {}
        if 'MD' not in tableType:
            # Transpose row-wise query results into one sequence per column
            data = dict(zip(columns,
                zip(*self.releaseDB.query(sql.selectStr, sql.fromStr, sql.whereStr))))
            for column in columns:
                if column.dataType == 'real' or column.dataType == 'float':
                    da = numpy.array(data[column])
                    da = self.problemColumnCheck(da, column, problemDataColumns)
                    # Replace values within 0.01% of the DB default with NaN
                    da = numpy.where(numpy.abs(numpy.divide(numpy.subtract(da, dbc.realDefault()), dbc.realDefault())) < 0.0001, numpy.nan, da)
                    # Likewise values at the C++ type limit (either sign)
                    dataNum[column] = numpy.where(numpy.abs(numpy.divide(numpy.subtract(numpy.abs(da), cppLimitDict[column.dataType]), cppLimitDict[column.dataType])) < 0.0001, numpy.nan, da)
                else:
                    dataNum[column] = numpy.array(data[column])

        # NOTE(review): for 'MD' table types dataNum stays empty here and an
        # empty table HDU is produced - confirm this is the intended contract.
        return self.createTableHdu(columns, tableType, dataNum)
1528 1529 #-------------------------------------------------------------------------- 1530
1531 - def problemColumnCheck(self, da, column, problemDataColumns):
1532 """ Checks whether column is a problem column and replaces dodgy 1533 defaults with correct values 1534 """ 1535 1536 for colNamePart in problemDataColumns: 1537 if colNamePart in column.name.lower(): 1538 minDataValue, maxDataValue = problemDataColumns[colNamePart] 1539 if minDataValue is not None: 1540 da = numpy.where(da < minDataValue, dbc.realDefault(), da) 1541 if maxDataValue is not None: 1542 da = numpy.where(da > maxDataValue, dbc.realDefault(), da) 1543 return da
1544 1545 #-------------------------------------------------------------------------- 1546 1547
1548 - def getNearestFilter(self, filterName, possibleFilters):
1549 """ 1550 """ 1551 curFilterInfo = self.releaseDB.query("shortName, (cutOn+cutOff) as medLamb", 1552 "Filter", "shortName like '%s'" % filterName, 1553 firstOnly=True) 1554 filterInfo = self.releaseDB.query("shortName, (cutOn+cutOff) as medLamb", 1555 "Filter", "shortName in (%s)" % 1556 (','.join(["'%s'" % fName for fName in possibleFilters]))) 1557 absDev = [(fInfo.shortName, abs(fInfo.medLamb - curFilterInfo.medLamb)) 1558 for fInfo in filterInfo] 1559 1560 return sorted(absDev, key=itemgetter(1))[0][0]
1561 1562 #-------------------------------------------------------------------------- 1563
1564 - def outgestSources(self, isSourceList=False):
1565 """ Outgest FITS files from source table for each frame set. 1566 @TODO: (priOrSec = 0 || priOrSec=frameSetID) as primary_source 1567 VVV - simpler for DR2? Be careful. Just primary sources selected? 1568 """ 1569 1570 1571 self.catType = 'srcCat' 1572 columnsTypesExclude = ['PsfMag', 'PsfMagErr', 'SerMag2D', 'SerMag2DErr'] 1573 mergeLog = self._progSchema[self.programme.getMergeLogTable()] 1574 sourceTable = self._progSchema[self.programme.getSourceTable()] 1575 varTable = self._progSchema.get(self.programme.getVariabilityTable()) 1576 # Main data query 1577 iauNameSel, iauNameAttr = self.getIAUNameColSel('s' 1578 if self.esoProgReq.fstEpchSrc == 1 else sourceTable.name) 1579 aliasesDict = {} 1580 # @TODO: Primary sources only (priOrSec=0 or priOrSec=frameSetID) 1581 filters = [col.name.replace("mfID", '') for col in mergeLog.columns 1582 if "mfID" in col.name] 1583 colours = [col.name.lower().replace("pnt", '') for col in sourceTable.columns 1584 if col.name.lower().endswith("pnt")] 1585 excludedColumns = ['cx', 'cy', 'cz', 'htmid'] 1586 # @TODO: For VIDEO - add in SExtractor half rad column into schema for next release 1587 1588 includedColumnsProg = {'VIDEO': 1589 {'merged': ['sourceid', 'ra', 'dec', 'mergedclass', 'ebv'], 1590 'colour': ['pnt', 'pnterr', 'ext', 'exterr'], 1591 'filter': ['petromag', 'petromagerr', 'apermag3', 'apermag3err', 1592 'apermag6', 'apermag6err', 'apermagnoapercorr3', 1593 'apermag3err as apermagnoapercorr3err', 1594 'apermagnoapercorr6', 'apermag6err as apermagnoapercorr6err', 1595 'errbits', 'classstat', 'kronmag as automag', 'kronmagerr as automagerr', 'halfrad']}} 1596 1597 includedColumns = [] 1598 if self.programme.getAcronym().upper() in includedColumnsProg: 1599 includedColumns = includedColumnsProg[self.programme.getAcronym().upper()]['merged'] 1600 for colour in colours: 1601 includedColumns.extend([colour.lower() + cType.lower() 1602 for cType in includedColumnsProg[self.programme.getAcronym().upper()]['colour']]) 1603 
for band in filters: 1604 includedColumns.extend([self.addBand(cType, band) 1605 for cType in includedColumnsProg[self.programme.getAcronym().upper()]['filter']]) 1606 1607 for band in filters: 1608 excludedColumns.extend([band.lower() + cType.lower() for cType in columnsTypesExclude]) 1609 1610 if self.esoProgReq.fstEpchSrc == 1: 1611 # @TODO: Redo this for VVV DR2 plus, will be much more straightforward 1612 synSrcTable = self._progSchema[self.programme.getSynopticSourceTable()] 1613 mEpochFilters = self.esoProgReq.mEpochFilters.split(',') 1614 expectedOBMatches = self.esoProgReq.expOBMatches.split(';') 1615 obFilters = [ob.split(',') for ob in expectedOBMatches] 1616 if len(mEpochFilters) == 1 and len(mEpochFilters) < len(filters): 1617 # select frames! 1618 mEpochFilter = mEpochFilters[0] 1619 mEpochFilterID = self.releaseDB.query("filterID", "Filter", 1620 "shortName like '%s'" % mEpochFilter, firstOnly=True) 1621 sEpochFilters = [band for band in filters if band.lower() != mEpochFilter.lower()] 1622 1623 1624 nearestSEpchFilter = self.getNearestFilter(mEpochFilter, sEpochFilters) 1625 synMergeLogLookUpDict = defaultdict(dict) 1626 for band in filters: 1627 synMergeLogLookUpDict[band] = dict(self.releaseDB.query( 1628 "%smfID,synFrameSetID" % band, 1629 self.programme.getSynopticMergeLogTable(), 1630 "%smfID>0" % band)) 1631 1632 goodFrameSel = ("" if not self.completeFilters else 1633 ''.join([" AND %smfID<0" for band in sEpochFilters]) + 1634 " AND m.multiframeID>0") 1635 # select shallow Ks OBs 1636 1637 frames = self.releaseDB.query( 1638 "frameSetID,%s" % ','.join("%smfID" % flt for flt in filters), 1639 "%s as l,Multiframe as m" % self.programme.getMergeLogTable(), 1640 "l.%smfID=m.multiframeID and m.frameType not like " 1641 "'%%deep%%'%s" % (mEpochFilter, goodFrameSel)) 1642 1643 # Deep Ks select obsName not like '%v-%' 1644 selectStr = ("l.frameSetID,%s,m.multiframeID as %smfID" % 1645 (','.join(["%smfID" % flt for flt in sEpochFilters]), 1646 
mEpochFilter)) 1647 fromStr = ("%s as l,Multiframe as md, ProgrammeFrame as p," 1648 "Multiframe as m,MultiframeDetector as mfd," 1649 "MultiframeEsoKeys as e, CurrentAstrometry as c" % 1650 self.programme.getMergeLogTable()) 1651 whereStr = ("l.ksmfID=md.multiframeID and md.frameType like " 1652 "'%%deep%%' and p.programmeID=%s and p.multiframeID=" 1653 "m.multiframeID and m.frameType='tilestack' and " 1654 "m.filterID=%s and m.multiframeID=e.multiframeID and " 1655 % (self.programmeID, mEpochFilterID) + 1656 "m.multiframeID=mfd.multiframeID and m.multiframeID=c.multiframeID and " 1657 "dbo.fGreatCircleDist(l.ra,l.dec,c.centralRa," 1658 "c.centralDec)<3 and (m.deprecated in (0,50,51) or " 1659 "(m.deprecated=100 and mfd.deprecated=0))") 1660 1661 frames += self.releaseDB.query( 1662 selectStr=selectStr, 1663 fromStr=fromStr, whereStr=whereStr + (" and e.obsName not like " 1664 "'%v-%'" + goodFrameSel)) 1665 obTileObsQuery = SelectSQL("frameSetID", fromStr, 1666 whereStr + (" and e.obsName not like '%v-%'")) 1667 # select nearest in time of others 1668 frames += self.releaseDB.query( 1669 selectStr=selectStr, 1670 fromStr="%s as l, Multiframe as m, (select l.frameSetID, " 1671 "min(m.mjdObs) as minMjdObs from %s as l, " 1672 "Multiframe as md, ProgrammeFrame as p, Multiframe as m, " 1673 "MultiframeEsoKeys as e, CurrentAstrometry as c, " 1674 "MultiframeDetector as mfd, Multiframe as ne where %s " 1675 "and ne.multiframeID=l.%smfID AND frameSetID not in (%s) " 1676 "group by l.frameSetID) as T" % ( 1677 self.programme.getMergeLogTable(), 1678 self.programme.getMergeLogTable(), 1679 whereStr + (" and e.obsName like '%v-%'"), nearestSEpchFilter, obTileObsQuery), 1680 whereStr="l.frameSetID = T.frameSetID and m.mjdObs = " 1681 "T.minMjdObs and m.frameType = 'tilestack'%s" % 1682 goodFrameSel) 1683 # frameTypeSel = queries.getFrameSelection('tile', deepOnly=True, 1684 # alias='m', selType='%stack') 1685 # Sort out any deep ZYJH 1686 replacementDict = 
dict(self.releaseDB.query( 1687 "distinct m.multiframeID,m2.multiframeID", 1688 "ProgrammeFrame as p,Multiframe as m,Provenance as v," 1689 "Provenance as v2,Provenance as v3, Multiframe as m2", 1690 "p.programmeID=%s and p.multiframeID=m.multiframeID and " 1691 "m.frameType like '%%tile%%deep%%stack' and m.filterID!=%s and " 1692 "m.multiframeiD=v.combiframeID and v.multiframeID=" 1693 "v2.combiframeID and v2.multiframeID=v3.multiframeID and " 1694 "v3.combiframeID=m2.multiframeID and m2.frameType " 1695 "like 'tilestack'" % (self.programmeID, mEpochFilterID))) 1696 1697 1698 finalFrames = [] 1699 for frameInfo in frames: 1700 fInfo = list(frameInfo) 1701 for index, _filtName in enumerate(sEpochFilters): 1702 if fInfo[index + 1] in replacementDict: 1703 fInfo[index + 1] = replacementDict[fInfo[index + 1]] 1704 finalFrames.append(fInfo) 1705 1706 1707 1708 # Sort out selection and columns 1709 # 1710 # Positional info 1711 basicSTSel = ("sourceID,frameSetID,ra,dec,l,b," 1712 "lambda,eta,priOrSec,mergedClassStat,mergedClass") 1713 # UNION - check at frame level... 
1714 1715 colList = [] 1716 for ob in obFilters: 1717 for index, band in enumerate(ob): 1718 if index < len(ob) - 1: 1719 colList.append("ss%s.%sm%sPnt,ss%s.%sm%sPntErr" % 1720 (band.lower(), band.lower(), ob[index + 1].lower(), 1721 band.lower(), band.lower(), ob[index + 1].lower())) 1722 1723 1724 colourSel = ','.join(colList) 1725 # 1726 # bandPass attrs 1727 # 1728 # 1729 bandPassAttrs = ("ss%s.%sAperMag1,ss%s.%sAperMag1Err," 1730 "ss%s.%sAperMag3,ss%s.%sAperMag3Err,ss%s.%saperMag4," 1731 "ss%s.%saperMag4Err,ss%s.%sGausig,ss%s.%sPA," 1732 "ss%s.%sEll,ss%s.%sppErrBits,ss%s.%sAverageConf," 1733 "ss%s.%sSeqNum,ss%s.%sXi,ss%s.%sEta") 1734 bandPassList = [attr.replace('ss%s.%s', '').lower() 1735 for attr in bandPassAttrs.split(',')] 1736 columns = [iauNameAttr] 1737 srcColumns = [column for column in sourceTable.columns 1738 if column.name in basicSTSel] 1739 fromStr = "%s as s" % (self.programme.getSourceTable()) # ,self.programme.getMergeLogTable()) 1740 whereStr = "" # "s.frameSetID=l.frameSetID" 1741 colourColumns = [] 1742 for colTerms in colList: 1743 colAttrs = [attr.split('.')[1].lower() for attr in colTerms.split(',')] 1744 colourColumns.extend([column for column in synSrcTable.columns 1745 if column.name.lower() in colAttrs]) 1746 synSrcColumns = [] 1747 bandSel = "" 1748 for band in filters: 1749 # Some modifications required... 1750 bandNameLength = len(band) 1751 1752 # Get order correct? 
1753 columns = [] 1754 for name in bandPassList: 1755 columns += [column for column in synSrcTable.columns 1756 if column.name.lower().startswith(band.lower()) and 1757 column.name[bandNameLength:].lower() == name] 1758 synSrcColumns.extend(columns) 1759 bandSel += ',' + bandPassAttrs % tuple([band for _index in range(28)]) 1760 fromStr += ",%sXSynopticSourceBestMatch as bm%s" % ( 1761 self.programme.getSourceTable(), band) 1762 fromStr += ",%s as ss%s" % ( 1763 self.programme.getSynopticSourceTable(), band) 1764 fromStr += ",%s as sl%s " % ( 1765 self.programme.getSynopticMergeLogTable(), band) 1766 whereStr += ("s.sourceID=bm%s.sourceID AND " 1767 "bm%s.synFrameSetID=sl%s.synFrameSetID AND " 1768 "bm%s.synFrameSetID=ss%s.synFrameSetID AND " 1769 "bm%s.synSeqNum=ss%s.synSeqNum AND " % 1770 tuple([band for index in range(7)])) 1771 1772 varClassCol = "variableClass" 1773 varAttr = varTable.attribute[varClassCol] 1774 varAttr.name = "VARFLAG" 1775 columns = [iauNameAttr] + srcColumns + colourColumns + synSrcColumns + [varAttr] 1776 selectStr = (iauNameSel + ', ' + 1777 ', '.join("s.%s" % column for column in srcColumns) + 1778 ', ' + colourSel + bandSel) 1779 # ', '.join("ss1.%s" % (column) 1780 # for column in synSrcColumns1) + ', ' + 1781 # ', '.join("%ss2.%s" % (column) 1782 # for column in synSrcColumns2)) 1783 selectStr += ", ISNULL(%s, %s) AS %s" \ 1784 % (varClassCol, dbc.intDefault(), varAttr) 1785 fromStr += ",%s as v" % self.programme.getVariabilityTable() 1786 whereStr += "v.sourceID=s.sourceID" 1787 else: 1788 if self.programme.getAcronym().upper() in includedColumnsProg: 1789 aliasesDict = dict([(ii, (ic.lower().split(' as ')[1] 1790 if ' as ' in ic.lower() else '')) 1791 for ii, ic in enumerate(includedColumns)]) 1792 nonNullSTColumns = [] 1793 selectStrColNames = includedColumns 1794 for ii, ic in enumerate(includedColumns): 1795 col = [copy.deepcopy(column) for column in sourceTable.columns if column.name.lower() == ic.split()[0]].pop() 1796 if ii in 
aliasesDict and len(aliasesDict[ii]) > 0: 1797 1798 col.name = aliasesDict[ii] 1799 nonNullSTColumns.append(col) 1800 else: 1801 nonNullSTColumns = [column for column in sourceTable.columns 1802 if column.name.lower() not in excludedColumns] 1803 selectStrColNames = [column.name for column in nonNullSTColumns] 1804 1805 1806 if not self.esoProgReq.incVariables: 1807 columns = [iauNameAttr] + nonNullSTColumns 1808 selectStr = iauNameSel + ', ' + ', '.join(selectStrColNames) 1809 fromStr = sourceTable.name 1810 whereStr = "" 1811 else: 1812 varClassCol = "variableClass" 1813 varAttr = varTable.attribute[varClassCol] 1814 varAttr.name = "VARFLAG" 1815 columns = [iauNameAttr] + nonNullSTColumns + [varAttr] 1816 1817 # Handle duplicate column names 1818 selectStr = iauNameSel + ', ' + ', '.join(("%s.%s" % (sourceTable.name, colName) 1819 if colName.split()[0] in [col.name for col in varTable.columns] else 1820 colName) 1821 for colName in selectStrColNames) 1822 1823 selectStr += ", ISNULL(%s, %s) AS %s" \ 1824 % (varClassCol, dbc.intDefault(), varAttr) 1825 # @TODO: More complicated if SynopticSource. 1826 # if VMC - first epoch. 1827 # if VVV ZY epoch + JHK epoch. 
1828 fromStr = PkLeftJoin(sourceTable, varTable) 1829 whereStr = "" 1830 1831 if not isSourceList: 1832 raIndex = [index for index, column in enumerate(columns) 1833 if column.name == 'ra'][0] 1834 decIndex = [index for index, column in enumerate(columns) 1835 if column.name == 'dec'][0] 1836 columns[raIndex].name = 'ra2000' 1837 columns[decIndex].name = 'dec2000' 1838 if self.isQuickRun: 1839 selectStr = "TOP 10 " + selectStr 1840 1841 1842 1843 # sIDIndex = [index for index, column in enumerate(columns) 1844 # if column.name == 'sourceID'][0] 1845 # sIDtag = columns[sIDIndex].tag 1846 # for key in sIDtag: 1847 # if key == '--/C': 1848 # sIDtag[key] = 'meta.id' 1849 # columns[sIDIndex].tag = sIDtag 1850 if self.esoProgReq.photSys == 'AB': 1851 vegaToAB = self.releaseDB.query( 1852 "vegaToAB", "Filter", "shortName in (%s) order by filterID" 1853 % ','.join(["'%s'" % shtName for shtName in filters])) 1854 1855 selectStr = self.modifyToAB(selectStr, vegaToAB, filters=filters) 1856 1857 fnFormat = FileNameFormat(self.programme, self.releaseNum, 1858 os.path.join(self._fileDir, 'images') if isSourceList else 1859 os.path.join(self._fileDir, "cat"), 1860 fileType="srcCat" if isSourceList else "finalSourceCat", 1861 filters=''.join(filters)) 1862 if self.fieldIDs: 1863 # @TODO: convert to correct list 1864 1865 # 1866 1867 frameSetIDs = ','.join( 1868 str(queries.getFrameSetID(self.releaseDB, fieldID, self.programme)) 1869 for fieldID in self.fieldIDs) 1870 1871 # Work through each file by fileID 1872 fileIdStr = mergeLog.primaryKey() 1873 fields = self.releaseDB.query( 1874 selectStr=fileIdStr + " AS fileID, ra*24/360 AS ra, dec", 1875 fromStr=mergeLog, 1876 whereStr=DepCodes.selectNonDeprecated 1877 + (" AND frameSetID IN (%s)" % frameSetIDs 1878 if self.fieldIDs else ""), 1879 orderBy=fileIdStr) 1880 # Cover the case of continuing from an interruption 1881 1882 self.srcCatFieldDict = dict([(field.fileID, fnFormat.getFilePath(field)) 1883 for field in fields]) 1884 
self.fieldInfoDict = {} 1885 for field in fields: 1886 if self.esoProgReq.fstEpchSrc == 1: 1887 frameInfo = [fInfo for fInfo in finalFrames if fInfo[0] == field.fileID][0] 1888 fieldInfo = self.getFieldInfo(field, frameInfo, filters) 1889 self.fieldInfoDict[field.fileID] = fieldInfo 1890 else: 1891 self.fieldInfoDict[field.fileID] = None 1892 # @TODO: This makes metadata file incorrect 1893 # fields = [field for field in fields 1894 # if not os.path.exists(fnFormat.getFilePath(field))] 1895 1896 Logger.addMessage("Creating source table files for %s frame sets..." 1897 % len(fields)) 1898 1899 if not isSourceList: 1900 utils.ensureDirExist(os.path.join(self._fileDir, "cat")) 1901 1902 notVarFrameSetsList = self.releaseDB.query("frameSetID", 1903 self.programme.getMergeLogTable(), 1904 "frameSetID NOT IN (select frameSetID from %s)" % 1905 self.programme.getVarFrameSetInfoTable()) 1906 1907 fields = [] if self.piOnly else fields 1908 progress = ForLoopMonitor(fields) 1909 self.magLim = defaultdict(list) 1910 self.nPointings = len(fields) 1911 for field in fields: 1912 self.regionDict[field.fileID] = (self.getRegion(field.fileID) 1913 if self.isRegion else 1914 None) 1915 skipField = False 1916 if field.fileID in notVarFrameSetsList: 1917 skipField = True 1918 1919 if self.isRegion: 1920 fnFormat = FileNameFormat(self.programme, self.releaseNum, 1921 os.path.join(self._fileDir, 'images') if isSourceList else 1922 os.path.join(self._fileDir, "cat/%s" % self.regionDict[field.fileID][0]), 1923 fileType="srcCat" if isSourceList else "finalSourceCat", 1924 filters=''.join(filters)) 1925 utils.ensureDirExist(os.path.join(self._fileDir, "cat/%s" % 1926 self.regionDict[field.fileID][0])) 1927 1928 filePath = fnFormat.getFilePath(field) 1929 self.updateDirDict(filePath) 1930 selStr = selectStr 1931 frStr = fromStr 1932 whStr = whereStr 1933 srcConst = (" AND %s" % self.esoProgReq.srcConstraints 1934 if self.esoProgReq.srcConstraints != 'NONE' else "") 1935 query = 
SelectSQL(selStr, frStr, 1936 where="%s.%s=%s%s" % (sourceTable, fileIdStr, field.fileID, 1937 srcConst)) 1938 # 1939 mfIDList = [] 1940 if self.esoProgReq.fstEpchSrc == 1: 1941 finalWhereStr = whStr 1942 frameInfo = [fInfo for fInfo in finalFrames if fInfo[0] == field.fileID][0] 1943 synMLList = [] 1944 1945 for index, band in enumerate(filters): 1946 if frameInfo[index + 1] > 0: 1947 finalWhereStr += " AND sl%s.%smfID=%s" % (band, band, frameInfo[index + 1]) 1948 if frameInfo[index + 1] in synMergeLogLookUpDict[band]: 1949 synMLList.append(synMergeLogLookUpDict[band][frameInfo[index + 1]]) 1950 mfIDList.append(frameInfo[index + 1]) 1951 else: 1952 Logger.addMessage("Skipping field, due to recalibration-deprecation issue") 1953 synMLList.append(dbc.intDefault()) 1954 mfIDList.append(dbc.intDefault()) 1955 skipField = True 1956 else: 1957 synFrameSetID = self.releaseDB.query( 1958 "synFrameSetID", 1959 "%s as sl,%s as l" % (self.programme.getSynopticMergeLogTable(), 1960 self.programme.getMergeLogTable()), 1961 "l.frameSetID=%s AND dbo.fGreatCircleDist(" 1962 "l.ra,l.dec,sl.ra,sl.dec)<3 AND sl.%smfID=%s" 1963 % (field.fileID, band, dbc.intDefault()), firstOnly=True) 1964 1965 finalWhereStr += (" AND sl%s.synFrameSetID=%s" % (band, synFrameSetID)) 1966 synMLList.append(dbc.intDefault()) 1967 mfIDList.append(dbc.intDefault()) 1968 1969 if skipField: 1970 self.skipFieldList.append(field.fileID) 1971 for obIndex, ob in enumerate(obFilters): 1972 for index, band in enumerate(ob): 1973 if index < len(ob) - 1: 1974 filtIndex = getFiltIndex(obFilters, obIndex, index) 1975 if synMLList[filtIndex] != synMLList[filtIndex + 1]: 1976 selStr.replace("ss%s.%sm%sPnt" % (band, band, ob[index + 1]), 1977 "%s as %sm%sPnt" % (dbc.realDefault(), band, ob[index + 1])) 1978 selStr.replace("ss%s.%sm%sPntErr" % (band, band, ob[index + 1]), 1979 "%s as %sm%sPntErr" % (dbc.realDefault(), band, ob[index + 1])) 1980 1981 query = SelectSQL(selStr, frStr, 1982 where=finalWhereStr + " AND 
s.frameSetID=%s" % (field.fileID)) 1983 # Run each file outgest as a separate process to avoid memory leaks 1984 if not os.path.exists(filePath) and not skipField and not self.skipSources: 1985 outgest = Process(target=self.outgestSourceFile, 1986 args=(filePath, columns, query, field, filters, 1987 'SL' if isSourceList else 'SFSC')) 1988 1989 outgest.start() 1990 outgest.join() 1991 if outgest.exitcode: 1992 raise EsoRelease.CuError("Forked outgest process failed." 1993 " Please check stdout.") 1994 1995 if not skipField: 1996 self.appendMagLims2(field.fileID, mergeLog, mfIDList) 1997 # self.appendMagLims(filePath) 1998 progress.testForOutput() 1999 if not isSourceList and not self.isRegion: 2000 query = SelectSQL(selectStr, fromStr, whereStr) 2001 fnFormatMD = FileNameFormat(self.programme, self.releaseNum, 2002 os.path.join(self._fileDir, "cat"), fileType="catMetaData", 2003 filters=''.join(filters)) 2004 filePath = fnFormatMD.getFilePath(None) 2005 self.updateDirDict(filePath) 2006 outgest = Process(target=self.outgestSourceFile, 2007 args=(filePath, columns, query, None, filters, 'SMD')) 2008 if not os.path.exists(filePath) or self.piOnly: 2009 outgest.start() 2010 outgest.join()
2011 #-------------------------------------------------------------------------- 2012
def addBand(self, cType, band):
    """ Prefix the filter short name onto a column-type string, including
        any SQL alias it carries (e.g. 'kronmag as automag' with band 'K'
        becomes 'kkronmag as kautomag').
    """
    prefix = band.lower()
    pieces = cType.lower().split(' as ')
    return ' as '.join(prefix + piece for piece in pieces)
2017 2018 #-------------------------------------------------------------------------- 2019
def updateAliasedColName(self, columns, aliasesDict):
    """ Rename any column whose index has a non-empty alias recorded in
        aliasesDict, so schema attributes match the aliased select string.
        Columns are modified in place; the same list is returned.
    """
    for index, column in enumerate(columns):
        alias = aliasesDict.get(index, '')
        if alias:
            column.name = alias
    return columns
2029 2030 2031 #-------------------------------------------------------------------------- 2032 2033
def appendMagLims2(self, frameSetID, mergeLog, mfIDList):
    """ Append the AB magnitude limit of each band's frame to self.magLim.

        @param frameSetID: Frame set to look up when no mfIDList is given.
        @param mergeLog:   Schema of the programme's merge-log table.
        @param mfIDList:   Per-band multiframe IDs; queried from the merge
                           log when empty/None.
    """
    if not mfIDList:
        # No frame list supplied: fetch this frame set's per-band
        # multiframe IDs from the merge log.
        columns = [col.name for col in mergeLog.columns
                   if "mfID" in col.name]
        mfIDList = self.releaseDB.query(','.join(columns), mergeLog.name,
                                        "frameSetID=%s" % frameSetID,
                                        firstOnly=True)

    for ii, mfID in enumerate(mfIDList):
        # 1-based band index, matching the MAGLIM%s header key convention.
        index = ii + 1
        if mfID > 0:
            magLim = self.magLimSatDict[mfID][0]
            if magLim > 0:
                # self.magLim is a defaultdict(list): accumulate limits per
                # band across fields. (Was "self.magLim[index] = magLim",
                # which overwrote earlier fields' values and broke the list
                # invariant shared with appendMagLims().)
                self.magLim[index].append(magLim)
2050 #-------------------------------------------------------------------------- 2051
def appendMagLims(self, filePath):
    """ Append magnitude limits read from a FITS file's primary header
        (keys MAGLIM1, MAGLIM2, ...) to self.magLim.

        @param filePath: Path to the FITS file to read.
    """
    hdulist = fits.open(filePath)
    try:
        index = 1
        # Walk consecutive MAGLIM<n> keys; stop at the first gap.
        while 'MAGLIM%s' % index in hdulist[0].header:
            magLim = hdulist[0].header['MAGLIM%s' % index]
            if magLim > 0:
                self.magLim[index].append(magLim)
            index += 1
    finally:
        # Close the HDU list explicitly - the original leaked the file
        # handle, which matters when many catalogue files are processed.
        hdulist.close()
2062 2063 #-------------------------------------------------------------------------- 2064
def updateDirDict(self, filePath):
    """ Record the file's directory in self.directoryDict, keyed by its
        path relative to the release file directory, if not already there.
    """
    fullDir = os.path.dirname(filePath)
    relDir = fullDir.split(self._fileDir)[1]
    if relDir not in self.directoryDict:
        self.directoryDict[relDir] = fullDir
2071 2072 #-------------------------------------------------------------------------- 2073
def getRegion(self, frameSetID=None, raDec=None, useFrameSetID=True):
    """ Return the (region name, region parts) pair for a field, located
        either by its frame set ID or by an (RA in hours, Dec) position.
    """
    if useFrameSetID:
        fieldID = queries.getFieldID(self.releaseDB, frameSetID,
                                     self.programme)
    elif raDec:
        # Positional lookup against the required-product table; RA is
        # supplied in hours, the table stores degrees.
        productType = 'mosaic' if self.areMosaics else 'tile'
        raDegrees = 15 * raDec[0]
        fieldID = self.releaseDB.query(
            "fieldID", "Required%s" % productType,
            "dbo.fGreatCircleDist(ra,dec,%s,%s)<0.5 and programmeID=%s"
            % (raDegrees, raDec[1], self.programmeID), firstOnly=True)

    return (self.regionFieldIDDict[fieldID],
            self.regionPartsFieldIDDict[fieldID])
2087 #-------------------------------------------------------------------------- 2088
def getIAUNameColSel(self, alias, radp=None, decdp=None,
                     ucdTag='meta.id'):
    """ Return the SQL select expression that generates the IAU name plus
        a matching schema attribute describing the resulting column.

        @param alias:  Table alias prefixing the ra/dec columns.
        @param radp:   Decimal places of RA; defaults to EsoRelease.radp.
        @param decdp:  Decimal places of Dec; defaults to EsoRelease.decdp.
        @param ucdTag: UCD tag for the column's '--/C' metadata.
    """
    if not radp:
        radp = EsoRelease.radp
    if not decdp:
        decdp = EsoRelease.decdp

    acronym = self.programme.getAcronym()
    iauNameSel = ("dbo.fIAUNameGen('%s',%s.ra,%s.dec,%s,%s)" %
                  (acronym.upper(), alias, alias, radp, decdp))

    # Fixed-format name length: survey acronym plus coordinate digits.
    charLength = 22 + radp + decdp + len(acronym)
    iauNameAttr = schema.Attribute()
    iauNameAttr.name = 'IAUNAME'
    iauNameAttr.dataType = 'varchar(%s)' % charLength
    iauNameAttr.tag = {'--/U': '', '--/C': ucdTag,
                       '--/D': 'IAU Name (not unique)', '--/B': ''}
    return iauNameSel, iauNameAttr
2107 2108 #--------------------------------------------------------------------------
def getFieldInfo(self, field, frameInfo, filters):
    """ Return a namedtuple extending the basic field record with one
        <band>mfID value per filter, taken from frameInfo[1:].
    """
    fieldNames = ['fileID', 'ra', 'dec'] + \
                 ['%smfID' % band for band in filters]
    ExtendedField = namedtuple('ExtendedField', ' '.join(fieldNames))
    values = [field.fileID, field.ra, field.dec]
    # frameInfo[0] is the frame set ID; per-band mfIDs follow in order.
    values += list(frameInfo[1:len(filters) + 1])
    return ExtendedField(*values)
2118 #--------------------------------------------------------------------------
def getInfoForPIs(self, filePath, columns):
    """ Write a plain-text column-definition file for PIs alongside the
        catalogue product: one "name; FITS format; description" line per
        column, under <fileDir>/piInfo/.

        @param filePath: Catalogue FITS file path; the .txt name derives
                         from its basename.
        @param columns:  Schema attributes of the outgested columns.
    """
    outputDir = os.path.join(self._fileDir, 'piInfo')
    utils.ensureDirExist(outputDir)
    outFileName = os.path.join(
        outputDir, os.path.basename(filePath.replace('.fits', '.txt')))
    outputLines = ['# Column definitions\n',
                   '# Name; format; description\n']
    for column in columns:
        outputLines.append('%s; %s; %s\n' % (
            column.name,
            EsoRelease.sqlToFitsDataType[column.dataType],
            column.tag['--/D']))
    # Use open() with a context manager for deterministic closing (the
    # original used the Python-2-only file() builtin and never closed it).
    with open(outFileName, 'w') as outFile:
        outFile.writelines(outputLines)
2132 2133 #-------------------------------------------------------------------------- 2134
def outgestSourceFile(self, filePath, columns, query, field, bandList,
                      tableType='NONE'):
    """ Outgests a single ESO source catalogue file product.

        Runs the catalogue query, builds the binary FITS table, fills in
        primary/extension headers and writes the final file. Designed to
        run in a forked worker process (see the Process(...) call sites)
        so pyfits memory is reclaimed per file.

        @param filePath:  Destination FITS file path.
        @param columns:   Schema attributes of the outgested columns.
        @param query:     SelectSQL object producing the catalogue rows.
        @param field:     Frame-set row for this pointing (None for the
                          whole-survey metadata product).
        @param bandList:  Filter short names included in this product.
        @param tableType: Product code, e.g. 'SL', 'SFSC', 'SMD'.
    """
    # @FIXME - bring provenance in here
    # @FIXME - reduce number of queries and control structures..

    # Prepare catalogue data

    # Metadata products (and region-split releases) also get a PI-readable
    # column-description text file.
    if 'MD' in tableType or self.isRegion:
        self.getInfoForPIs(filePath, columns)
    if self.piOnly:
        # PI info was all that was requested - skip the FITS product.
        return
    # @TODO: change - if external and no neighbour table....
    if self.checkifExternal() and not self.hasNeighTable:
        # @TODO: Must modify this.
        fitsTable = self.convertExternalCats(columns, field, tableType, query)
    else:
        fitsTable = self.queryFitsTable(columns, query, tableType)
    # Metadata
    primary = pyfits.PrimaryHDU()

    # Add links
    if 'cat' not in filePath and ('MD' in tableType or 'FSC' in tableType):
        self.addCatalogueLinks(fitsTable, columns, field)

    # @FIXME: If multi-epoch ....
    # updateHeaders() sets missingAdpFiles as a side effect when a
    # referenced ESO archive (ADP) file cannot be found.
    self.missingAdpFiles = False
    self.updateHeaders(tableType, field, primary, fitsTable, bandList)
    if self.missingAdpFiles:
        # If missing ADP files - don't create fits table
        return

    # Internal bookkeeping key must not leak into the released product.
    del primary.header["VSA_MFID"]
    # @TODO: is this just for VIDEO or others too?
    # if 'FSC' in tableType:
    #     filterName = os.path.basename(filePath).split('_')[3]
    #     catType = os.path.basename(filePath).split('_')[4]
    #     self.addCard(primary.header, "PROV1",
    #         os.path.basename(filePath.replace(catType, 'srcCat').replace(filterName, ''.join(filters))),
    #         "Originating image file")

    # Write the FITS file
    pyfits.HDUList([primary, fitsTable]).writeto(filePath, checksum=True)
2180 2181 #-------------------------------------------------------------------------- 2182
def updateHeaders(self, tableType, field, primary, fitsTable, bandList):
    """ Dispatch header population to the handler matching the product
        type code. Unknown codes are silently ignored.
    """
    handlers = {
        # Merged-band source catalogue / metadata
        'SFSC': self.updateMergedBandHeaders,
        'SMD': self.updateMergedBandHeaders,
        # Multi-epoch single-band photometry
        'MFSC': self.updateMPhotHeaders,
        'MMD': self.updateMPhotHeaders,
        # Variability - multi-epoch (several bands) - points to MPHOT
        'VFSC': self.updateVarCatHeaders,
        'VMD': self.updateVarCatHeaders,
        # External products - treated as merged band
        'EFSC': self.updateMergedBandHeaders,
        'EMD': self.updateMergedBandHeaders,
    }
    handler = handlers.get(tableType)
    if handler is not None:
        handler(tableType, field, primary, fitsTable, bandList)
2198 #-------------------------------------------------------------------------- 2199
def updateMergedBandHeaders(self, tableType, field, primary, fitsTable, bandList):
    """ Populate primary and table headers for a merged-band catalogue
        product ('SFSC'/'SMD', also used for external 'EFSC'/'EMD').

        Non-passband merge-log columns become primary-header keys; for each
        <band>mfID / <band>eNum pair the per-band Multiframe, EsoKeys,
        Detector and Astrometry metadata is added, plus AB magnitude /
        saturation limits for per-field products.

        @param tableType: Product code; 'MD' codes are survey-wide metadata.
        @param field:     Frame-set row (unused for 'MD' products).
        @param primary:   pyfits PrimaryHDU to receive global keys.
        @param fitsTable: Table HDU to receive per-detector keys.
        @param bandList:  Filter short names included in the product.
    """
    # Query MergeLog for passband-specific metadata
    mergeLog = self._progSchema[self.programme.getMergeLogTable()]
    isPassband = False
    mfID = dbc.intDefault()
    mfIDs = []

    # filters = [col.name.replace("mfID", '') for col in mergeLog.columns
    #            if "mfID" in col.name]
    if 'MD' not in tableType:
        # Per-field product: restrict to this frame set and use the
        # (possibly epoch-substituted) frame info recorded earlier.
        whereStr = "frameSetID=%s" % field.fileID
        fieldInfo = self.fieldInfoDict[field.fileID]
    else:
        whereStr = ""
        if self.fieldIDs:
            # @TODO: convert to correct list

            #
            # Restrict the metadata product to requested, non-skipped
            # frame sets.
            fSetIDs = [queries.getFrameSetID(self.releaseDB, fieldID, self.programme)
                       for fieldID in self.fieldIDs]
            frameSetIDs = ','.join([str(fSetID) for fSetID in fSetIDs if fSetID not in self.skipFieldList])
            self.nPointings = len(frameSetIDs.split(','))
            whereStr = "frameSetID in (%s)" % frameSetIDs
        fieldInfo = None

    # Merge-log columns are ordered: general columns first, then repeated
    # (<band>mfID, <band>eNum) pairs - isPassband latches on the first mfID.
    for column, value in self.queryMetadata(mergeLog, whereStr):
        isPassband = isPassband or "mfid" in column.name.lower()
        if not isPassband:
            self.addKey(primary, column, value)

        elif "mfid" in column.name.lower():
            passband = column.name[:-len("mfid")]
            # @TODO: No longer necessary
            # NOTE(review): 'and' binds tighter than 'or', so tableType ==
            # 'MMD' alone forces the default mfID for every passband -
            # confirm this precedence is intended.
            if (tableType == 'MMD' or tableType == 'MFSC' and passband != self.band.lower()):
                mfID = dbc.intDefault()
            else:
                mfID = value
            if self.esoProgReq.fstEpchSrc == 1 and 'MD' not in tableType:
                # First-epoch releases: use the substituted per-band frame.
                mfID = getattr(fieldInfo, "%smfID" % passband)
        elif "enum" in column.name.lower() and mfID != dbc.intDefault():
            # The eNum column following a valid mfID completes the pair;
            # now all per-band metadata for this frame can be added.
            eNum = value
            mfIDs.append(mfID)

            self.addPbMetadata(primary, passband,
                               self._metaSchema["Multiframe"], mfID)

            self.addPbMetadata(primary, passband,
                               self._metaSchema["MultiframeEsoKeys"], mfID)

            self.addPbMetadata(fitsTable, passband,
                               self._metaSchema["MultiframeDetector"], mfID, eNum)

            self.addPbMetadata(fitsTable, passband,
                               self._metaSchema["CurrentAstrometry"], mfID, eNum)
            # @TODO: temporary
            if 'MD' not in tableType:
                fitsTable.header.update("HIERARCH %s_ABMAGLIM" % passband.upper(),
                                        self.magLimSatDict[mfID][0], "AB magnitude limit")
                fitsTable.header.update("HIERARCH %s_ABSATMAG" % passband.upper(),
                                        self.magLimSatDict[mfID][1], "AB saturation limit")

    # if len(mfIDs) is 1 and tableType == 'SFSC':
    #     Logger.addMessage("<Info> Frame set contains only a single frame"
    #         " for file %s. Skipping..." % os.path.basename(filePath))

    #     return
    if 'MD' in tableType:
        # select all mfIDs
        # Metadata product spans every frame set, so rebuild the full list.
        mfIDs = []
        for rowData in self.queryMetadata(mergeLog, whereStr, allRows=True):
            mfIDs += [value for column, value in rowData
                      if "mfid" in column.name.lower()]
    # Order the file products to match the mfID order before adding the
    # primary frame and standard ESO keys.
    frames = utils.arbSort(self.queryFileProducts(mfIDs), mfIDs,
                           key=attrgetter('fileID'))
    self.addPrimaryFrameKeys(primary, frames[0])
    self.addStandardKeys(primary.header, fitsTable.header, frames, bandList,
                         tableType, field)
2282 2283
2284 - def updateMPhotHeaders(self, tableType, field, primary, fitsTable, bandList):
2285 """ 2286 """ 2287 band = bandList[0] 2288 # @FIXME = get mfID list earlier when data is generated.... 2289 2290 # Query MergeLog for passband-specific metadata 2291 mergeLog = self._progSchema[self.programme.getMergeLogTable()] 2292 isCorrel = 'SynopticSource' in self.programme.getSynopticBestMatchTable() 2293 # filters = [col.name.replace("mfID", '') for col in mergeLog.columns 2294 # if "mfID" in col.name] 2295 if 'MD' not in tableType: 2296 fieldStr = "frameSetID=%s" % field.fileID 2297 fieldInfo = self.fieldInfoDict[field.fileID] 2298 else: 2299 fieldStr = "" 2300 if self.fieldIDs: 2301 # @TODO: convert to correct list 2302 2303 # 2304 fSetIDs = [queries.getFrameSetID(self.releaseDB, fieldID, self.programme) 2305 for fieldID in self.fieldIDs] 2306 frameSetIDs = ','.join([str(fSetID) for fSetID in fSetIDs if fSetID not in self.skipFieldList]) 2307 self.nPointings = len(frameSetIDs.split(',')) 2308 fieldStr = "frameSetID in (%s)" % frameSetIDs 2309 fieldInfo = None 2310 2311 isPassband = False 2312 for column, value in self.queryMetadata(mergeLog, fieldStr): 2313 isPassband = isPassband or "mfid" in column.name.lower() 2314 if not isPassband: 2315 self.addKey(primary, column, value) 2316 2317 # Get all epoch images from best-match table 2318 # "fileName fileID ra dec filterName frameType confID productID" 2319 2320 selectStr = ("distinct fileName,m.multiframeID,%s as ra,%s as dec,m.filterName," 2321 "m.frameType,m.confID,%s as productID" % (dbc.realDefault(), dbc.realDefault(), 2322 dbc.intDefault())) 2323 2324 2325 if isCorrel: 2326 fromStr = ("%s as b,%s as ml,Multiframe as m,%s as s,%s as l" % 2327 (self.programme.getSynopticBestMatchTable(), 2328 self.programme.getSynopticMergeLogTable(), 2329 self.programme.getSourceTable(), self.programme.getMergeLogTable())) 2330 whereStr = ("s.sourceID=b.sourceID and s.%s and (s.priOrSec=0 or " 2331 "s.priOrSec=s.frameSetID) and b.synFrameSetID=ml.synFrameSetID and " 2332 "b.synFrameSetID>0 and 
ml.%smfID=m.multiframeID and l.frameSetID=" 2333 "s.frameSetID and m.multiframeID>0" % (fieldStr, band)) 2334 2335 if self.isCutOut: 2336 fromStr += (",(select frameSetID,synFrameSetID from %s as l1,%s as sl1 where " 2337 "dbo.fGreatCircleDist(l1.ra,l1.dec,sl1.ra,sl1.dec)<4) as mf" % 2338 (self.programme.getMergeLogTable(), self.programme.getSynopticMergeLogTable())) 2339 whereStr += (" and l.frameSetID=mf.frameSetID and mf.synFrameSetID=ml.synFrameSetID") 2340 2341 else: 2342 fromStr = ("%s as b,Multiframe as m,Filter as f,%s as s, %s as l" % 2343 (self.programme.getSynopticBestMatchTable(), 2344 self.programme.getSourceTable(), self.programme.getMergeLogTable())) 2345 whereStr = ("s.sourceID=b.sourceID and s.%s and (s.priOrSec=0 or " 2346 "s.priOrSec=s.frameSetID) and b.multiframeID=m.multiframeID and " 2347 "m.filterID=f.filterID and f.shortName='%s' and l.frameSetID=" 2348 "s.frameSetID" % (fieldStr, band)) 2349 if self.isCutOut: 2350 fromStr += (",(select frameSetID,c.multiframeID from %s as l1,CurrentAstrometry as c," 2351 "ProgrammeFrame as p,Multiframe as m where p.programmeID=%s and " 2352