Package invocations :: Package cu13 :: Module cu13
[hide private]

Source Code for Module invocations.cu13.cu13

   1  #! /usr/bin/env python 
   2  #------------------------------------------------------------------------------ 
   3  #$Id: cu13.py 10077 2013-09-20 14:51:03Z RossCollins $ 
   4  """ 
   5     Invoke CU13 to create required products. 
   6   
   7     @author: N.J.G. Cross 
   8     @org:    WFAU, IfA, University of Edinburgh 
   9   
  10     @newfield contributors: Contributors, Contributors (Alphabetical Order) 
  11     @contributors: R.S. Collins, E. Sutorius 
  12  """ 
  13  #------------------------------------------------------------------------------ 
  14  from   __future__      import division, print_function 
  15  from   future_builtins import map, zip 
  16   
  17  from   collections import defaultdict, namedtuple 
  18  import math 
  19  import multiprocessing 
  20  import numpy 
  21  from   operator    import attrgetter, itemgetter 
  22  import os 
  23  import pyfits 
  24  from   Scientific.Functions import LeastSquares as LSQ 
  25  import shutil 
  26  import time 
  27   
  28  import wsatools.Astrometry              as astro 
  29  import wsatools.Calibration             as calib 
  30  from   wsatools.CLI                 import CLI 
  31  import wsatools.CSV                     as csv 
  32  import wsatools.DataFactory             as df 
  33  import wsatools.DbConnect.CommonQueries as queries 
  34  from   wsatools.DbConnect.CuSession import CuSession 
  35  import wsatools.DbConnect.DbConstants   as dbc 
  36  from   wsatools.DbConnect.DbSession import Join, SelectSQL 
  37  import wsatools.DbConnect.Schema        as schema 
  38  import wsatools.ExternalProcess         as extp 
  39  import wsatools.FitsUtils               as fits 
  40  from   wsatools.Logger              import Logger, ForLoopMonitor 
  41  import wsatools.ProvenanceUtils         as pu 
  42  from   wsatools.SExtractorInterface import SExtractorInterface 
  43  import wsatools.Statistics              as stats 
  44  from   wsatools.SystemConstants     import DepCodes, SystemConstants 
  45  import wsatools.Utilities               as utils 
#------------------------------------------------------------------------------

class Cu13(CuSession):
    """
    Creates deep stacks and confidence images from intermediate stacks
    derived from MSB observations. The deep stacks are described in the
    RequiredStack table and have a UID comprising the programmeID and
    productID. The deep stacks are produced using the CASU fitsio_stack
    code. Various options allow the operator to choose which stacks to
    produce and how many intermediate stacks to use. The database release
    number (DR#) defines the directory where the mosaic will be found.

    """
    # @TODO: Stacking etc for OSA...
    # @TODO: Testing - seeing, ZP, class .....
    #

    #--------------------------------------------------------------------------
    # Class data types

    #: Deep-stack pointing: position, filter and optional dual-image partner.
    Pointing = namedtuple("Pointing", "index ra dec filterID dualID")

    #: Full identification of a created product and its associated files.
    ProductDetails = namedtuple("ProductDetails",
                                "productID multiframeID imageName confName catName")

    #--------------------------------------------------------------------------
    # Class constants (access as Cu13.varName)

    #: Curation Use number for this task.
    cuNum = 13

    #: Airmass value for FITS products (must be a float).
    airmass = 1.0

    #: External zeropoint accuracy in mags.
    externalZPAccuracy = 0.01

    #: Default aperture; 3 by default, 7 for UDS.
    aperture = defaultdict(lambda : 3, UDS=7)

    #: Extinction value for FITS products (must be a float).
    extinct = 0.0

    #: Maximum amount of memory that CU13 should consume in kilobytes.
    memoryLimit = 0.5 * utils.getSystemMemoryKb()

    #: Number of objects per bin (original doc was "?" -- TODO confirm meaning).
    numObjsBin = 100

    #: Detection-table size (rows) above which the table is "large"
    #: (see isDetTableTooBig below).
    largeDetTableLimit = 1.0e9

    # Private class parameters - should not be altered
    _autoCommit = True
    _isPersistent = True
    _reqDbLock = False
    _useWorkDir = True

    #--------------------------------------------------------------------------
    # Public member variable default values (access as obj.varName)

    # Configurations: override the defaults to change script behaviour
    casuSoftDir = None        #: CASU software directory
    filterer = "nebuliser"    #: Image filtering program, e.g. nebuliser.
    stacker = "imstack"       #: Image stacker program, e.g. fitsio_stack.
    tiler = "mosaic"          #: Image tiler program, e.g. mosaic

    #: Bit offsets of each processing stage, used to record bit-wise which
    #: stages of product creation have completed.
    bitWiseProcessingDict = {
        # stacking
        'imstack': 0,
        # tiling
        'nebuliser': 3,
        'mosaic': 4,
        'filtmos': 5,
        # external mosaic
        'imageproc': 6,
        # headers
        'prepHeaders': 8,
        # extraction
        'sex': 12,
        'imcore': 14,
        'wcsfit': 15,
        'intUpdate': 18,
        'grout': 19,
        'classify': 20,
        # finish off
        'calcZP': 24,
        'testProduct': 25,
        'complete': 30,

    }

    # Inputs: these need to be set from command-line options
    breakIntoParts = False  #: Stack with intermediate stages?
    dateRange = CuSession.sysc.obsCal.dateRange()  #: Nights to stack.
    dirDate = time.strftime("%Y%m%d", time.gmtime())  #: Output dir date.
    doQFlagCheck = True   #: Check that detection quality flags are set?
    extractor = None      #: Override database choice of catalogue extractor.
    isCompressed = True   #: Compress deep stack files?
    maxProdID = None      #: Maximum product ID of stack to be produced.
    minProdID = None      #: Minimum product ID of stack to be produced.
    notDepOnly = False    #: Use non-deprecated stacks only
    numCPUs = multiprocessing.cpu_count()  #: Number of CPUs to utilise.
    numParts = None       #: Force stacking to uses this number of stages.
    numStack = None       #: Number of intermediate stacks to be used.
    redoHeader = False    #: Don't create stacks just update headers?
    releaseNum = None     #: Release number of stacks to create.
    reprocessTypes = None #: List of product types that need reprocessing.
    restartDir = None     #: Rerun in this directory
    reqProdIDs = None     #: List of product IDs for stacks to be created.
    useDeep = False       #: Use previous stack as part of this stack?
    stackOnly = False     #: Only create stacks

    # Working variables
    deepProductTable = None
    """ A DataFactory.View of the contents of the database table
        Required{Product} for current programme and product type.
    """
    existingProducts = None
    """ List of details of products already ingested in the database.
    """

    fileListForType = None
    """ Dictionary of paths to the ingest file lists referenced by product
        type. This may be preset by the calling task or else defaults will be
        used.
    """
    prodInfo = None
    """ The contents of the database table Required{Product} for current
        programme and product type.
    """
    pType = None
    """ Product type currently being curated.
    """
    reprocess = False
    """ Reprocess existing products of the current type?
    """
    reqProdsOfType = None
    """ Dictionary of lists of product IDs referenced by product type, giving
        the products required to be created. This may be preset by the calling
        task or else is automatically determined from the database.
    """
    _tmpWorkDir = None
    """ Common temporary working directory for all threads.
    """
    isDetTableTooBig = False
    """ Test whether detection table is too big to get OB cat data from archive.
    """

    # Outputs
    newProductsOfType = None
    """ Dictionary of ProductDetails for each type of product successfully
        created.
    """

    startedProductsOfType = None
    """ Dictionary of ProductDetails for each type of product for which
        creation has been started. (NOTE(review): original doc duplicated
        newProductsOfType's "successfully created" -- confirm intent.)
    """

    prodDirectory = None
    """ Dictionary of directory for each type of product successfully
        created.
    """
    extractIgnoreMsgs = ["WCSFIT: Matching routine failed -- "
                         "MATCHSTDS: No rows in object catalogue",
                         "CLASSIFY: missing header info",
                         "CLASSIFY: can't update header",
                         ""]
    """ List of messages to ignore when extracting
    """

    missExtensionErrorMsgs = ["Error running imcore_conf",
                              "imcore_backstats: all values appear to be zero",
                              "Error running classify"]
    """ List of messages to ignore when extracting if one bad extension
    """

    tileOBDict = None
    """ Dictionary of OB tiles for each tile product...
    """

    pptTileDict = None
    """ Look up for tile mfID based on pawprint mfID
    """

    obNonTiles = None
    """ Look up for OB components without a tile
    """

    prodMfIDDict = None
    """ Lookup for mfID of product if single epoch
    """

    deepProductTableDict = None
    """ Dictionary of product tables - one for each productType
    """
    #--------------------------------------------------------------------------
    def _onRun(self):
        """ Create products.

            For every required product type (stack, tile, mosaic) this:
            determines which products already exist, assigns productIDs to
            complete ones in ProgrammeFrame, and invokes createProducts()
            for those still to be made.
        """
        # VVV's detection table is too large to pull OB catalogue data
        # directly from the archive.
        self.isDetTableTooBig = self.programmeID in [self.sysc.scienceProgs.get("VVV")]
        # self.isDetTableTooBig = True

        # Initial CU checks - will raise CuError if setup is incorrect
        self.numCPUs = Cu13.getUsefulNumCPUs(self.numCPUs, self.programme,
                                             self.stackOnly)

        if self.numCPUs == multiprocessing.cpu_count():
            Logger.addMessage("<IMPORTANT> At certain times, this task will "
                "require use of all available CPUs. Please ensure no other "
                "tasks are running.")

        if self.doQFlagCheck and not self.isDetTableTooBig:
            self.checkQBFlags()

        # Initialise member variables
        self.extractor = (self.extractor
                          or self.programme.getAttr("extractTool"))
        softVersNum = self.getSoftVersion()
        if self.extractor == 'CASU':
            Logger.addMessage("Processing with CASU tools: %s" % softVersNum)
        else:
            Logger.addMessage("Processing with Astromatic: %s" % softVersNum)

        self.fileListForType = (self.fileListForType
                                or fits.createFileListDict(self.programme))

        # These may have been preset by a calling task; only default if unset.
        if not self.newProductsOfType:
            self.newProductsOfType = defaultdict(dict)
        if not self.startedProductsOfType:
            self.startedProductsOfType = defaultdict(dict)
        if not self.prodDirectory:
            self.prodDirectory = defaultdict(dict)

        self.reprocessTypes = \
            (list(self.reprocessTypes) if self.reprocessTypes else [])
        self._tmpWorkDir = self.sysc.tempWorkPath()

        Logger.addMessage("Checking for existing %s products..."
                          % self.programme.getAcronym().upper())

        filterTable = df.Table("Filter", self.archive)
        self.findContents()

        if not self.reqProdsOfType:
            self.reqProdsOfType = \
                queries.findRequiredDeepProducts(self.programme, self.minProdID,
                                                 self.maxProdID, self.reqProdIDs)
        # Get reprocess status.
        reprocStatus = queries.getRedoStatus(self.archive, self.programmeID,
                                             self.reprocessTypes)
        productTypes = (['stack'] if self.stackOnly else
                        self.programme.getProductTypes())
        if 'tile' in productTypes:
            # Make sure that stack req are there: every component stack of a
            # required tile is itself required.
            self.reqProdsOfType['stack'] = set(
                self.archive.query(
                    "intProductID", "ProductLinks",
                    "programmeID=%s AND combiProdType='tile' AND "
                    "combiProductID IN (%s) AND intProdType='stack'"
                    % (self.programmeID, ','.join(map(str, self.reqProdsOfType['tile'])))))

        self.deepProductTableDict = dict()
        for self.pType in productTypes:
            tableName = "Required" + self.pType.title()
            curSchema = self.sysc.curationSchema()
            columns = schema.parseTables(curSchema, [tableName])[0].columns
            output = queries.getProductInfo(self.archive, self.programmeID,
                self.pType, self.reqProdsOfType[self.pType])

            self.deepProductTableDict[self.pType] = df.View(map(str, columns), output)

            self.reprocess = reprocStatus[self.pType]
            # Needs to be updated after each product cycle to take into
            # account any new products in the previous layer
            self.existingProducts = self.archive.query(
                selectStr="productID, M.frameType, releaseNum, "
                          "M.multiframeID, M.fileName AS imgName, "
                          "M2.fileName AS confName, M.catName",
                fromStr="ProgrammeFrame P, Multiframe M, Multiframe M2",
                whereStr="M.multiframeID=P.multiframeID AND programmeID=%s"
                         " AND M.%s AND M2.multiframeID=M.confID"
                         % (self.programmeID, DepCodes.selectNonDeprecated)
                         + " AND productID>0 AND M.frameType LIKE '%stack'")

            if self.pType not in self.reqProdsOfType \
                or not self.reqProdsOfType[self.pType]:
                Logger.addMessage("No %s products to process" % self.pType)
                continue

            Logger.addMessage("Preparing %s products..." % self.pType.upper())
            # @TODO: Why does the next few parts take so long for VHS.
            self.setUpProductsInfo()
            Logger.addMessage("Setup products")
            # If stacks are components of tiles...
            if self.pType == 'stack':
                self.imageProductDict = queries.getAllFieldObsProd(
                    self.archive, self.reqProdsOfType[self.pType],
                    self.programmeID, self.pType, self.dateRange,
                    deprecated="deprecated=0",
                    noDeeps=((not self.useDeep) or self.reprocess),
                    onlyTileComps=self.programme.isShallowTiled())
                self.idImageProductDict = queries.getAllFieldObsProd(
                    self.archive, self.reqProdsOfType[self.pType],
                    self.programmeID, self.pType, self.dateRange,
                    deprecated=DepCodes.selectNonDeprecated,
                    noDeeps=((not self.useDeep) or self.reprocess),
                    onlyTileComps=self.programme.isShallowTiled())
                self.getProdMfID()
            else:
                self.imageProductDict = None
                self.idImageProductDict = None
                self.deepProdMfIDDict = None
            if self.pType == 'stack' and 'tile' in productTypes:
                # Simplify
                # Check if current deeps are from before Aug 2012
                # If so, and not reprocess stacks or ProductProcessing contains data
                potDodgyPids = self.arePotDodgyDeeps()
                if (len(potDodgyPids) > 0 and not self.reprocess and
                    not self.archive.queryEntriesExist("ProductProcessing",
                    "programmeID=%s and productType='stack'" % self.programmeID)):
                    if not CLI.isConfirmed("There are existing deep pawprints which "
                        "have been stacked in the incorrect order, so tiles "
                        "will have poor photometry. These are productIDs %s. "
                        "Confirm if you want to continue" % (','.join(map(str, potDodgyPids)))):
                        raise SystemExit()
                self.tileOBDict = defaultdict(list)
                self.obNonTiles = defaultdict(list)
                self.pptTileDict = {}
                self.compDict = {}
                self.prodMfIDDict = {}
                # Get expected order
                self.getExpectedOrder(filterTable)  #
                # Only reset initially
                # Reset any malformed products.
                # Map "dir/file.fit" (last two path elements) -> multiframeID.
                self.fileNameMfIDLookUp = dict([(os.path.join(os.path.basename(
                    os.path.dirname(fName)), os.path.basename(fName)), mfID)
                    for fName, mfID in self.archive.query(
                        selectStr="fileName,m.multiframeID",
                        fromStr="Multiframe as m,ProgrammeFrame as p",
                        whereStr="p.programmeID=%s and m.multiframeID=p.multiframeID "
                                 "and frameType='stack'" % self.programmeID)])
                # If reusing deep products....
                #if (self.areMalformedProducts() and not reprocStatus[self.pType]
                #    and not self.archive.queryEntriesExist("ProductProcessing",
                #    "programmeID=%s and productType='stack'" % self.programmeID)):
                #    raise Cu13.CuError("There are existing deep pawprints which "
                #        "have been stacked in the incorrect order, so tiles "
                #        "will have poor photometry. Please rerun with the "
                #        "stack reprocessing option")
            completeProducts = []
            probList = []
            newProducts = []
            #prodDep = set()
            componentsOfProd = {}
            productOfID = {}
            Logger.addMessage("Checking %s %ss..."
                % (len(self.reqProdsOfType[self.pType]), self.pType))

            for productID in sorted(self.reqProdsOfType[self.pType]):
                # set up class
                bestProduct = ProductDefinition(self, productID, filterTable)
                createdProduct = \
                    self.newProductsOfType[self.pType].get(productID)
                # Now determine which products need additional processing
                if self.pType == 'stack' and 'tile' in productTypes:
                    components = self.compDict[productID]
                    bestProduct.componentOrder = self.setComponentOrder(productID, components)
                    bestProduct.compProductType = bestProduct.productType
                    if components:
                        bestProduct.newProduct = True
                    else:
                        bestProduct.prodMfID = self.prodMfIDDict[productID]
                else:
                    components = bestProduct.findRequiredInputs()

                if components and not createdProduct:
                    newProducts.append(productID)
                    componentsOfProd[productID] = components
                    productOfID[productID] = bestProduct

                elif bestProduct.prodMfID:
                    completeProducts.append((productID, bestProduct.prodMfID))

                elif createdProduct:
                    Logger.addMessage("ProductID=%s already created: %s"
                        % (productID, createdProduct.imageName), alwaysLog=False)

                else:
                    raise Cu13.CuError("No data found for this %s:"
                        " productID=%s" % (self.pType, productID))

                #if (self.pType == "mosaic" and bestProduct.getDualProductID()):
                #    prodDep.add(bestProduct.getDualProductID())

                if bestProduct.problemComponents:
                    probList += bestProduct.problemComponents

            if not completeProducts:
                Logger.addMessage("ProgrammeFrame.productID is up-to-date for"
                                  " completed %s products." % self.pType)
            else:
                Logger.addMessage("Updating ProgrammeFrame.productID for "
                                  "complete %ss..." % self.pType)
                total = 0
                for productID, multiframeID in completeProducts:
                    total += self.archive.update("ProgrammeFrame",
                        entryList=[("productID", productID),
                                   ("releaseNum", self.releaseNum)],
                        where="programmeID=%s AND multiframeID=%s"
                              % (self.programmeID, multiframeID))
                    # update conf
                    self.archive.update("ProgrammeFrame",
                        entryList=[("productID", productID),
                                   ("releaseNum", self.releaseNum)],
                        where="programmeID=%s AND Multiframe.multiframeID=%s"
                              " AND Multiframe.confID=ProgrammeFrame.multiframeID"
                              % (self.programmeID, multiframeID),
                        fromTables="Multiframe")

                Logger.addMessage("...assigned %s completed products." % total)

            if not newProducts:
                Logger.addMessage("<Info> No new data available to create new"
                                  " required %s products." % self.pType)
            else:
                newProducts.sort()
                self.createProducts(newProducts, productOfID, componentsOfProd)

            if probList:
                Logger.addMessage("%s deep stacks have problematic components"
                                  % len(probList))

                logOutputDir = (self.sysc.missingFilesFilePath()
                                if self.archive.isRealRun else
                                os.path.join(self.sysc.testOutputPath(),
                                             self.sysc.productLogDir))

                utils.ensureDirExist(logOutputDir)
                probFileName = "cu13_probFilelist%s_v%s.log"
                probFileName %= (self.dirDate, self.releaseNum)
                probFileList = os.path.join(logOutputDir, probFileName)
                Logger.addMessage("Writing problem file list to "
                                  + probFileList)

                csv.File(probFileList, 'a').writelines(probList)

        # Finish off: remind the operator to ingest each non-empty file list.
        for pType in self.fileListForType:
            if os.path.exists(self.fileListForType[pType]):
                Logger.addMessage("Ingest release %s %ss from file list %s"
                                  % (self.releaseNum,
                                     pType, self.fileListForType[pType]))
503 504 #-------------------------------------------------------------------------- 505
506 - def getSoftVersion(self):
507 """ 508 """ 509 if self.extractor == 'CASU': 510 cmdPath = None 511 if self.casuSoftDir: 512 return 'casutools-' + os.path.dirname(self.casuSoftDir).split('casutools-')[1] 513 else: 514 # first find path 515 cmd = 'imstack' 516 for path in os.environ["PATH"].split(":"): 517 if os.path.exists(os.path.join(path, cmd)): 518 cmdPath = path 519 if cmdPath: 520 return 'casutools-' + os.readlink(os.path.dirname(cmdPath)).split('casutools-')[1] 521 else: 522 return "UNDEFINED" 523 else: 524 stdout = extp.run('sex --version') 525 if stdout: 526 return stdout[0] 527 else: 528 return "UNDEFINED"
529 #-------------------------------------------------------------------------- 530
    def arePotDodgyDeeps(self):
        """ Simple test - anything before Aug 2012 is redone.

            Deep pawprint stacks created before 31 Aug 2012 were stacked in
            an order that gives tiles poor photometry.

            @return: Set of required stack productIDs whose only matching
                     deep stacks were created between Sep 2009 and Aug 2012
                     (i.e. potentially dodgy, with no good recent deep).
            @rtype:  set(int)
        """
        # @FIXME If old deeps contain deprecated inputs, do not include
        # Check if any Required X has multiple OBs - deeps needed and
        # latest deep is at position...
        deepStacks = set(pID for pID in self.reqProdsOfType['stack']
                         if self.imageProductDict[pID])

        # Get ones that are deepStacks and not goodOldDeepPids.
        # "Good" deeps: non-tile deep stacks created on/after 31 Aug 2012
        # whose position/filter matches the required stack definition.
        goodOldDeepPids = set(self.archive.query(
            "distinct s.productID",
            "Multiframe as m,ProgrammeFrame as p,RequiredStack as s",
            "p.programmeID=%s and p.productID>0 and p.multiframeID=m.multiframeID "
            "and frameType like 'deep%%stack' and frameType not like "
            "'%%tile%%' and deprecated in (0,255) and creationDate "
            "BETWEEN '20120831' AND '21000101' and s.filterID=m.filterID and "
            "s.programmeID=p.programmeID and "
            "dbo.fGreatCircleDist(s.ra,s.dec,(15.*rabase),decbase)<(60.*s.stackRadius)"
            % self.programmeID))

        potDodgyDeeps = deepStacks - goodOldDeepPids

        # Are any of these oldDeepsPids
        # Could just be new pointings
        # @FIXME: Sometimes components in old deeps deprecated and now just a
        # single pointing......
        # Optional is it safe to continue... instead of error, listing
        # PIDs which may have problems...
        # Same selection as above, but restricted to the pre-Aug-2012 window.
        oldDeepsPids = self.archive.query(
            "distinct s.productID",
            "Multiframe as m,ProgrammeFrame as p,RequiredStack as s",
            "p.programmeID=%s and p.productID>0 and p.multiframeID=m.multiframeID "
            "and frameType like 'deep%%stack' and frameType not like "
            "'%%tile%%' and deprecated in (0,255) and s.filterID=m.filterID and "
            "s.programmeID=p.programmeID and "
            "dbo.fGreatCircleDist(s.ra,s.dec,(15.*rabase),decbase)<(60.*s.stackRadius) "
            "and creationDate BETWEEN '20090930' AND '20120831'"
            % self.programmeID)

        return potDodgyDeeps.intersection(oldDeepsPids)
572 573 #-------------------------------------------------------------------------- 574
575 - def getProdMfID(self):
576 """ Get dictionary of productMfIDs 577 """ 578 self.deepProdMfIDDict = {} 579 if not self.reprocess and self.useDeep: 580 frameTypeSel = queries.getFrameSelection('stack', deepOnly=True) 581 productMfIDs = self.archive.query( 582 selectStr="s.productID,m.multiframeID", 583 fromStr="ProgrammeFrame as p,Multiframe as m, RequiredStack as s", 584 whereStr="s.programmeID=p.programmeID AND " 585 "m.multiframeID=p.multiframeID AND %s AND " 586 "deprecated IN (%s,%s) AND s.programmeID=%d AND " 587 "m.filterID=s.filterID AND dbo.fGreatCircleDist(s.ra, " 588 "s.dec, 15*rabase, decbase) < 60*s.stackRadius" 589 % (frameTypeSel, DepCodes.nonDep, 590 DepCodes.reprocWFAU, self.programmeID), 591 orderBy="s.productID,m.multiframeID DESC") 592 # Select first mfID for each product... 593 if productMfIDs: 594 prodID = productMfIDs[0].productID 595 firstOne = True 596 for productID, mfID in productMfIDs: 597 if productID == prodID and firstOne: 598 self.deepProdMfIDDict[productID] = mfID 599 firstOne = False 600 else: 601 if productID != prodID: 602 self.deepProdMfIDDict[productID] = mfID 603 prodID = productID 604 firstOne = False
605 606 607 #-------------------------------------------------------------------------- 608
609 - def areMalformedProducts(self):
610 """ Check to see if their are problems with OBs in wrong order in existing products 611 """ 612 badExistProd = False 613 for productID in self.reqProdsOfType['tile']: 614 # NO: This gets previous components if any, but doesn't look at previous deeps... 615 pwpProductIDs = self.pawprintProdDict[productID] 616 # check existing and new products 617 # productID,frameType,releaseNum,mfID,fileName,confName,catName 618 #ppProd = [(exPrd.productID, exPrd.imgName) for exPrd in self.existingProducts 619 # if 'tile' not in exPrd.frameType and 620 # exPrd.frameType.endswith('stack') and 621 # exPrd.productID in pwpProductIDs] 622 # productID,mfID,fileName,confName,catName 623 #ppProd += [(nwPrd.productID, nwPrd.fileName) for nwPrd in self.newProductsOfType['stack'] 624 # if nwPrd.productID in pwpProductIDs] 625 for pwpProdID in pwpProductIDs: 626 if productID in self.inputDeepPaw: 627 ppProd = [(nwPrd.productID, nwPrd.imageName) for nwPrd in self.inputDeepPaw[productID] 628 if nwPrd.productID == pwpProdID] 629 badExistProd = self.testProv(ppProd, productID, badExistProd) 630 631 return badExistProd
632 633 #------------------------------------------------------------------------------ 634
635 - def getExpectedOrder(self, filterTable):
636 """ For each tile work out best OB order 637 """ 638 pLinks = self.archive.query( 639 selectStr="combiProductID,intProductID", 640 fromStr="ProductLinks", 641 whereStr="programmeID=%s and combiProdType='tile' and " 642 "intProdType='stack'" % self.programmeID) 643 tileProds = sorted(list(set([cPID for cPID, _iPID in pLinks]))) 644 self.pawprintProdDict = dict() 645 for tileID in tileProds: 646 self.pawprintProdDict[tileID] = [iPID for cPID, iPID in pLinks 647 if tileID == cPID] 648 self.inputDeepPaw = dict() 649 for productID in self.reqProdsOfType['tile']: 650 obTileDict = dict() 651 obTileSet = set() 652 for pwpProdID in self.pawprintProdDict[productID]: 653 pwpProd = ProductDefinition(self, pwpProdID, filterTable) 654 self.compDict[pwpProdID] = pwpProd.findRequiredInputs() 655 self.prodMfIDDict[pwpProdID] = pwpProd.prodMfID 656 pwpList = (self.archive.query( 657 "%s as productID,m.multiframeID,m.fileName as imageName," 658 "c.fileName as confName,m.catName" % pwpProdID, 659 "Multiframe as m,Multiframe as c", 660 "m.confID=c.multiframeID and m.multiframeID=%s" 661 % pwpProd.prodMfID) 662 if pwpProd.prodMfID else self.compDict[pwpProdID]) 663 664 # Break any deeps into parts... 
665 obPwpList = self.getOBList(productID, pwpProdID, pwpList) 666 obTileDict[pwpProdID] = self.getTiles(obPwpList) if obPwpList else [] 667 668 [obTileSet.add((tileInfo.combiframeID, tileInfo.mjdObs)) for tileInfo in 669 obTileDict[pwpProdID] if tileInfo.combiframeID > 0] 670 self.obNonTiles[pwpProdID] = [(tileInfo.mjdObs, tileInfo.multiframeID) 671 for tileInfo in obTileDict[pwpProdID] 672 if tileInfo.combiframeID < 0] 673 self.pptTileDict.update([(tileInfo.multiframeID, tileInfo.combiframeID) 674 for tileInfo in obTileDict[pwpProdID]]) 675 676 # Get ordered list of inputs with no of times they occur 677 obTileOrder = sorted(list(obTileSet), key=itemgetter(1)) 678 679 compTiles = [] 680 inCompTiles = [] 681 for tlmfID, _mjdobs in obTileOrder: 682 isComplete = (len([pwpProdID for pwpProdID in self.pawprintProdDict[productID] 683 if tlmfID in [tlm for tlm, _ppm, _mjd in obTileDict[pwpProdID]]]) 684 == len(self.pawprintProdDict[productID])) 685 if isComplete: 686 compTiles.append((tlmfID, True)) 687 else: 688 inCompTiles.append((tlmfID, False)) 689 # Non-tiles...pawprints not associated 690 691 # @TODO: Do we need to group? 692 # self.groupNonTiles(obNonTiles) 693 694 695 696 if not compTiles: 697 self.tileOBDict[productID] = None 698 else: 699 self.tileOBDict[productID] = compTiles + inCompTiles
700 701 #----------------------------------------------------------------- 702 703
704 - def getOBList(self, productID, pwpProdID, pwpList):
705 """ Splits any deeps into components 706 """ 707 deepMfIDs = [pwp.multiframeID for pwp in pwpList 708 if '_dp' in pwp.imageName] 709 self.inputDeepPaw[productID] = [Cu13.ProductDetails(pwpProdID, pwp.multiframeID, pwp.imageName, pwp.confName, pwp.catName) for pwp in pwpList 710 if '_dp' in pwp.imageName] 711 obList = [pwp for pwp in pwpList if '_dp' not in pwp.imageName] 712 if deepMfIDs: 713 obList += self.archive.query( 714 selectStr="%s as productID,m.multiframeID,m.fileName as imageName," 715 "c.fileName as confName,m.catName" % dbc.intDefault(), 716 fromStr="Multiframe as m,Provenance as v,Multiframe as c", 717 whereStr="v.combiframeID in (%s) and v.multiframeID=" 718 "m.multiframeID and c.multiframeID=m.confID" % 719 ','.join(map(str, deepMfIDs))) 720 return obList
721 722 #----------------------------------------------------------------- 723
    def getTiles(self, pawprintComps):
        """ Find OB tiles made from these components.

            @param pawprintComps: Component pawprint frame details; only the
                                  multiframeID attribute is used here.
            @return: Rows of (combiframeID, multiframeID, mjdObs), ordered by
                     mjdObs. Pawprints with no OB tile get synthetic
                     *negative* combiframeIDs so callers can tell them apart.
        """
        tiles = self.archive.query(
            selectStr="combiframeID,v.multiframeID,mjdObs",
            fromStr="Provenance as v,Multiframe as m",
            whereStr="v.multiframeID in (%s) and m.multiframeID=v.combiframeID "
                     "and m.frameType='tilestack' ORDER BY mjdObs" % ','.join(map(str,
                     [comp.multiframeID for comp in pawprintComps])))

        # Pawprints that are not part of any OB tile
        missingOBppts = (set([comp.multiframeID for comp in pawprintComps]) -
                         set([gt.multiframeID for gt in tiles]))
        if missingOBppts:
            # Give each a unique negative combiframeID via -ROW_NUMBER()
            missTiles = self.archive.query(
                selectStr="(-ROW_NUMBER() OVER (ORDER BY m.multiframeID)) as combiframeID,"
                          "m.multiframeID,mjdObs",
                fromStr="Multiframe as m,MultiframeEsoKeys as e",
                whereStr="m.multiframeID in (%s) and m.multiframeID=e.multiframeID "
                         "ORDER BY mjdObs" % ','.join(map(str, missingOBppts)))
            tiles += missTiles
        return tiles
744 745 #-------------------------------------------------------------------------- 746 747
    def testProv(self, pawprintProducts, tileProdID, badOrder):
        """ Get list of OBs that went into pawprint and check that their
            order matches the expected tile order for this tile product.

            @param pawprintProducts: (productID, fileName) pairs of existing
                                     deep pawprint products to check.
            @param tileProdID: Tile productID whose expected OB order
                               (self.tileOBDict) is the reference.
            @param badOrder: Running bad-order flag; once True it stays True.
            @return: Updated bad-order flag.
            @rtype:  bool
        """
        # Check first frame in e
        for productID, fileName in pawprintProducts:
            # Open fileName. read in header info
            # NOTE(review): fits here is wsatools.FitsUtils - presumably
            # returns a pyfits-style HDU list; confirm.
            hdulist = fits.open(fileName)
            fMfIDList = []
            provNo = 1
            key = 'PROV%04d' % provNo
            # All extensions have same list, so read provenance (the ordered
            # list of input OB frames) from extension 1 only.
            while key in hdulist[1].header:
                fMfIDList.append(self.fileNameMfIDLookUp[hdulist[1].header[key]])
                provNo += 1
                key = 'PROV%04d' % provNo
            hdulist.close()
            for ii, obPaw in enumerate(fMfIDList):
                if ii >= len(self.tileOBDict[tileProdID]):
                    # More prov than in good tileOBs
                    # May be in self.obNonTiles
                    if not (obPaw in self.obNonTiles[productID]):
                        badOrder = True
                # Position ii must either hold the expected *complete* OB
                # tile, or the expected entry is an incomplete tile (in which
                # case any order is accepted).
                elif not ((self.pptTileDict[obPaw] == self.tileOBDict[tileProdID][ii][0] and
                           self.tileOBDict[tileProdID][ii][1]) or
                          not self.tileOBDict[tileProdID][ii][1]) :
                    badOrder = True

        return badOrder
777 #-------------------------------------------------------------------------- 778
779 - def setComponentOrder(self, productID, components):
780 """ Creates an order for the stacking. 781 """ 782 # DEEPS... 783 # components - productID multiframeID imageName confName catName 784 tileProdID = self.archive.query("combiProductID", "ProductLinks", 785 "programmeID=%s and combiProdType='tile' and intProdType='stack' " 786 "and intProductID=%s" % (self.programmeID, productID), 787 firstOnly=True) 788 compOrder = [comp for comp in components if self.sysc.deepSuffix in comp.imageName] 789 790 compOrder += [comp for tlmfID, _isComp in self.tileOBDict[tileProdID] 791 for comp in components 792 if self.sysc.deepSuffix not in comp.imageName and self.pptTileDict[comp.multiframeID] == tlmfID] 793 compOrder += [comp for _mjd, mfID in self.obNonTiles[productID] 794 for comp in components 795 if self.sysc.deepSuffix not in comp.imageName and comp.multiframeID == mfID] 796 return compOrder
797 798 #-------------------------------------------------------------------------- 799
800 - def createProducts(self, newProducts, productOfID, componentsOfProd):
801 """ 802 Creates the products in the given list. 803 804 """ 805 spaceReq = self.calcDiskSpaceRequired(len(newProducts)) 806 outputDir = self.createProductDir(spaceReq) 807 for productID in newProducts: 808 productOfID[productID].outputDir = outputDir 809 productType = productOfID[productID].productType 810 811 Logger.addMessage("%ss will be produced in %s" 812 % (self.pType.title(), outputDir)) 813 814 Logger.addMessage("Number of %ss to be created or updated: %s" 815 % (self.pType, len(newProducts))) 816 817 Logger.addMessage("...with productIDs: " 818 + utils.numberRange(newProducts)) 819 820 # For each process, work out how many threads can be used 821 if productType == 'stack': 822 # stack - do the following processes 823 self.createStacks(newProducts, productOfID, componentsOfProd) 824 825 elif productType == 'tile': 826 # tile - do the following processes 827 self.createTiles(newProducts, productOfID, componentsOfProd) 828 829 elif productType == 'mosaic': 830 self.createMosaics(newProducts, productOfID, componentsOfProd) 831 832 # Splitting the list of products to work on into chunks of numThreads 833 # @TODO: This will always be as slow as slowest product, instead prefer 834 # to always have X active threads at any time so it starts a new 835 # product in a given thread as soon as one has finished, working 836 # through the original list. To do this we need to split the 837 # list into X=numThreads parts, with each chunk being total/X. 838 # (or even better have a common list to all threads that each 839 # pops off the current worker to minimise joining time at end) 840 # *However*, could only do this if we commit to doing all tiles 841 # followed by all catalogues in series, due to the fact 842 # catalogues can't be parallelised due to database queries. 
843 newFiles = [] 844 for productID in newProducts: 845 product = productOfID[productID] 846 if os.path.exists(product.fileName()): 847 newFiles.append(product.fileName()) 848 newFiles.append(product.confName()) 849 if os.path.exists(product.catName()): 850 newFiles.append(product.catName()) 851 self.newProductsOfType[self.pType][productID] = \ 852 Cu13.ProductDetails(productID, dbc.intDefault(), 853 product.fileName(), product.confName(), 854 product.catName()) 855 856 filePath = self.fileListForType[self.pType] 857 Logger.addMessage("Saving the list of %ss created for release %s to %s" 858 % (self.pType, self.releaseNum, filePath)) 859 860 file(filePath, 'a').write('\n'.join(newFiles) + '\n')
861 862 #-------------------------------------------------------------------------- 863
    def calcProcessLayers(self, processType, products, productOfID,
                          componentsOfProducts):
        """
        Organise products into processing layers.

        Estimates the memory footprint of each still-to-do product for the
        given process, then delegates to L{organiseProducts} to group them
        into layers that fit within the memory limit.

        @param processType:          Name of the pipeline stage, e.g.
                                     'imstack', 'nebuliser', 'imcore', 'sex',
                                     'mosaic', 'filtmos', 'imageproc'.
        @param products:             Candidate productIDs.
        @param productOfID:          Dict of productID -> product object.
        @param componentsOfProducts: Dict of productID -> component list
                                     (may be None for processes that do not
                                     need component images).

        @return: List of (productsInLayer, isParts) tuples from
                 L{organiseProducts}.

        """
        # No of threads per product
        if processType == 'nebuliser':
            # nebuliser is itself multi-threaded
            noCPUsPerProduct = 6
        else:
            noCPUsPerProduct = 1

        # Order products by memory used
        # check products to see if process has been completed:
        products = self.hasProcessBeenRun(products, productOfID, processType)
        if processType == 'imstack':
            # Memory scales with the full set of component images
            memoryPerProduct = [(productID, self.getRequiredMemory(
                self.getImageList(componentsOfProducts[productID])))
                for productID in products]

        elif processType == 'mosaic' or processType == 'filtmos':
            # Estimate from the tile field of view and pixel scale
            memoryPerProduct = []
            for productID in products:
                bestProd = productOfID[productID]
                microStepping = (bestProd.deepProductTable['nuStep'][bestProd.index]
                                 if self.sysc.hasMicroStep else 1)
                bitPix = 4  # bytes per pixel (32-bit data)
                memoryPerProduct.append((productID,
                    (3 * bitPix * self.sysc.fieldOfViewTile[0]
                     * self.sysc.fieldOfViewTile[1]
                     * (3600 * math.sqrt(microStepping) / self.sysc.instrumentPixScale) ** 2)
                    / SystemConstants.one_kilobyte))

        elif processType == 'imcore' or processType == 'sex':
            # @TODO: Might be lower for SExtractor?
            # Estimate as a multiple of the image file size on disk
            memoryPerProduct = []
            for productID in products:
                bestProd = productOfID[productID]
                memoryPerProduct.append((productID,
                    3 * getFileSizeKiloBytes(bestProd.fileName(), True)))

        elif processType == 'imageproc':
            # Estimate from the product's sky extent and pixel size
            memoryPerProduct = []
            for productID in products:
                bestProd = productOfID[productID]
                pixSize = bestProd.deepProductTable['pixelSize'][bestProd.index]
                microStep = (bestProd.deepProductTable['nuStep'][bestProd.index]
                             if self.sysc.hasMicroStep else 1)
                if pixSize < 0.:
                    # Negative pixel size is a "use instrument default" flag
                    pixSize = self.sysc.instrumentPixScale / math.sqrt(microStep)
                raExt = bestProd.deepProductTable['raExtent'][bestProd.index]
                decExt = bestProd.deepProductTable['decExtent'][bestProd.index]
                bitPix = 4  # bytes per pixel (32-bit data)
                memoryPerProduct.append((productID,
                    (3 * bitPix * raExt * decExt * ((3600.
                     / pixSize) ** 2) /
                     SystemConstants.one_kilobyte)))

        else:
            # Default: size of a single (first) component image only
            memoryPerProduct = [(productID, self.getRequiredMemory([
                self.getImageList(componentsOfProducts[productID])[0]]))
                for productID in products]

        # How many threads can be used and how many layers are needed
        return self.organiseProducts(memoryPerProduct, noCPUsPerProduct)
928 929 #-------------------------------------------------------------------------- 930
931 - def hasProcessBeenRun(self, products, productOfID, processType):
932 """ 933 """ 934 bitWiseValue = 2 ** self.bitWiseProcessingDict[processType] 935 completeProducts = self.archive.query("productID, fileName", 936 fromStr="ProductProcessing", 937 whereStr="programmeID=%s AND productType = '%s' AND productID IN (%s)" 938 " AND (bitProcessingFlag&%s>0 OR bitProcessingFlag&%s>0)" 939 % (self.programmeID, productOfID[products[0]].productType, 940 ','.join(map(str, products)), bitWiseValue, 941 2 ** self.bitWiseProcessingDict['complete'])) 942 943 completeProducts = set(productID 944 for productID, fileName in completeProducts 945 if productOfID[productID].fileName() == fileName) 946 947 return sorted(set(products) - completeProducts)
948 949 #-------------------------------------------------------------------------- 950
    def organiseProducts(self, memoryPerProduct, noProcessesPerProduct):
        """
        Group products into processing layers that fit within the memory
        limit.

        Products whose estimated memory exceeds L{Cu13.memoryLimit} each get
        their own single-product layer flagged isParts=True (to be broken
        into parts and run in series).  The remainder are sorted by memory
        and packed into layers of up to nThreads concurrent processes.

        @param memoryPerProduct:      List of (productID, memoryKb) pairs.
        @param noProcessesPerProduct: Number of process slots each product
                                      occupies (e.g. one per tile offset).

        @return: List of (productsInLayer, isParts) tuples, where
                 productsInLayer is a list of (productID, partNumber).

        """
        # First are any products over the memory limit?
        maxNThreads = self.numCPUs
        productInfo = []
        processLayers = []
        for productID, memory in memoryPerProduct:

            if memory > Cu13.memoryLimit:
                numParts = self.numParts or \
                    int(math.ceil(memory / Cu13.memoryLimit))

                # These must be processed separately
                # All the way through (at least until stacking finished)
                processLayers.append(([(productID, numParts)], True))
            else:
                # True division ('from __future__ import division' at top of
                # module): how many copies of this product fit in memory
                nThreads = min(int(math.floor(Cu13.memoryLimit / memory)),
                               maxNThreads)

                productInfo.append((productID, nThreads))

        # Smallest thread allowance first, so each layer is sized by its
        # most memory-hungry member
        productInfo = sorted(productInfo, key=itemgetter(1))
        prodIndex = 0
        prodProcNo = 0
        # 'index' enumerates (product, process-slot) pairs across the list
        index = noProcessesPerProduct * prodIndex + prodProcNo
        while index < (len(productInfo) * noProcessesPerProduct):
            # NOTE(review): nThreads is taken from the product at the start
            # of this layer only; later products packed into the same layer
            # may have a different allowance -- confirm this is intended.
            productID, nThreads = productInfo[prodIndex]
            productsInLayer = []
            for _ii in range(nThreads):
                if (index < len(productInfo) * noProcessesPerProduct):
                    productsInLayer.append((productInfo[prodIndex][0], prodProcNo))
                    prodProcNo += 1
                    if prodProcNo == noProcessesPerProduct:
                        # All slots of this product assigned; move to next
                        prodProcNo = 0
                        prodIndex += 1
                    index = noProcessesPerProduct * prodIndex + prodProcNo
            processLayers.append((productsInLayer, False))

        # Now organise products....
        return processLayers
992 993 #-------------------------------------------------------------------------- 994
995 - def getImageList(self, compOfProducts):
996 """ 997 """ 998 imageList = [] 999 for _prodID, _mfID, imageName, confName, catName in compOfProducts: 1000 # calculate BSCALE 1001 imageName = imageName.split(':')[-1] 1002 confName = confName.split(':')[-1] 1003 catName = catName.split(':')[-1] 1004 imageList.append(fits.FileList(imageName, confName, catName)) 1005 1006 return imageList
1007 1008 #-------------------------------------------------------------------------- 1009
1010 - def compressProd(self, bestProduct):
1011 """ Compressed all necessary files associated with the product. 1012 """ 1013 compressFileList = [bestProduct.fileName(), bestProduct.confName()] 1014 if bestProduct.isTile(): 1015 compressFileList += [bestProduct.filteredName(), 1016 bestProduct.filtConfName()] 1017 Logger.addMessage("Compressing FITS files... %s" 1018 % ','.join(compressFileList), alwaysLog=False) 1019 fits.compressFits(compressFileList)
1020 1021 #-------------------------------------------------------------------------- 1022
1023 - def fixBadExtensions(self, bestProduct, zeroExtNums):
1024 """ 1025 """ 1026 confFile = fits.open(bestProduct.confName(), 'update') 1027 for extNum in zeroExtNums: 1028 # Set confidence to equal 100 1029 confFile[extNum].data.fill(100) 1030 fits.checkForUnicodeError(confFile, bestProduct.confName()) 1031 confFile.close()
1032 1033 #-------------------------------------------------------------------------- 1034
    def createCatalogue(self, bestProduct, components, dualImageProduct=None,
                        isQuick=False):
        """
        Extracts the catalogue for the given product, using the extraction
        tool for the programme.

        @param bestProduct:      Product object to extract a catalogue for.
        @param components:       Component images of the product.
        @param dualImageProduct: Optional second image for SExtractor
                                 dual-image mode.
        @param isQuick:          If True, skip the header update / test /
                                 completion steps.

        """
        # @TODO: Is this OBSOLETE. Not quite yet. Needs rewriting
        Logger.addMessage("Extracting catalogue for %s %s"
                          % (bestProduct.productType, bestProduct.fileName()),
                          alwaysLog=(bestProduct.productType == 'mosaic'))

        #isNewCat = not self.archive.isTrialRun and not bestProduct.catExists()\
        #    and (self.extractor == "CASU" or self.extractor == "SEX")
        #@TODO: Revert to above
        #isNewCat = not bestProduct.catExists() \
        #    and (self.extractor == "CASU" or self.extractor == "SEX")
        productID = bestProduct.deepProductTable["productID"][bestProduct.index]
        # Single-product dictionaries so the layer machinery can be reused
        prodOfID = {productID:bestProduct}
        componentsOfProd = {productID:components}
        # @TODO: use ProductProcessing to determine what is to be run
        if self.extractor == "CASU":
            processLayers = self.calcProcessLayers('imcore', [productID], prodOfID, None)
            for productInfo, _isParts in processLayers:
                productIDs = [prodID for prodID, _partNum in productInfo]
                # NOTE(review): passes stackInfoDict=None, which
                # runInitialCasuExtraction later tests with 'in' -- verify
                # the non-tile path there.
                self.runInitialCasuExtraction(productIDs, prodOfID,
                                              componentsOfProd, stackInfoDict=None)
        elif self.extractor == "SEX":
            # Only use DUAL FILTER if mosaic
            processLayers = self.calcProcessLayers('sex', [productID], prodOfID, None)
            for productInfo, _isParts in processLayers:
                productIDs = [prodID for prodID, _partNum in productInfo]
                if productIDs:
                    sx = SExtractorInterface(self.archive, bestProduct, dualImageProduct)
                    sx.extractCatalogue()
                    sx.calcAperCor()
                    self.productProcessing(productIDs, prodOfID, 'sex')

        # Zero-point calibration layer; calcProcessLayers skips products whose
        # 'calcZP' stage is already flagged done in ProductProcessing
        processLayers = self.calcProcessLayers('calcZP', [productID], prodOfID,
                                               componentsOfProd)
        for productInfo, _isParts in processLayers:
            productIDs = [prodID for prodID, _partNum in productInfo]
            if productIDs:
                Logger.addMessage("Calculating zero-point and updating headers...",
                                  alwaysLog=(bestProduct.productType == 'mosaic'))

                fits.removeDuplicateKeywords(bestProduct.catName())

                # Update FITS headers
                if not isQuick:
                    updateHeaders(bestProduct, self.calcZP(bestProduct, components),
                                  pu.Provenance(self.archive, bestProduct.fileName(),
                                                components=components))
                    self.productProcessing([productID], prodOfID, 'calcZP')
                    self.testProduct(bestProduct, components)
                    self.productProcessing([productID], prodOfID, 'testProduct')
                    # Do not compress...
                    self.productProcessing([productID], prodOfID, isComplete=True)
1092 1093 #-------------------------------------------------------------------------- 1094
1095 - def createStacks(self, newProducts, productOfID, componentsOfProd):
1096 """ 1097 Creates the deep stack of given product ID in the output directory when 1098 supplied a list of component images (from L{queries.getStacks()}). 1099 If the number of images being stacked requires more than the available 1100 amount of physical system memory then the stacking is broken up into 1101 parts, unless overridden by the L{Cu13.breakIntoParts} option. 1102 1103 @return: List of broken extensions. 1104 @rtype: list(int) 1105 1106 """ 1107 processLayers = self.calcProcessLayers('imstack', newProducts, 1108 productOfID, componentsOfProd) 1109 1110 numToBeStacked = len(set(prodID 1111 for productInfo, _isParts in processLayers 1112 for prodID, _nParts in productInfo)) 1113 stackInfoDict = {} 1114 Logger.addMessage("Stacking %s stacks..." % numToBeStacked) 1115 progress = ForLoopMonitor(processLayers) 1116 for productInfo, isParts in processLayers: 1117 productIDs = [prodID for prodID, _partNum in productInfo] 1118 if productIDs: 1119 self.productProcessing(productIDs, productOfID, initialise=True) 1120 1121 if isParts: 1122 # Stacks exceed memory limit 1123 # create stacks in series (breaking each into parts) 1124 for productID, numParts in productInfo: 1125 bestProduct = productOfID[productID] 1126 currentFiles, expTimes = \ 1127 bestProduct.getImageDetails(componentsOfProd[productID]) 1128 1129 stackInfoDict[productID] = (currentFiles, expTimes) 1130 partFiles = [] 1131 # @TODO: Is the concept of iterations really useful? 1132 # It could lead to an infinite loop... 
1133 iteration = 0 1134 isComplete = self.redoHeader and bestProduct.imageExists() 1135 part = 1 1136 while not isComplete: 1137 stkSize = (0 if not self.breakIntoParts else 1138 self.getRequiredMemory(currentFiles)) 1139 1140 if iteration == 0: 1141 numParts = self.numParts or \ 1142 int(math.ceil(stkSize / Cu13.memoryLimit)) 1143 else: 1144 numParts = int(math.ceil(stkSize / Cu13.memoryLimit)) 1145 1146 if numParts <= 1: 1147 self.stackImages(currentFiles, bestProduct, expTimes) 1148 break 1149 1150 nImagePerPart = \ 1151 max(int(math.ceil(len(currentFiles) / numParts)), 2) 1152 1153 splitInputFiles = \ 1154 list(utils.splitList(currentFiles, nImagePerPart, 1155 noSingles=True)) 1156 1157 numParts = len(splitInputFiles) 1158 if numParts <= 1: 1159 self.stackImages(currentFiles, bestProduct, expTimes) 1160 break 1161 1162 Logger.addMessage("Splitting the stacking into %s parts" 1163 % numParts, alwaysLog=False) 1164 1165 deepPartFiles = [] 1166 for inputFiles in splitInputFiles: 1167 bestProduct.part = part 1168 self.stackImages(inputFiles, bestProduct, expTimes) 1169 deepPartFiles.append(bestProduct.files()) 1170 part += 1 1171 1172 bestProduct.part = None 1173 iteration += 1 1174 partFiles += deepPartFiles 1175 currentFiles = deepPartFiles 1176 1177 self.productProcessing([productID], productOfID, 'imstack') 1178 1179 # Delete any parts 1180 for filePath in partFiles: 1181 os.remove(filePath.image) 1182 if filePath.conf: 1183 os.remove(filePath.conf) 1184 if filePath.cat: 1185 os.remove(filePath.cat) 1186 1187 else: # run layers in parallel 1188 prodThreads = [] 1189 for productID in productIDs: 1190 bestProduct = productOfID[productID] 1191 allFiles, expTimes = \ 1192 bestProduct.getImageDetails(componentsOfProd[productID]) 1193 stackInfoDict[productID] = (allFiles, expTimes) 1194 prodThreads.append( 1195 multiprocessing.Process(target=self.stackImages, 1196 args=(allFiles, bestProduct, expTimes))) 1197 1198 # Do stacking 1199 if productIDs: 1200 msg = 
"Stacking images with productIDs " 1201 msg += utils.numberRange(productIDs) 1202 Logger.addMessage(msg, alwaysLog=False) 1203 1204 self.runThreads(prodThreads) 1205 self.productProcessing(productIDs, productOfID, 'imstack') 1206 1207 msg = msg.replace('ing', 'ed') if productIDs else '' 1208 progress.testForOutput(msg) 1209 # @TODO: Parallel process prepHeaders 1210 products = \ 1211 self.hasProcessBeenRun(newProducts, productOfID, 'prepHeaders') 1212 1213 if products: 1214 msg = ("of all new products" if newProducts == products else 1215 "with productIDs " + utils.numberRange(products)) 1216 1217 Logger.addMessage("Updating headers for images " + msg) 1218 1219 for productID in products: 1220 bestProduct = productOfID[productID] 1221 if productID in stackInfoDict: 1222 allFiles, expTimes = stackInfoDict[productID] 1223 else: 1224 allFiles, expTimes = \ 1225 bestProduct.getImageDetails(componentsOfProd[productID]) 1226 allFiles = self.getOBLevel(allFiles, bestProduct) 1227 fits.prepHeaders(bestProduct.fileName(), bestProduct.confName(), 1228 allFiles, expTimes, 1229 dfsIDString=(self.programme.getAttr("dfsIDString") 1230 if self.sysc.isWSA() else None), 1231 bScale=(bestProduct.bScale 1232 if self.sysc.hasEsoRelease else None)) 1233 1234 self.productProcessing([productID], productOfID, 'prepHeaders') 1235 1236 # Delete any intermediate stack confidence images 1237 for filePath in allFiles: 1238 if bestProduct.outputDir in filePath.conf: 1239 os.remove(filePath.conf) 1240 self.runInitialCasuExtraction(newProducts, productOfID, 1241 componentsOfProd, stackInfoDict)
1242 1243 #-------------------------------------------------------------------------- 1244
1245 - def getOBLevel(self, allFiles, bestProduct):
1246 """ 1247 """ 1248 if bestProduct.isTile() or bestProduct.isMosaic(): 1249 return allFiles 1250 deeps = [af for af in allFiles if self.sysc.deepSuffix in af.image] 1251 nonDeeps = [af for af in allFiles if self.sysc.deepSuffix not in af.image] 1252 while deeps: 1253 # Should only be one deep 1254 if len(deeps) > 1: 1255 Cu13.CuError("More than one old deep used to construct %s" % bestProduct.fileName()) 1256 # Get OB files.. 1257 provFileNames = self.archive.query( 1258 "m2.fileName", "Multiframe as m1,Provenance as v,Multiframe as m2", 1259 "v.combiframeID=m1.multiframeID and m1.fileName like '%%%s' and " 1260 "v.multiframeID=m2.multiframeID order by m2.mjdObs" % 1261 os.path.join(os.path.basename(os.path.dirname(deeps[0].image)), 1262 os.path.basename(deeps[0].image))) 1263 newFiles = [fits.FileList(pfn, fits.getConfMap(pfn), fits.getCatalogue(pfn)) for pfn in provFileNames] 1264 nonDeeps = newFiles + nonDeeps 1265 deeps = [af for af in newFiles if self.sysc.deepSuffix in af.image] 1266 return nonDeeps
1267 1268
1269 - def runInitialCasuExtraction(self, newProducts, productOfID, 1270 componentsOfProd, stackInfoDict=None):
1271 """ 1272 """ 1273 # process catalogues in parallel 1274 processLayers = self.calcProcessLayers('imcore', newProducts, 1275 productOfID, componentsOfProd) 1276 Logger.addMessage("Extracting...") 1277 areZeroSizeExtsDict = {} 1278 for productID in newProducts: 1279 bestProduct = productOfID[productID] 1280 areZeroSizeExtsDict[productID] = \ 1281 any(ii > 0 and hdu.header["TEXPTIME"] == 0 1282 for ii, hdu in enumerate(fits.open(bestProduct.fileName()))) 1283 1284 progress = ForLoopMonitor(processLayers) 1285 for productInfo, _isParts in processLayers: 1286 productIDs = [prodID for prodID, _partNum in productInfo] 1287 prodThreads = [] 1288 for productID in productIDs: 1289 bestProduct = productOfID[productID] 1290 prodThreads.append(multiprocessing.Process( 1291 target=self.extractCat, 1292 args=(bestProduct, areZeroSizeExtsDict[productID]))) 1293 1294 # Do extracting 1295 if productIDs: 1296 msg = "Extracting products: " + utils.numberRange(productIDs) 1297 Logger.addMessage(msg, alwaysLog=False) 1298 1299 self.runThreads(prodThreads) 1300 self.productProcessing(productIDs, productOfID, 'imcore') 1301 1302 msg = msg.replace('ing', 'ed') if productIDs else '' 1303 progress.testForOutput(msg) 1304 1305 self.fitWCS(newProducts, productOfID, componentsOfProd, inParallel=True, 1306 areZeroSizeExtsDict=areZeroSizeExtsDict) 1307 1308 # Fix any conf images - use zeroExtNums 1309 proc = 'intUpdate' if bestProduct.isTile() else 'calcZP' 1310 products = self.hasProcessBeenRun(newProducts, productOfID, proc) 1311 if products: 1312 msg = ("all new products" if newProducts == products else 1313 "products " + utils.numberRange(products)) 1314 1315 Logger.addMessage("Calculating ZP and updating headers for " + msg) 1316 1317 progress = ForLoopMonitor(products) 1318 for productID in products: 1319 if bestProduct.isTile(): 1320 Logger.addMessage("ProductID=%s" % productID, alwaysLog=False) 1321 bestProduct = productOfID[productID] 1322 # @TODO: Why not componentsOfProd[productID] 
1323 components = bestProduct.findRequiredInputs() 1324 zpOfExt = self.calcZP(bestProduct, components, isGrt=True) 1325 prov = pu.Provenance(self.archive, bestProduct.fileName(), 1326 components=components) 1327 1328 updateHeaders(bestProduct, zpOfExt, prov, isGrt=True) 1329 self.productProcessing([productID], productOfID, 'intUpdate') 1330 else: 1331 Logger.addMessage("ProductID=%s" % productID, alwaysLog=False) 1332 bestProduct = productOfID[productID] 1333 Logger.addMessage("Calculating zero-point and updating headers...", 1334 alwaysLog=(bestProduct.productType == 'mosaic')) 1335 1336 fits.removeDuplicateKeywords(bestProduct.catName()) 1337 1338 # Update FITS headers 1339 updateHeaders(bestProduct, 1340 self.calcZP(bestProduct, componentsOfProd[productID]), 1341 pu.Provenance(self.archive, bestProduct.fileName(), 1342 components=componentsOfProd[productID])) 1343 1344 if productID in stackInfoDict: 1345 _allFiles, expTimes = stackInfoDict[productID] 1346 else: 1347 _allFiles, expTimes = \ 1348 bestProduct.getImageDetails(componentsOfProd[productID]) 1349 1350 # Check expTimes - TODO: Do this within fixBadExtensions? 1351 zeroExtNums = \ 1352 [i + 1 for i in utils.getListIndices(expTimes, value=0)] 1353 1354 self.fixBadExtensions(bestProduct, zeroExtNums) 1355 self.productProcessing([productID], productOfID, 'calcZP') 1356 1357 progress.testForOutput("Completed productID %s" % productID) 1358 1359 if not bestProduct.isTile(): 1360 self.finishOffProducts(newProducts, productOfID, componentsOfProd)
1361 1362 #-------------------------------------------------------------------------- 1363
    def createTiles(self, newProducts, productOfID, componentsOfProd):
        """
        Create tile products: nebuliser-filter the pawprint components, mosaic
        both the unfiltered and filtered sets, update headers, extract and
        WCS-fit catalogues, grout, classify, and finally calibrate zero-points.

        @param newProducts:      Sequence of tile productIDs.
        @param productOfID:      Dict of productID -> product object.
        @param componentsOfProd: Dict of productID -> component pawprints.

        """
        # nebuliser

        # Products already on disk (when redoing headers) skip filtering
        isCompleteDict = {}
        for productID in newProducts:
            isCompleteDict[productID] = self.redoHeader and \
                productOfID[productID].imageExists()

        processLayers = self.calcProcessLayers('nebuliser', newProducts,
                                               productOfID, componentsOfProd)
        Logger.addMessage("Filtering...")
        progress = ForLoopMonitor(processLayers)
        filtCompsOfProd = {}
        for productInfo, _isParts in processLayers:
            productIDs = [prodID for prodID, _partNum in productInfo]
            self.productProcessing(productIDs, productOfID, initialise=True)
            prodThreads = []
            filterComps = []
            for productID, prodPart in productInfo:
                if not isCompleteDict[productID]:
                    inputImage = componentsOfProd[productID][prodPart]
                    filteredImage = fits.getFilteredImageName(inputImage.imageName,
                        directory=(self._tmpWorkDir if not self.archive.isRealRun
                                   and self.sysc.testOutputPath() not in inputImage.imageName
                                   else None))

                    filterComps.append(Cu13.ProductDetails(inputImage.productID,
                        inputImage.multiframeID, filteredImage, inputImage.confName,
                        inputImage.catName))

                    if prodPart == 5:  # TODO: there could be 3 pawprints, not 6.
                        # Offset 5 is the last pawprint: close out this tile
                        filtCompsOfProd[productID] = filterComps
                        filterComps = []

                    prodThreads.append(
                        multiprocessing.Process(target=self.filterImages,
                                                args=(inputImage, filteredImage)))

            if productInfo:
                uniquePiDs = [productID for productID, pp in productInfo
                              if pp == 5]

                msg = "Filtering products: " + utils.numberRange(uniquePiDs)
                Logger.addMessage(msg, alwaysLog=False)
                self.runThreads(prodThreads)

                # @TODO: Need to work if NOFFSETS=3
                # This only updates ProductProcessing if the last stack in the
                # tile (offset 5) has been completed
                self.productProcessing(uniquePiDs, productOfID, 'nebuliser')

            msg = msg.replace('ing', 'ed') if productInfo else ''
            progress.testForOutput(msg)

        # Now tile
        # @TODO: Would be more efficient to merge layers together!
        # mosType 1 = plain mosaic, 2 = mosaic of filtered pawprints
        processLayers = [(pInfo, 1) for pInfo, _isParts in
                         self.calcProcessLayers('mosaic', newProducts,
                                                productOfID, componentsOfProd)]
        processLayers += [(pInfo, 2) for pInfo, _isParts in
                          self.calcProcessLayers('filtmos', newProducts,
                                                 productOfID, componentsOfProd)]
        # @FIXME: Why does this often fall over after mosaicing, before filtmos?
        Logger.addMessage("Mosaicing...")
        progress = ForLoopMonitor(processLayers)
        for productInfo, mosType in processLayers:
            productIDs = [prodID for prodID, _partNum in productInfo]
            prodThreads = []
            for productID in productIDs:
                bestProduct = productOfID[productID]
                isFiltered = False
                if mosType == 1:
                    components = componentsOfProd[productID]
                else:
                    isFiltered = True
                    if productID in filtCompsOfProd:
                        components = filtCompsOfProd[productID]
                    else:
                        # Filtering was skipped this run: reconstruct the
                        # filtered image names from the components
                        filteredImageDict = dict([(inputImage.imageName,
                            fits.getFilteredImageName(inputImage.imageName))
                            for inputImage in componentsOfProd[productID]])

                        for imageName in filteredImageDict:
                            if not filteredImageDict[imageName] \
                                    and not self.archive.isRealRun:
                                filteredImageDict[imageName] = \
                                    fits.getFilteredImageName(imageName,
                                        directory=(self._tmpWorkDir
                                                   if not self.archive.isRealRun
                                                   and self.sysc.testOutputPath()
                                                   not in imageName
                                                   else None))

                        components = [Cu13.ProductDetails(inputImage.productID,
                                          inputImage.multiframeID,
                                          filteredImageDict[inputImage.imageName],
                                          inputImage.confName, inputImage.catName)
                                      for inputImage in componentsOfProd[productID]]

                prodThreads.append(multiprocessing.Process(
                    target=self.tileImages, args=(components, bestProduct,
                                                  isFiltered)))

            if productIDs:
                msg = "Mosaicing products: " + utils.numberRange(productIDs)
                Logger.addMessage(msg, alwaysLog=False)
                self.runThreads(prodThreads)
                self.productProcessing(productIDs, productOfID,
                    process=('mosaic' if mosType == 1 else 'filtmos'))

            msg = msg.replace('ing', 'ked') if productIDs else ''
            progress.testForOutput(msg)

        products = \
            self.hasProcessBeenRun(newProducts, productOfID, 'prepHeaders')

        if products:
            msg = ("all new products" if newProducts == products else
                   "products " + utils.numberRange(products))

            Logger.addMessage("Updating headers for " + msg)

            for productID in products:
                # Filtered pawprints are intermediates: remove them now
                for inputImage in componentsOfProd[productID]:
                    filteredImage = fits.getFilteredImageName(inputImage.imageName,
                        directory=(self._tmpWorkDir if not self.archive.isRealRun
                                   and self.sysc.testOutputPath() not in inputImage.imageName
                                   else None))

                    if os.path.exists(filteredImage):
                        os.remove(filteredImage)

                bestProduct = productOfID[productID]
                allFiles, expTimes = \
                    bestProduct.getImageDetails(componentsOfProd[productID])
                allFiles = self.getOBLevel(allFiles, bestProduct)
                fits.prepHeaders(bestProduct.fileName(), bestProduct.confName(),
                                 allFiles, expTimes,
                                 dfsIDString=(self.programme.getAttr("dfsIDString")
                                              if self.sysc.isWSA() else None),
                                 bScale=(bestProduct.bScale
                                         if self.sysc.hasEsoRelease else None))

                self.productProcessing([productID],
                                       productOfID, 'prepHeaders')

        # extract + wcsfit
        self.runInitialCasuExtraction(newProducts, productOfID,
                                      componentsOfProd)

        # grout
        tmpGroutDir = self.sysc.tempWorkPath()
        shutil.rmtree(tmpGroutDir)
        utils.ensureDirExist(tmpGroutDir)
        processLayers = self.calcProcessLayers('grout', newProducts,
                                               productOfID, componentsOfProd)
        Logger.addMessage("Grouting...")
        progress = ForLoopMonitor(processLayers)
        for productInfo, _isParts in processLayers:
            productIDs = [prodID for prodID, _partNum in productInfo]
            prodThreads = []
            if productIDs:
                Logger.addMessage("Setting up links for products: "
                                  + utils.numberRange(productIDs), alwaysLog=False)

            for productID in productIDs:
                bestProduct = productOfID[productID]
                self.createSoftLinks(bestProduct, tmpGroutDir)
                catName = bestProduct.preGroutCatName()
                finalCatName = bestProduct.catName()
                prodThreads.append(multiprocessing.Process(
                    target=self.groutCat, args=(catName, finalCatName,
                                                tmpGroutDir)))

            if productIDs:
                msg = "Grouting catalogues for products: "
                msg += utils.numberRange(productIDs)
                Logger.addMessage(msg, alwaysLog=False)

                self.runThreads(prodThreads)
                self.productProcessing(productIDs, productOfID, 'grout')

            msg = msg.replace('ing', 'ed') if productIDs else ''
            progress.testForOutput(msg)
        shutil.rmtree(tmpGroutDir)
        # classify
        processLayers = self.calcProcessLayers('classify', newProducts,
                                               productOfID, componentsOfProd)

        Logger.addMessage("Classifying...")
        progress = ForLoopMonitor(processLayers)
        for productInfo, _isParts in processLayers:
            productIDs = [prodID for prodID, _partNum in productInfo]
            prodThreads = []
            for productID in productIDs:
                bestProduct = productOfID[productID]
                catName = bestProduct.catName()
                prodThreads.append(multiprocessing.Process(
                    target=self.classifyCat, args=(catName, False)))

            if productIDs:
                msg = "Classifying catalogues for products: "
                msg += utils.numberRange(productIDs)
                Logger.addMessage(msg, alwaysLog=False)

                self.runThreads(prodThreads)
                self.productProcessing(productIDs, productOfID, 'classify')

            msg = msg.replace('ying', 'ied') if productIDs else ''
            progress.testForOutput(msg)

        # ZP
        products = self.hasProcessBeenRun(newProducts, productOfID, 'calcZP')
        if products:
            msg = ("all new products" if newProducts == products else
                   "products " + utils.numberRange(products))

            Logger.addMessage("Calculating ZP and updating headers for " + msg)

            progress = ForLoopMonitor(products)
            for productID in products:
                bestProduct = productOfID[productID]

                # Remove temporary pre-grouted catalogues
                if bestProduct.isTile() \
                        and os.path.exists(bestProduct.preGroutCatName()):
                    os.remove(bestProduct.preGroutCatName())

                Logger.addMessage("Calculating zero-point and updating headers...",
                                  alwaysLog=bestProduct.productType == 'mosaic')

                fits.removeDuplicateKeywords(bestProduct.catName())

                # Update FITS headers
                updateHeaders(bestProduct,
                              self.calcZP(bestProduct, componentsOfProd[productID]),
                              pu.Provenance(self.archive, bestProduct.fileName(),
                                            componentsOfProd[productID]))

                self.productProcessing([productID], productOfID, 'calcZP')
                progress.testForOutput("Completed productID %s" % productID)

        self.finishOffProducts(newProducts, productOfID, componentsOfProd)
1609 1610 #-------------------------------------------------------------------------- 1611
1612 - def finishOffProducts(self, newProducts, productOfID, componentsOfProd):
1613 """ Last parts. 1614 """ 1615 products = self.hasProcessBeenRun(newProducts, productOfID, 'testProduct') 1616 if products: 1617 msg = ("all new products" if newProducts == products else 1618 "products " + utils.numberRange(products)) 1619 1620 Logger.addMessage("Testing products and updating headers for " + msg) 1621 1622 progress = ForLoopMonitor(products) 1623 for productID in products: 1624 bestProduct = productOfID[productID] 1625 self.testProduct(bestProduct, componentsOfProd[productID]) 1626 self.productProcessing([productID], productOfID, 'testProduct') 1627 progress.testForOutput("Completed productID %s" % productID) 1628 1629 prevProducts = products 1630 products = self.hasProcessBeenRun(newProducts, productOfID, 'complete') 1631 if products: 1632 msg = ("the same products" if prevProducts == products else 1633 "products " + utils.numberRange(products)) 1634 1635 Logger.addMessage("Compressing files and finishing off for " + msg) 1636 1637 progress = ForLoopMonitor(products) 1638 for productID in products: 1639 bestProduct = productOfID[productID] 1640 self.compressProd(bestProduct) 1641 self.productProcessing([productID], productOfID, isComplete=True) 1642 progress.testForOutput("Completed productID %s" % productID)
1643 1644 #-------------------------------------------------------------------------- 1645
1646 - def createMosaics(self, newProducts, productOfID, componentsOfProd):
1647 """ Create mosaics 1648 """ 1649 # Convert mosaics first (can this be parallised?) 1650 for productID in newProducts: 1651 Logger.addMessage("Creating mosaic, product %s" % productID) 1652 bestProduct = productOfID[productID] 1653 self.createMosaic(bestProduct, componentsOfProd[productID]) 1654 # Extract catalogues - (can this be parallised?) 1655 for productID in newProducts: 1656 bestProduct = productOfID[productID] 1657 dualProduct = None 1658 if bestProduct.getDualProductID(): 1659 dualProduct = self.getDualImageData(bestProduct) 1660 Logger.addMessage("Got dual image data", alwaysLog=False) 1661 components = bestProduct.intComponents 1662 Logger.addMessage("Extracting catalogue") 1663 self.createCatalogue(bestProduct, components, dualProduct)
1664 1665 #-------------------------------------------------------------------------- 1666
1667 - def fitWCS(self, newProducts, productOfID, componentsOfProd, 1668 inParallel=False, areZeroSizeExtsDict={}):
1669 """ Use wcsfit to do astrometry 1670 """ 1671 Logger.addMessage("Fitting astrometry...") 1672 processLayers = self.calcProcessLayers('wcsfit', newProducts, 1673 productOfID, componentsOfProd) 1674 progress = ForLoopMonitor(processLayers) 1675 for productInfo, _isParts in processLayers: 1676 productIDs = [prodID for prodID, _partNum in productInfo] 1677 msg = "Fitting astrometry to products: " 1678 msg += utils.numberRange(productIDs) 1679 Logger.addMessage(msg, alwaysLog=False) 1680 1681 prodThreads = [] 1682 threadNo = 1 1683 for productID in productIDs: 1684 bestProduct = productOfID[productID] 1685 if inParallel: 1686 prodThreads.append( 1687 multiprocessing.Process(target=self.wcsfit, 1688 args=(bestProduct, threadNo, areZeroSizeExtsDict[productID]))) 1689 threadNo += 1 1690 else: 1691 self.wcsfit(bestProduct, threadNo, areZeroSizeExtsDict[productID]) 1692 self.productProcessing([productID], productOfID, 'wcsfit') 1693 1694 if inParallel and productIDs: 1695 self.runThreads(prodThreads) 1696 self.productProcessing(productIDs, productOfID, 'wcsfit') 1697 for thdNo in range(threadNo - 1): 1698 # Delete temporary folders 1699 shutil.rmtree(os.path.join(os.getcwd(), "thread%s_%s" % 1700 (thdNo + 1, self.cuEventID))) 1701 1702 progress.testForOutput(msg.replace('ing', 'ed'))
1703 1704 #-------------------------------------------------------------------------- 1705
1706 - def testFits(self, fileName, processName):
1707 """ 1708 """ 1709 failed = False 1710 if os.path.exists(fileName): 1711 stdout = extp.run('fverify %s' % fileName, 1712 ignoreMsgs=['Problem saving parameter file', 1713 'for writing']) 1714 index = 0 1715 for ii, line in enumerate(stdout): 1716 if line.startswith('HDU#'): 1717 index = ii 1718 hduNo = 0 1719 1720 if ii > index and index > 0: 1721 if len(line) > 0 and line[0].isdigit(): 1722 hduNo += 1 1723 if int(line.split()[len(line.split()) - 1]) > 0: 1724 failed = True 1725 Logger.addMessage( 1726 "Corrupt file %s, ext %s in process %s " 1727 % (fileName, hduNo, 1728 processName if processName else 'NONE')) 1729 1730 if self.sysc.tileSuffix not in fileName \ 1731 and self.sysc.mosaicSuffix not in fileName \ 1732 and hduNo < self.sysc.maxHDUs: 1733 failed = True 1734 Logger.addMessage("Fewer extensions %s in process %s" 1735 % (fileName, 1736 processName if processName else 'NONE')) 1737 1738 return failed
1739 1740 #-------------------------------------------------------------------------- 1741
    def productProcessing(self, productIDList, productOfID, process=None,
                          initialise=False, isComplete=False):
        """ Records the processing state of each given product in the
            ProductProcessing bookkeeping table, after verifying that the
            product file on disk passes FITS verification.

            @param productIDList: Products to record progress for.
            @type  productIDList: list(int)
            @param productOfID:   Product definition referenced by product ID.
            @type  productOfID:   dict
            @param process:       Name of the stage just performed; must be a
                                  key of bitWiseProcessingDict. Ignored when
                                  initialise or isComplete is set.
            @type  process:       str
            @param initialise:    If True, ensure a bookkeeping row exists
                                  without altering any existing stage bits.
            @type  initialise:    bool
            @param isComplete:    If True, overwrite the flag with only the
                                  'complete' bit set.
            @type  isComplete:    bool

            @raise Cu13.CuError: If a product file fails FITS verification.
        """
        doAdd = False
        for productID in productIDList:
            bestProduct = productOfID[productID]
            fileName = bestProduct.fileName()
            # @TODO: Does this slow it down too much
            hasFailed = self.testFits(fileName, process)
            productType = bestProduct.productType
            if initialise:
                # doAdd=True with a zero status leaves an existing row's
                # flag untouched (adds 0); a new row is inserted with 0.
                bitWiseStatus = 0
                doAdd = True
            elif isComplete:
                # doAdd stays False: the flag is overwritten, not added to.
                bitWiseStatus = 2 ** (self.bitWiseProcessingDict['complete'])
            else:
                # Add this stage's bit to whatever is already recorded.
                bitWiseStatus = 2 ** (self.bitWiseProcessingDict[process])
                doAdd = True

            # Update the row if it exists, otherwise insert a fresh one.
            if not hasFailed and self.archive.queryEntriesExist("ProductProcessing",
                "programmeID=%s and productType='%s' and productID=%s "
                "and fileName='%s'" % (self.programmeID, productType, productID,
                fileName)):
                self.archive.update("ProductProcessing",
                    [("bitProcessingFlag", ("bitProcessingFlag+%s" % bitWiseStatus
                      if doAdd else bitWiseStatus))],
                    where="programmeID=%s AND productType='%s' AND productID=%s "
                    "AND fileName='%s'" % (self.programmeID, productType,
                    productID, fileName))
            elif not hasFailed:
                self.archive.insertData("ProductProcessing",
                    (self.programmeID, productType, productID, fileName,
                     bitWiseStatus), enforcePKC=True)
            else:
                raise Cu13.CuError("Corrupt file: " + fileName)
                #Logger.addMessage("<Warning> Corrupt file %s, will delete and redo later" % fileName)
                #if os.path.exists(fileName):
                #    os.remove(fileName)
                #if os.path.exists(fits.getConfMap(fileName)):
                #    os.remove(fits.getConfMap(fileName))
                #if os.path.exists(fits.getCatalogue(fileName)):
                #    os.remove(fits.getCatalogue(fileName))
                #self.archive.delete("ProductProcessing",
                #    whereStr="programmeID=%s AND productType='%s' AND productID=%s "
                #    "AND fileName='%s'" % (self.programmeID, productType,
                #    productID, fileName))

    #--------------------------------------------------------------------------
1792 - def runThreads(self, threads):
1793 """ Executes given list of thread processes. 1794 """ 1795 try: 1796 for thread in threads: 1797 thread.start() 1798 finally: 1799 threadFail = False 1800 for thread in threads: 1801 if threadFail: 1802 thread.terminate() 1803 thread.join() 1804 else: 1805 thread.join() 1806 threadFail = thread.exitcode 1807 if threadFail: 1808 Logger.addMessage("<ERROR> Thread failed. Terminating all" 1809 " other running threads.") 1810 1811 if threadFail: 1812 self.tidyUp() 1813 raise Cu13.CuError("Forked processes failed. Please check stdout.")
1814 1815 #-------------------------------------------------------------------------- 1816
    def tidyUp(self):
        """ Cleans up after a failure.

            @note: Placeholder only -- the actual clean-up logic has not been
                   implemented yet; currently this just logs a reminder.
        """
        Logger.addMessage("Tidying up...THIS PART OF CODE STILL NEEDS TO BE COMPLETED")
1821 1822 #-------------------------------------------------------------------------- 1823 1894 1895 #-------------------------------------------------------------------------- 1896
    def createMosaic(self, bestProduct, components):
        """
        Creates the deep mosaic of given product ID in the output directory when
        supplied a list of component images (from L{queries.getStacks()}).

        Only externally-produced mosaics are supported: the provenance is
        queried, the external image is converted/re-headered via
        L{modifyExternalMosaic}, and progress is recorded in
        ProductProcessing.

        @param bestProduct: Product definition of the mosaic to create.
        @param components:  Component images of the mosaic.

        @raise Cu13.CuError: If the product is not external (no CASU
                             mosaicking tool support).

        @return: List of new deep product files created.
        @rtype: list(str)

        """

        # @TODO: low priority make parallel.
        if bestProduct.external:
            productID = bestProduct.deepProductTable["productID"][bestProduct.index]
            prodOfID = {productID:bestProduct}
            processLayers = self.calcProcessLayers('imageproc', [productID], prodOfID, None)
            Logger.addMessage("Querying database for metadata...", alwaysLog=False)
            provInfoObj = Provenance(self, bestProduct)
            bestProduct.compProductType = provInfoObj.intProdType
            bestProduct.intComponents = provInfoObj.getComponents()
#            nToBeConverted = len(set([prodID for productInfo, _isParts in processLayers for prodID, _nParts in productInfo]))
#            progress = ForLoopMonitor(processLayers)
#            Logger.addMessage("Converting %s stacks..." % nToBeStacked)
            Logger.addMessage("Mosaicing image: %s from %s"
                              % (bestProduct.fileName(), components), alwaysLog=False)
            for productInfo, _isParts in processLayers:
                productIDs = [prodID for prodID, _partNum in productInfo]
                if productIDs:
                    # Initialise bookkeeping, convert, then flag 'imageproc'
                    self.productProcessing(productIDs, prodOfID, initialise=True)
                    self.modifyExternalMosaic(bestProduct, components, provInfoObj)
                    self.isCompressed = False
                    self.productProcessing(productIDs, prodOfID, 'imageproc')
        else:
            raise Cu13.CuError("No support for CASU tool yet.")
1930 1931 #-------------------------------------------------------------------------- 1932
1933 - def modifyExternalMosaic(self, bestProduct, components, provInfoObj):
1934 """ 1935 Sets up attributes to convert an external mosaic into the 1936 correct format: MEF, with the correct headers for CU3 to ingest 1937 into the science archive. 1938 1939 """ 1940 if not (components and os.path.exists(bestProduct.provFile())): 1941 raise Cu13.CuError("External mosaics have not been imported or " 1942 "set up correctly. If they have been imported " 1943 "add a line into ExternalProduct and run " 1944 "cu13.convertProvInfo.py") 1945 1946 bestProduct.imageName = components[0][1] 1947 bestProduct.weightName = components[0][2] 1948 prodID = bestProduct.deepProductTable["productID"][bestProduct.index] 1949 Logger.addMessage("Converting mosaic productID %s in %s" 1950 % (prodID, 1951 ','.join(map(str, bestProduct.deepProductTable["productID"])))) 1952 1953 1954 1955 # convert into mef if necessary and sort out headers 1956 outputFiles = [(bestProduct.fileName(), True), 1957 (bestProduct.confName(), False)] 1958 1959 # normalise weight image 1960 normalFactor = (1 if self.extractor != 'CASU' else 1961 getNormalisationConst(bestProduct)) 1962 1963 for imageName, isImage in outputFiles: 1964 self.convertMosaic(bestProduct, imageName, isImage, provInfoObj, 1965 normalFactor) 1966 1967 1968 1969 Logger.addMessage("Finished converting mosaic productID %s" 1970 % (prodID))
1971 1972 #-------------------------------------------------------------------------- 1973
    def convertMosaic(self, mosaicObj, outputFileName, isImage,
                      provObj, normalFactor):
        """
        Converts a mosaic to MEF and adds the correct primary and extension
        header.

        @param mosaicObj:      Product definition of the mosaic.
        @param outputFileName: Path of the converted file to create.
        @param isImage:        True for the science image, False for the
                               confidence map.
        @param provObj:        Provenance information for the product.
        @param normalFactor:   Normalisation constant applied to CASU
                               confidence-map pixel values.
        """
        # Only convert if the output has not already been produced.
        if not (os.path.exists(outputFileName)):
            Logger.addMessage("Converting mosaic %s %s..." %
                ("image" if isImage else "confidence map", outputFileName))
            # @TODO: Check to see if this part has already been done.
            #        Use ProductProcessing
            inputFileName = mosaicObj.imageName if isImage \
                else mosaicObj.weightName
            # Copy image and get header info
            if not mosaicObj.isMef(inputFileName):
                # Simple FITS input: wrap it into an MEF.
                if not self.archive.isTrialRun:
                    if not self.redoHeader:
                        origImagHeader = [convertSFtoMEF(inputFileName,
                            outputFileName, isImage, self.extractor, normalFactor)]

                        os.chmod(outputFileName, 0644)
                else:
                    origImagHeader = [fits.open(inputFileName)[0].header]
            else:
                # Already an MEF: copy, then (for CASU confidence maps)
                # renormalise the pixel data to an 8-bit confidence scale.
                if not self.archive.isTrialRun:
                    if not self.redoHeader:
                        shutil.copyfile(inputFileName, outputFileName)
                    hdulist = fits.open(outputFileName, 'update')
                    origImagHeader = [hdulist[0].header, hdulist[1].header]
                    if not isImage and self.extractor == 'CASU':
                        hdulist[1].data = numpy.multiply(
                            hdulist[1].data, normalFactor).astype(
                            numpy.uint8)
                    fits.checkForUnicodeError(hdulist, outputFileName)
                    hdulist.close()
                else:
                    hdulist = fits.open(inputFileName)
                    origImagHeader = [hdulist[0].header, hdulist[1].header]
                    hdulist.close()

            # Now fix up headers
            if not self.archive.isTrialRun:
                hdulist = fits.open(outputFileName, 'update')
                hdulist[0].header = self.getPrimaryHeader(mosaicObj,
                    hdulist[0].header, origImagHeader[0], isImage, provObj)

                fits.checkForUnicodeError(hdulist, outputFileName)
                if mosaicObj.isMef():
                    # Use the last original header as the extension template
                    hdulist[1].header = self.getInitImageHeader(
                        hdulist[1].header, origImagHeader[len(origImagHeader) - 1],
                        provObj)

                    fits.checkForUnicodeError(hdulist, outputFileName)
                    hdulist[1].header = self.getImageHeader(mosaicObj,
                        hdulist[1].header, isImage, provObj)
                fits.checkForUnicodeError(hdulist, outputFileName)
                hdulist.close()
2032 2033 #-------------------------------------------------------------------------- 2034
    def findContents(self):
        """
        Checks ProductProcessing to see if there are partially processed
        products: sets directory date

        Populates self.newProductsOfType with fully completed products
        (flag has the 'complete' bit and all files exist on disk),
        self.startedProductsOfType with partially processed ones, and
        records each product's directory in self.prodDirectory. Tiles
        stalled at the 'nebuliser' stage are only treated as started if
        every component stack's filtered image can be found.
        """
        # Products whose 'complete' bit is set.
        compProds = self.archive.query("productType,productID,fileName",
            "ProductProcessing",
            "programmeID=%s and bitProcessingFlag&%s!=0"
            % (self.programmeID,
               2 ** (Cu13.bitWiseProcessingDict['complete'])))

        # Products with some progress recorded but not complete.
        startProds = self.archive.query("productType,productID,fileName",
            "ProductProcessing",
            "programmeID=%s and bitProcessingFlag&%s!=1 "
            "and bitProcessingFlag>0"
            % (self.programmeID,
               2 ** (Cu13.bitWiseProcessingDict['complete'])))

        # Tiles that have only had the 'nebuliser' stage done.
        filtTileProds = self.archive.query("productType,productID,fileName",
            "ProductProcessing",
            "programmeID=%s and bitProcessingFlag=%s and "
            "productType ='tile' "
            % (self.programmeID,
               2 ** (Cu13.bitWiseProcessingDict['nebuliser'])))

        for pType, productID, prodFilePath in compProds:
            # Complete only counts if image, confidence map and catalogue
            # all still exist on disk.
            if (os.path.exists(prodFilePath) and
                os.path.exists(fits.getConfMap(prodFilePath)) and
                os.path.exists(fits.getCatalogue(prodFilePath))):
                self.newProductsOfType[pType][productID] = \
                    Cu13.ProductDetails(productID,
                        dbc.intDefault(), prodFilePath,
                        fits.getConfMap(prodFilePath),
                        fits.getCatalogue(prodFilePath))

                self.prodDirectory[pType][productID] = \
                    os.path.dirname(prodFilePath)

        for pType, productID, prodFilePath in startProds:
            if (os.path.exists(prodFilePath)):
                self.startedProductsOfType[pType][productID] = \
                    Cu13.ProductDetails(productID,
                        dbc.intDefault(), prodFilePath,
                        fits.getConfMap(prodFilePath),
                        fits.getCatalogue(prodFilePath))

                self.prodDirectory[pType][productID] = \
                    os.path.dirname(prodFilePath)

        for pType, productID, prodFilePath in filtTileProds:
            # @TODO: have to repeat filtering this way
            #        Use productLinks
            # Component stacks of this tile.
            stackProdIDs = self.archive.query(
                "intProductID",
                "ProductLinks",
                "combiProductID=%s AND programmeID=%s AND combiProdType='tile'"
                " AND intProdType='stack'" % (productID, self.programmeID))
            # Find stacks -
            frameSel = queries.getFrameSelection('stack', noDeeps=False,
                deepOnly=False, selType='%stack')
            fileNames = self.archive.query(
                "fileName,productID",
                "ProgrammeFrame as p,Multiframe as m",
                "p.programmeID=%s AND p.multiframeID=m.multiframeID AND %s AND"
                " %s AND productID IN (%s)" % (self.programmeID, frameSel,
                DepCodes.selectNonDeprecated, ','.join(map(str, stackProdIDs))))
            # A stack is 'found' if its nebuliser-filtered image exists
            # alongside the original file.
            foundProds = [pID for fName, pID in fileNames if os.path.exists(
                os.path.join(os.path.dirname(fName),
                self.sysc.filtPrefix + os.path.basename(fName)))]

            missingProds = [prodID for prodID in stackProdIDs
                            if prodID not in foundProds]

            # For stacks not yet ingested, fall back to the paths recorded
            # in ProductProcessing.
            fileNames += self.archive.query(
                "fileName,productID",
                "ProductProcessing",
                "programmeID=%s AND productType='stack' AND productID IN (%s)"
                % (self.programmeID, ','.join(map(str, missingProds))))

            foundProds = [pID for fName, pID in fileNames if os.path.exists(
                os.path.join(os.path.dirname(fName),
                self.sysc.filtPrefix + os.path.basename(fName)))]

            missingProds = [prodID for prodID in stackProdIDs
                            if prodID not in foundProds]

            # check stacks that make up tile???
            if not missingProds:
                self.startedProductsOfType[pType][productID] = \
                    Cu13.ProductDetails(productID,
                        dbc.intDefault(), prodFilePath,
                        fits.getConfMap(prodFilePath),
                        fits.getCatalogue(prodFilePath))

                self.prodDirectory[pType][productID] = \
                    os.path.dirname(prodFilePath)
2132 2133 #-------------------------------------------------------------------------- 2134
    def getPrimaryHeader(self, bestProd, prihdr, originalHeader, isImage,
                         provObj, addOrig=True):
        """
        This produces all the relevant info for the primary header.

        @param bestProd:       Product definition of the mosaic.
        @param prihdr:         Primary header to be rewritten.
        @param originalHeader: Primary header of the original input file.
        @param isImage:        True for the science image, False for the
                               confidence map.
        @param provObj:        Provenance information for this product.
        @param addOrig:        If True, copy approved keywords over from the
                               original header.

        @return: Primary header for mosaic
        @rtype: pyfits.header

        """
        # Add in original header values.
        prihdr = createNewHeader(prihdr, 'primary')

        # Add in additional info, if need be.
        cards = originalHeader.ascard
        if addOrig:
            # Copy only the keywords that pass checkList().
            for card in cards:
                if checkList(card.key) == 1:
                    try:
                        prihdr.update(card.key, card.value, card.comment)
                    except KeyError:
                        pass

        # Zeropoint keywords should be in image extension
        for keyword in ('MAGZPT', 'MAGZRR'):
            try:
                del prihdr[keyword]
            except KeyError:
                pass

        provInfo = provObj.getProvInfo()
        prihdr.update('PROJECT', provObj.getConstDict()['PROJECT'],
                      'Time-allocation code')

        prihdr.update('OBSNUM', -9999, 'Observation number')
        prihdr.update('FILTER', provInfo.get('filterName'), 'Filter name')
        prihdr.update('UTDATE', provInfo.get('utDate'),
                      'UT date as integer in yyyymmdd format')

        prihdr.update('MJD-OBS', provInfo.get('mjdObs'),
                      'DATE-OBS as Modified Julian Date')

        prihdr.update('WSA_TIME', '', 'WSA time stamp')
        prihdr.update('WSA_MFID', 0, 'WSA MultiframeID')
        if self.sysc.hasMicroStep:
            prihdr.update('NUSTEP', provInfo.get('nustep'), 'Number of microsteps')

        # add those used by CU3.py
        if 'DATE' in cards.keys():
            prihdr.update('DATE', originalHeader['DATE'], 'Creation Date')

        prihdr.update('TELESCOP', provObj.getConstDict()['TELESCOP'],
                      'Telescope name')
        prihdr.update('INSTRUME', provObj.getConstDict()['INSTRUME'],
                      'Instrument')
        prihdr.update('OBSERVER', provObj.getConstDict()['OBSERVER'],
                      'Observers names')
        prihdr.update('RECIPE', provObj.getConstDict()['RECIPE'],
                      'Data reduction recipe to be used')
        prihdr.update('OBSTYPE', 'OBJECT' if isImage else 'CONFIDENCEMAP',
                      'BIAS|DARK|SKYFLAT|DOMEFLAT|OBJECT|SKY|FOCUS')

        # Telescope-specific keyword sets: WSA (WFCAM) vs ESO (VISTA).
        if self.sysc.isWSA():
            prihdr.update('DHSVER', provObj.getConstDict()['DHSVER'],
                          'Data handling version')
            prihdr.update('HDTFILE', provObj.getConstDict()['HDTFILE'],
                          'Name of global hdt file')
            prihdr.update('USERID', provObj.getConstDict()['USERID'],
                          'Userid logged in as')
            prihdr.update('OBSREF', provObj.getConstDict()['OBSREF'],
                          'PATT or other reference')
            prihdr.update('DATE-END', provInfo.get('dateEnd'),
                          'Date time (UTC) of end of observation')
            prihdr.update('TRACKSYS', provObj.getConstDict()['TRACKSYS'],
                          'Telescope tracking coordinate system')
            prihdr.update('READMODE', provObj.getConstDict()['READMODE'],
                          'Name of camera readmode')
            prihdr.update(self.sysc.expTimeKeyWord, provInfo.get('expTime'),
                          '[s] Integration time per exposure')
            prihdr.update('NEXP', provObj.getConstDict()['NEXP'],
                          'Number of exposures in integration')
            prihdr.update('NINT', provObj.getConstDict()['NINT'],
                          'Number of integrations in observation')
            prihdr.update('READINT', provObj.getConstDict()['READINT'],
                          '[s] Interval between reads')
            prihdr.update('NREADS', provObj.getConstDict()['NREADS'],
                          'Number of reads per exposure')
            prihdr.update('RABASE', provInfo.get('raBase'),
                          '[h] Right ascension of base position')

            prihdr.update('DECBASE', provInfo.get('decBase'),
                          '[deg] Declination of base position')
        elif self.sysc.hasEsoRelease:
            prihdr.update("HIERARCH ESO OBS PROG ID",
                          provObj.getConstDict()['HIERARCH ESO OBS PROG ID'],
                          "ESO programme ID")
            prihdr.update("HIERARCH ESO INS FILT1 NAME",
                          provInfo.get('filterName').title(), "ESO FILTER NAME")

            # raBase is stored in hours; ESO 'RA' keyword is in degrees.
            prihdr.update('RA', 15. * provInfo.get('raBase'),
                          '[deg] Right ascension of base position')
            prihdr.update('HIERARCH ESO TEL POSANG',
                          provInfo.get('posAngle'), "ESO POSITION ANGLE")
            prihdr.update('DEC', provInfo.get('decBase'),
                          '[deg] Declination of base position')
            # ALSO ESO HIERARCH KEYS ...
            # @FIXME: NOT CORRECT -- HHMMSS.TTT DDMMSS.TTT
            tileRaHms, tileDecDms = astro.convertCoordEso(15.*provInfo.get('raBase'),
                                                          provInfo.get('decBase'))

            prihdr.update('HIERARCH ESO OCS SADT TILE RA', tileRaHms,
                          'SADT tile RA')
            prihdr.update('HIERARCH ESO OCS SADT TILE DEC', tileDecDms,
                          'SADT tile dec')
            prihdr.update('HIERARCH ESO OCS SADT TILE OFFANGLE',
                          provInfo.get('posAngle'), "SADT tile rotator offset angle")

            prihdr.update('HIERARCH ESO DPR TECH', 'IMAGE;JITTER', 'Observation technique')
            prihdr.update('STANDARD', False,
                          'Is the target a standard star observation?')

        prihdr.update('DATE-OBS', provInfo.get('dateObs'),
                      'Date time (UTC) of start of observation')

        prihdr.update('WCS_AXES', 2,
                      'Number of axes in world co-ordinate system')

        # WSA uses the FITS-standard RADESYS; VSA files use RADECSYS.
        radesysKey = 'RADESYS' if self.sysc.isWSA() else 'RADECSYS'
        try:
            prihdr.update(radesysKey, originalHeader['RADECSYS'],
                          'Mean IAU 1984 equatorial co-ordinates')
        except KeyError:
            # Original header used the other spelling of the keyword.
            prihdr.update(radesysKey, originalHeader['RADESYS'],
                          'Mean IAU 1984 equatorial co-ordinates')

        prihdr.update('EQUINOX', originalHeader['EQUINOX'],
                      '[yr] Equinox of object position')

        prihdr.update('EXTINCT', 0.0, 'Extinction')
        prihdr.update(self.sysc.amStart, 1.0, 'Airmass at start of observation')
        prihdr.update(self.sysc.amEnd, 1.0, 'Airmass at end of observation')

        prihdr.update('PRODID',
                      bestProd.deepProductTable["productID"][bestProd.index],
                      'Product ID of deep image')

        prihdr.update('RELNUM', self.releaseNum,
                      'Release number of deep image')

        return prihdr
2284 2285 #-------------------------------------------------------------------------- 2286
2287 - def getInitImageHeader(self, imghdr, originalHeader, provObj):
2288 """ 2289 This produces all the relevant astrometric info for the 2290 second header. 2291 2292 @return: Extension header for mosaic 2293 @rtype: pyfits.header 2294 2295 """ 2296 imghdr = createNewHeader(imghdr, 'exten') 2297 2298 return astrometry(imghdr, originalHeader, provObj.getConstDict())
2299 2300 #-------------------------------------------------------------------------- 2301
    def getImageHeader(self, bestProd, imghdr, isImage, provObj):
        """
        This produces all the relevant info for the second header in mosaic.

        @param bestProd: Product definition of the mosaic.
        @param imghdr:   Extension header to update.
        @param isImage:  True for the science image, False for the
                         confidence map.
        @param provObj:  Provenance information for this product.

        @return: Extension header for mosaic
        @rtype: pyfits.header

        """
        bestProd.getPointInfo()
        # Total exposure time averaged over pointings and extensions.
        x = bestProd.nPointings * bestProd.nExtensions
        totalExpTime = sum(tExpTime * utils.getNumberItems(eL) / x
            for _fN, _ut, _dO, _dE, _mO, _ra, _dc, _fI, _dp, eL, tExpTime
            in provObj.getInputStacks())

        imghdr.update('SKYSUB', '')
        imghdr.update('CAMNUM', 1, 'Number of WFCAM camera (1, 2, 3 or 4)')
        imghdr.update('FLATCOR', '')
        if isImage:
            imageName = bestProd.fileName()
            # Science image records the name of its confidence map.
            imghdr.update('CIR_CPM', os.path.basename(bestProd.confName()),
                          'Confidence Map')
        else:
            imageName = bestProd.confName()

        # Add in additional header values.
        imghdr.update('DETECTOR', provObj.getConstDict()['DETECTOR'],
                      'Type of detector array used')
        imghdr.update('DETECTID', provObj.getConstDict()['DETECTID'],
                      'Serial number of detector array')
        imghdr.update('DROWS', provObj.getConstDict()['DROWS'],
                      '[pixel] Number of detector rows')
        imghdr.update('DCOLUMNS', provObj.getConstDict()['DCOLUMNS'],
                      '[pixel] Number of detector columns')

        # update astrometry keywords form header
        imghdr.update('DARKCOR', '')

        # confidence image data into outImage
        if isImage:
            provInfo = provObj.getProvInfo()
            imghdr.update('CIRMED', provInfo.get('background'),
                          'Latest estimate of background ')

            imghdr.update('CIR_BVAR', provInfo.get('sd'),
                          'Latest estimate of background variance')

        # Telescope-specific keyword sets: WSA (WFCAM) vs ESO (VISTA).
        if self.sysc.isWSA():
            imghdr.update('DECURTN', provObj.getConstDict()['DECURTN'],
                          'De-curtaining done')
            imghdr.update('READNOIS', provObj.getConstDict()['READNOIS'],
                          'Readnoise estimate')
            imghdr.update('HDTFILE2', provObj.getConstDict()['HDTFILE2'],
                          'Name of camera-specific hdt file')
            imghdr.update('GAIN', provObj.getConstDict()['GAIN'],
                          '[electrons/ADU] Detector gain')
            imghdr.update('RDOUT_X1', provObj.getConstDict()['RDOUT_X1'],
                          'Start column of array readout')
            imghdr.update('RDOUT_X2', provObj.getConstDict()['RDOUT_X2'],
                          'End column of array readout')
            imghdr.update('RDOUT_Y1', provObj.getConstDict()['RDOUT_Y1'],
                          'Start row of array readout')
            imghdr.update('RDOUT_Y2', provObj.getConstDict()['RDOUT_Y2'],
                          'End row of array readout')
            imghdr.update('PIXLSIZE', provObj.getConstDict()['PIXLSIZE'],
                          '[arcimg] Pixel size')
        if self.sysc.hasEsoRelease:
            imghdr.update('GAINCOR', provObj.getConstDict()['GAINCOR'],
                          '[electrons/ADU] Detector gain')
            imghdr.update('HIERARCH ESO DET CHIP NO',
                          provObj.getConstDict()['HIERARCH ESO DET CHIP NO'],
                          'Camera num of extension')
        imghdr.update('SKYLEVEL', 0., 'Median sky brightness (counts/pixel)')
        imghdr.update('SKYNOISE', 0., 'Pixel noise at sky level (counts)')
        imghdr.update('MAGZPT', dbc.realDefault(),
                      'Zeropoint')
        imghdr.update('MAGZRR', 0.0, 'Zeropoint Error')
        imghdr.update('EXTINCT', 0.0, 'Extinction')
        imghdr.update('TEXPTIME', totalExpTime,
                      'Total exposure time in this detector (s)')

        # Provenance cards: PROV0000 is the output, PROVnnnn the inputs.
        inputStacks = provObj.getInputStacks()
        imghdr.update('PROV0000', os.path.basename(imageName), 'Output file')
        if isImage:
            for ii in range(len(inputStacks)):
                prov = 'PROV' + str("%04d" % (ii + 1))
                # Index of the extension-list field differs with stack count
                # -- presumably reflecting two getInputStacks() row layouts;
                # TODO confirm.
                if len(inputStacks) >= 10:
                    imgName = os.path.join(os.path.basename(os.path.dirname(
                        inputStacks[ii][0])), os.path.basename(
                        inputStacks[ii][0] + '[' + inputStacks[ii][9] + ']'))
                else:
                    imgName = os.path.join(os.path.basename(os.path.dirname(
                        inputStacks[ii][0])), os.path.basename(
                        inputStacks[ii][0] + '[' + inputStacks[ii][1] + ']'))
                comment = 'Card #' + str(ii + 1)
                imghdr.update(prov, imgName, comment)
        else:
            # Confidence map: provenance points at input confidence maps.
            for ii in range(len(inputStacks)):
                prov = 'PROV' + str("%04d" % (ii + 1))
                idx = (9 if len(inputStacks) >= 10 else 1)
                imgName = os.path.join(
                    os.path.basename(os.path.dirname(inputStacks[ii][0])),
                    os.path.basename(fits.getConfMap(inputStacks[ii][0]) +
                                     "[%s]" % inputStacks[ii][idx]))
                comment = 'Card #' + str(ii + 1)
                imghdr.update(prov, imgName, comment)

        return imghdr
2409 2410 #-------------------------------------------------------------------------- 2411
    def calcZP(self, bestProd, components, isGrt=False):
        """ Calculate ZP of catalogue.

            Gathers photometry for every component (from the archive where
            ingested, else directly from the component catalogues), compares
            it with the deep product's catalogue photometry, and derives a
            zeropoint per extension. For external products the calculated ZP
            is checked against the supplied one, which takes precedence.

            @param bestProd:   Product definition of the deep product.
            @param components: Component images of the product.
            @param isGrt:      If True, use the pre-grout catalogue of the
                               product.
            @type  isGrt:      bool

            @return: Zeropoint (value, error, number of sources) referenced
                     by extension number.
            @rtype:  dict
        """
        Logger.addMessage("Retrieving photometry information...",
                          alwaysLog=False)

        # Get photometry for each component:
        components = list(components)  # Components must be an ordered list!
        mfIDs = list(map(attrgetter("multiframeID"), components))

        # @TODO: self.poorDets

        intermediateStackPhotometry, detectorCounts, poorPhotList = \
            self.getArchivePhotometry(mfIDs)
        # read directly from catalogue for components not in database
        self.poorDets = []
        intermediateStackPhotometry += \
            self.getCataloguePhotometry(components, detectorCounts,
                                        bestProd.fTable)

        intermediateStackPhotometry = numpy.array(intermediateStackPhotometry)

        # First component with an image name defines the frame type.
        compName = [comp.imageName for comp in components if comp.imageName][0]
        intExtList = \
            self.sysc.extListOfType(frameType=fits.determineFrameType(compName))

        compDict = dict((compID, compData.imageName)
                        for compID, compData in enumerate(components))

        # Collect per-component photometry dictionaries.
        intData = []
        for compID in range(len(components)):
            if components[compID].imageName:
                phtDataDict = calib.getIntermedPhotometry(compID,
                    detectorCounts, intermediateStackPhotometry, intExtList, poorPhotList)

                if phtDataDict:
                    intData.append((compID, phtDataDict))
                else:
                    Logger.addMessage("<WARNING> Component %s not found for"
                        " product %s: possibly we don't have the reprocessed file."
                        % (compDict[compID], bestProd.fileName()))

        # get photometry for deep image
        deepData = self.getFitsCatData(bestProd, isGrt)
        # ALSO RETURN intermediate stack delta ZPs

        zpDict = calib.checkAstrometryPhotometry(intData, deepData,
            (bestProd.isMosaic() or
             bestProd.isTile()), compDict,
            poorerZP=self.poorDets)

        if bestProd.isExternal():
            # Compare ZP to calcZP
            zp = bestProd.getZeropoint()
            for extNum in zpDict:
                calcZP, calcZPErr, numZP = zpDict[extNum]
                deltaZP = calcZP - zp
                # Warn if they disagree by more than 3 sigma, but always
                # keep the externally supplied zeropoint.
                if abs(deltaZP) > 3 * calcZPErr:
                    Logger.addMessage("External %s, %s, extNum %s, "
                        "should have ZP=%s mag, but "
                        "calculated ZP=%s mag" %
                        (self.pType, bestProd.fileName(), extNum,
                         round(zp, 3), round(calcZP, 3)))
                zpDict[extNum] = (zp, calcZPErr, numZP)

        return zpDict
2479 2480 #-------------------------------------------------------------------------- 2481
2482 - def getArchivePhotometry(self, mfIDs):
2483 """ 2484 Get archived photometry with latest calibration. 2485 2486 @return: List of good stellar like sources with photometry errors 2487 better than limit. 2488 @rtype: list(tuple), list(tuple) 2489 2490 """ 2491 if not (max(mfIDs) > 0): # Don't query DB if no data in DB 2492 return [], [], [] 2493 2494 detector = "%s.multiframeID, %s.extNum" \ 2495 % (self.programme.getDetectionTable(), 2496 self.programme.getDetectionTable()) 2497 2498 detJoin = Join([self.programme.getDetectionTable(), 2499 self.programme.getAstrometryTable(), 2500 self.programme.getPhotometryTable()], 2501 ["multiframeID", "extNum", "seqNum"]) 2502 2503 compIDofMfID = \ 2504 dict((mfID, compID) for compID, mfID in enumerate(mfIDs) if mfID > 0) 2505 # @TODO: class IN (-1,-2) -- no class selection, but keep class in for later? 2506 # 2507 predicates = ("aperMag%sErr BETWEEN 0 AND %s AND class IN (-1,-2) AND " 2508 "ppErrBits=0 AND %s.multiframeID IN (%s)" 2509 % (Cu13.aperture[self.programme.getAcronym().upper()], 2510 self.sysc.phtErrLimit, 2511 self.programme.getDetectionTable(), 2512 ','.join(map(str, compIDofMfID)))) 2513 listPoor = [] 2514 if not self.isDetTableTooBig: 2515 photometry = self.archive.query( 2516 selectStr=detector + ", ra, dec, aperMag%s, aperMag%sErr" 2517 % (Cu13.aperture[self.programme.getAcronym().upper()], 2518 Cu13.aperture[self.programme.getAcronym().upper()]), 2519 fromStr=detJoin, 2520 whereStr=predicates, 2521 orderBy=detector + ", dec") 2522 2523 photometry = [(compIDofMfID[mfID], eN, ra, dc, aM, aME) 2524 for mfID, eN, ra, dc, aM, aME in photometry] 2525 counts = [(compIDofMfID[mfID], eN, cnt) for mfID, eN, cnt 2526 in self.archive.queryNumRows(detJoin, predicates, 2527 groupBy=detector)] 2528 self.poorDets = [(cmpID, eNum) for cmpID, eNum, number in counts 2529 if number < 20] 2530 2531 if self.poorDets: 2532 detSel = SelectSQL("%s.multiframeID,%s.extNum" % 2533 (self.programme.getDetectionTable(), 2534 self.programme.getDetectionTable()), 2535 detJoin, 
predicates, 2536 groupBy=detector + " HAVING COUNT(*)<20") 2537 predicates = ("aperMag%sErr BETWEEN 0 AND %s AND " 2538 "ppErrBits=0 AND %s.multiframeID IN (%s)" 2539 % (Cu13.aperture[self.programme.getAcronym().upper()], 2540 self.sysc.phtErrLimit, 2541 self.programme.getDetectionTable(), 2542 ','.join(map(str, compIDofMfID)))) 2543 2544 2545 fromStr = detJoin.fromStr + ",(%s) as T" % detSel 2546 whereStr = (detJoin.whereStr + 2547 " AND %s.multiframeID=T.multiframeID AND %s.extNum=T.extNum" 2548 % (self.programme.getDetectionTable(), 2549 self.programme.getDetectionTable())) 2550 photometryPoor = numpy.array(self.archive.query( 2551 selectStr=detector + ", ra, dec, aperMag%s, aperMag%sErr" 2552 % (Cu13.aperture[self.programme.getAcronym().upper()], 2553 Cu13.aperture[self.programme.getAcronym().upper()]), 2554 fromStr=fromStr, 2555 whereStr=whereStr, 2556 orderBy=detector + ", dec")) 2557 countsPoor = [(compIDofMfID[mfID], eN, cnt) for mfID, eN, cnt 2558 in self.archive.queryNumRows(fromStr, whereStr, 2559 groupBy=detector)] 2560 listPoor = [photometryPoor, countsPoor] 2561 else: 2562 photometry = [] 2563 counts = [] 2564 listPoor = [] 2565 return photometry, counts, listPoor
2566 2567 #-------------------------------------------------------------------------- 2568
2569 - def getCataloguePhotometry(self, components, detectorCounts, fTable):
2570 """ 2571 Runs through list of catalogues that are created, but not ingested and 2572 gets the photometry. 2573 2574 @param components: Must be an ordered list of ProductDetails for each 2575 component. 2576 @type components: list(ProductDetails) 2577 2578 @return: List of photometric values 2579 @rtype: list(tuple) 2580 2581 """ 2582 photometry = [] 2583 for compID, comp in enumerate(components): 2584 if comp.multiframeID < 0 or self.isDetTableTooBig: # Find component 2585 intFrameType = fits.determineFrameType(comp.imageName) 2586 intProdType = self.sysc.productTypeOfFrame(intFrameType) 2587 compProd = ProductDefinition(self, comp.productID, fTable, 2588 productType=intProdType, imagePathName=comp.imageName) 2589 for extNum, sources \ 2590 in self.getFitsCatData(compProd, calcOnly=True): 2591 if self.isPoor[extNum]: 2592 self.poorDets.append((compID, extNum)) 2593 detectorCounts.append((compID, extNum, len(sources))) 2594 photometry.extend((compID, extNum, ra, dec, aM, aME) 2595 for ra, dec, aM, aME in sources) 2596 2597 return photometry
2598 2599 #-------------------------------------------------------------------------- 2600
def getFitsCatData(self, bestProduct, isGrt=False, noMagLim=False,
                   calcOnly=False):
    """
    Checks astrometry and gets photometry from a catalogue product.

    @param bestProduct: Product whose catalogue is measured.
    @type  bestProduct: ProductDefinition
    @param isGrt:       If True, read the pre-grout catalogue of a tile.
    @type  isGrt:       bool
    @param noMagLim:    If True, select on classification only (no S/N cut)
                        and also return flux and classification columns.
    @type  noMagLim:    bool
    @param calcOnly:    If True, open the catalogue read-only and do not
                        write default header values for empty extensions.
    @type  calcOnly:    bool

    @return: List of pairs of extension numbers for detectors and their
             photometric measurements as an array of source measurements
             (ra, dec, aperMag, aperMagErr) in order of extension number.
    @rtype: list(tuple(int, numpy.array))

    """
    # Sub-sample large catalogues down to roughly this many stars.
    factNo = (10000 if bestProduct.isTile() or bestProduct.isMosaic() else
              5000)

    catName = (bestProduct.preGroutCatName() if isGrt else
               bestProduct.catName())

    aperFlux = \
        'Aper_flux_%s' % Cu13.aperture[self.programme.getAcronym().upper()]
    self.isPoor = {}
    image = fits.open(bestProduct.fileName())
    data = []
    cathdu = (fits.open(catName) if calcOnly else
              fits.open(catName, 'update'))
    # @note This seems to work, but is it just a copy
    for index, cat in enumerate(cathdu[1:]):
        extNum = index + 2
        tbdata = cat.data
        catHead = cat.header
        imdet = index + 1 if bestProduct.isMef() else 0
        imgHead = image[imdet].header
        # @FIXME fixed: was image[0].header("HIERARCH ESO TEL POSANG"),
        # which *called* the header object -- that always raised and a
        # bare except silently forced posAng to None.  Header.get()
        # returns None for a missing keyword, matching the original
        # intent (DB fallback further below).
        posAng = image[0].header.get("HIERARCH ESO TEL POSANG")
        # make sure that data set is not too large!
        fact = 1
        self.isPoor[extNum] = False
        if catHead['NAXIS2'] == 0:
            # Empty extension: give it default calibration keywords
            # (unless read-only) and record an empty measurement list.
            if not calcOnly:
                Logger.addMessage("Adding default header values to missing "
                                  "extension %s:%s" % (catName, extNum))

                for ap in range(1, 8, 1):
                    catHead.update("APCOR%d" % ap, 0.,
                                   "Default aperture correction")

                catHead.update("RCORE", dbc.realDefault(),
                               "Default rcore value")

                catHead.update("SATURATE", dbc.realDefault(),
                               "Default saturate value")

            data.append((extNum, []))
            continue
        # @TODO: make it consistent with class selection below.
        noStars = \
            numpy.sum(numpy.equal(tbdata.field('Classification'), -1))

        if noStars > factNo:
            fact = math.floor(noStars / factNo)

        # Make sure pv23 is correct for filter.
        # select Flux>0,FluxErr/Flux>0.02, class=-1 and fmod(seqNum,fact)=0
        goodObs = tbdata.field(aperFlux) > 0
        if noMagLim:
            # Classification-only selection; accepted classes differ
            # between the CASU and SExtractor extractors.
            if self.extractor == 'CASU':
                goodObs = numpy.logical_and(goodObs,
                    numpy.logical_or(
                        numpy.logical_or(
                            tbdata.field('Classification') == -1,
                            tbdata.field('Classification') == -9),
                        tbdata.field('Classification') == -2))
            else:
                goodObs = numpy.logical_and(goodObs,
                    numpy.logical_or(
                        numpy.logical_or(
                            tbdata.field('Classification') == -1,
                            tbdata.field('Classification') == -2),
                        tbdata.field('Classification') == -3))

        else:
            # avStellEll = catHead['ELLIPTIC']
            # seeing = catHead['SEEING']
            apFluxMsk = numpy.where(
                numpy.greater(tbdata.field(aperFlux), 0.),
                tbdata.field(aperFlux), 1.)
            goodObs = numpy.where(
                numpy.greater(tbdata.field('Aper_flux_3'), 0.),
                numpy.logical_and(
                    numpy.logical_and(
                        numpy.less(numpy.divide(
                            tbdata.field(aperFlux + '_err'),
                            apFluxMsk), 0.02),
                        numpy.logical_or(
                            numpy.equal(tbdata.field('Classification'), -2),
                            numpy.equal(tbdata.field('Classification'), -1))),
                    numpy.equal(tbdata.field('Error_bit_flag'), 0)), 0)
            # If too few objects remove classification selection
            # 20 is a bit arbitary?
            if numpy.nonzero(goodObs)[0].size < 20:
                self.isPoor[extNum] = True
                # NOTE(review): this branch divides by the raw flux rather
                # than the masked flux -- zero fluxes would divide by
                # zero; confirm inputs guarantee positive flux here.
                goodObs = numpy.where(
                    numpy.greater(tbdata.field('Aper_flux_3'), 0.),
                    numpy.logical_and(
                        numpy.less(numpy.divide(
                            tbdata.field(aperFlux + '_err'),
                            tbdata.field(aperFlux)), 0.02),
                        numpy.equal(tbdata.field('Error_bit_flag'), 0)), 0)

        # Final cut: clean error flags plus sequence-number sub-sampling.
        goodObs = \
            numpy.nonzero(
                numpy.logical_and(
                    numpy.logical_and(goodObs,
                                      tbdata.field('Error_bit_flag') == 0),
                    tbdata.field('Sequence_number') % fact == 0)
            )[0]
        dataArray = numpy.zeros([goodObs.size, 6 if noMagLim else 4])

        # ra and dec -- is this good enough
        dataArray[:, 0] = \
            numpy.degrees(tbdata[goodObs].field('RA')).astype(numpy.float64)

        dataArray[:, 1] = \
            numpy.degrees(tbdata[goodObs].field('DEC')).astype(numpy.float64)

        # WCS: tangent-plane coordinates (xi, eta) from the image header.
        cd11 = math.radians(imgHead['CD1_1'])
        cd12 = math.radians(imgHead['CD1_2'])
        cd21 = math.radians(imgHead['CD2_1'])
        cd22 = math.radians(imgHead['CD2_2'])
        crpix1 = imgHead['CRPIX1']
        crpix2 = imgHead['CRPIX2']
        x = tbdata[goodObs].field('X_coordinate')
        y = tbdata[goodObs].field('Y_coordinate')
        xi = cd11 * (x - crpix1) + cd12 * (y - crpix2)
        eta = cd21 * (x - crpix1) + cd22 * (y - crpix2)
        rad = numpy.hypot(xi, eta)
        if (self.sysc.hasEsoRelease
                and not (bestProduct.isMosaic() or bestProduct.isTile())
                and self.sysc.deepSuffix not in bestProduct.fileName()):
            if not posAng:
                # Get posang from DB
                posAng = self.archive.query("telPosAng",
                    "Multiframe as m, MultiframeEsoKeys as e",
                    "m.multiframeID=e.multiframeID and m.fileName='%s'"
                    % bestProduct.fileName(),
                    firstOnly=True)

            # Rotate (xi, eta) by the telescope position angle.
            theta = -math.radians(posAng)
            xir = xi * math.cos(theta) - eta * math.sin(theta)
            etar = xi * math.sin(theta) + eta * math.cos(theta)
            xi = xir
            eta = etar
        distortCorr = 1
        # NOTE(review): for tiles/mosaics distortCorr stays 1 (not 0) and
        # is *added* to the magnitude below -- looks suspicious; confirm
        # this path is intended for those products.
        if not (bestProduct.isMosaic() or bestProduct.isTile()):
            if 'ZPN' in imgHead['CTYPE1'] and 'ZPN' in imgHead['CTYPE2']:
                pv21 = imgHead['PV2_1']
                pv23 = imgHead['PV2_3']
                distortCorr = 1 + rad * rad * 3 * pv23 / pv21
                distortCorr *= 1 + rad * rad * pv23 / pv21

            elif 'TAN' in imgHead['CTYPE1'] and 'TAN' in imgHead['CTYPE2']:
                distortCorr = 1 / (numpy.tan(rad) ** 2 + 1)
                distortCorr *= numpy.arctan(rad) / rad
                distortCorr = \
                    numpy.where(numpy.equal(rad, 0.), 1., distortCorr)

            distortCorr = 2.5 * numpy.log10(distortCorr)

        # Deep stacks/mosaics taken over several nights:
        extinctCorr = 0
        if self.sysc.deepSuffix not in bestProduct.fileName():
            extinct = catHead['EXTINCT']
            amStart = image[0].header[self.sysc.amStart]
            amEnd = image[0].header[self.sysc.amEnd]
            extinctCorr = -extinct * ((amStart + amEnd) / 2.0 - 1.)

        illumCorr = 0.
        if (self.sysc.deepSuffix not in bestProduct.fileName() and
                not (bestProduct.isMosaic() or bestProduct.isTile())):
            illumFile = self.archive.query("illumFile", "Multiframe",
                "fileName='%s'" % bestProduct.fileName(),
                firstOnly=True)
            illumTableName = os.path.join(self.sysc.illuminationDir(),
                                          illumFile + '.csv')
            illumCorr = calib.calcIllumCorrArray(illumTableName, xi, eta,
                fits.determineFrameType(bestProduct.fileName()))

        # select
        aFlux = tbdata[goodObs].field(aperFlux)
        aFluxErr = tbdata[goodObs].field(aperFlux + '_err')

        # mag with no ZP
        zp = catHead['MAGZPT'] if bestProduct.isComponent else 0
        expTime = catHead[self.sysc.expTimeKeyWord]
        apCor3 = catHead.get(
            'APCOR%s' % Cu13.aperture[self.programme.getAcronym().upper()],
            0)

        dataArray[:, 2] = -2.5 * numpy.log10(aFlux / expTime) \
            + extinctCorr + illumCorr + distortCorr - apCor3 + zp
        dataArray[:, 3] = numpy.where(aFlux > 0, 1.0857 * aFluxErr / aFlux,
                                      dbc.realDefault())
        if noMagLim:
            dataArray[:, 4] = aFlux
            dataArray[:, 5] = tbdata[goodObs].field('Classification')

        data.append((extNum, dataArray))
    if not calcOnly:
        fits.checkForUnicodeError(cathdu, catName)
    cathdu.close()
    image.close()  # was leaked: the image HDU list was never closed

    return data
2805 2806 #-------------------------------------------------------------------------- 2807 2808 2809
def setUpProductsInfo(self):
    """ Gets information for these products...
    """
    # @TODO: Get rid of views throughout and used namedTuple.
    reqTableName = "Required" + self.pType.title()
    schemaColumns = schema.parseTables(self.sysc.curationSchema(),
                                       [reqTableName])[0].columns

    self.prodInfo = queries.getProductInfo(self.archive, self.programmeID,
        self.pType, self.reqProdsOfType[self.pType])

    # (A dual-filter reordering of mosaic products was once applied here
    #  but is disabled.)
    self.deepProductTable = df.View(map(str, schemaColumns), self.prodInfo)
2842 2843 #-------------------------------------------------------------------------- 2844
def calcDiskSpaceRequired(self, numProducts):
    """
    Disk space required for stack, confidence and catalogue.

    @param numProducts: Number of products that will be created.
    @type  numProducts: int

    @return: Disk space required per deep stack, in gigabytes.
             NOTE(review): returns None if self.pType is not one of
             'stack'/'tile'/'mosaic' -- confirm callers never pass
             another type.
    @rtype: float

    """
    bytesPerPix = 4  # uncompressed 32-bit pixels

    # Micro-stepping multiplies the pixel count per detector.
    nuStep = (max(self.deepProductTable["nustep"])
              if self.sysc.hasMicroStep else 1)
    if self.pType == 'stack':

        nuStepStr = " AND nuStep=%s" % nuStep if self.sysc.hasMicroStep else ""
        # Largest intermediate stack geometry for this programme
        # (excluding tiles and mosaics).
        nDet, nRows, nCols = self.archive.queryAttrMax(
            attrs="numDetectors, detRows, detCols",
            table=Join(["Multiframe", "ProgrammeFrame", "MultiframeDetector"],
                       ["multiframeID"]),
            where="programmeID=%s%s" % (self.programmeID, nuStepStr)
                + " AND frameType LIKE '%stack' AND frameType not LIKE "
                  "'%tile%' AND frameType not like '%mosaic%'")

        if nDet is None:
            # No stacks in the database yet, so nothing to size against.
            return 0

        # Value in WSA is for compressed image
        totalSize = (nRows * nCols * bytesPerPix) / self.sysc.one_gigabyte

        # Factor to go from size of image to size of image, conf, cat
        # conf=0.5*image, catalogue =?? * image. 2x should be sufficient
        multFact = 2.5
        return totalSize * nuStep * nDet * multFact * numProducts

    elif self.pType == 'tile':
        # 6 pawprints + 1 tile + some temporary -
        # 6 filtered pawprints (no conf or cat) + filtered tile
        multFact = 3

        # 16 detectors of 2048x2048 pixels, 6 pawprints per tile;
        # factor 1.2 is contingency, /2 because the stored value is for
        # the compressed image.
        return (numProducts + 2) * multFact \
            * (1.2 * nuStep * 16 * 2048 * 2048 * 6 * bytesPerPix
               / (2 * self.sysc.one_gigabyte))

    elif self.pType == 'mosaic':
        # value in WSA is for compressed image
        # factor to go from size of image to size of image,conf,cat and
        # temporary files
        multFact = 3

        # use raExtent,decExtent and scrunch
        # This will cope with mosaics of different sizes
        mosaicSize = []
        for index in range(len(self.deepProductTable['productid'])):
            if self.deepProductTable['pixelsize'][index] > 0:
                pixelSize = self.deepProductTable['pixelsize'][index]
            else:
                # Default: instrument scale refined by micro-stepping.
                pixelSize = self.sysc.instrumentPixScale \
                    / math.sqrt(self.deepProductTable['nustep'][index])
            # Extents are in degrees; 3600 converts to arcsec per axis.
            mosaicSize.append(3600 * 3600 * bytesPerPix
                * self.deepProductTable['raextent'][index]
                * self.deepProductTable['decextent'][index]
                / (pixelSize ** 2))
        diskRequired = multFact * sum(mosaicSize) / self.sysc.one_gigabyte

        Logger.addMessage("Disk space required for mosaics = %6.3fGb"
                          % diskRequired)

        return diskRequired
2911 2912 #-------------------------------------------------------------------------- 2913
def createProductDir(self, spaceRequired):
    """
    Creates the directory to store newly created deep stacks.

    @param spaceRequired: Amount of disk space required.
    @type  spaceRequired: float

    @return: Full path to directory where deep stacks will be created.
    @rtype: str

    """
    # @TODO: These system constants should really be just a single dict.
    typeToDir = {'stack': self.sysc.stackDir,
                 'mosaic': self.sysc.mosaicDir}
    if self.sysc.hasOffsetPos:
        typeToDir['tile'] = self.sysc.tileDir

    # Real runs pick the emptiest disk; test runs use the test area.
    baseDir = (utils.getNextDisk(self.sysc, preAllocMem=spaceRequired)
               if self.archive.isRealRun else self.sysc.testOutputPath())

    productDir = os.path.join(baseDir, typeToDir[self.pType])
    utils.ensureDirExist(productDir)

    # Either continue in a restart directory or start a fresh dated one.
    versionDir = (self.restartDir if self.restartDir else
                  '%s_v%s' % (self.dirDate, self.releaseNum))
    outputDirPath = os.path.join(productDir, versionDir)
    utils.ensureDirExist(outputDirPath)

    return outputDirPath
2946 2947 #-------------------------------------------------------------------------- 2948
def getDualImageData(self, bestProduct):
    """
    Creates a Product class for the dual filter image if there is one.

    @param bestProduct: Product whose dual-filter counterpart is sought.
                        (Docstring previously mis-described this as a
                        disk-space amount -- copy-paste error.)
    @type  bestProduct: ProductDefinition

    @return: Product definition for the dual-filter image with its output
             directory set, or None if no dual image file is found.
    @rtype:  ProductDefinition or None

    """
    prodID = bestProduct.deepProductTable["productID"][bestProduct.index]
    numProds = len(bestProduct.deepProductTable["productID"])
    Logger.addMessage("Getting dual image data for product %s of %s"
                      % (prodID, numProds))

    # Check to see if it exists.
    dualImageProductID = bestProduct.getDualProductID()
    dualProduct = \
        ProductDefinition(self, dualImageProductID, bestProduct.fTable)

    dualImagePath = fits.findProduct(self.programmeID, self.releaseNum,
        dualImageProductID, self.pType, self.prodInfo, bestProduct.fTable,
        isDev=not self.archive.isRealRun, sysc=self.sysc)

    if not dualImagePath:
        return

    dualProduct.outputDir = os.path.dirname(dualImagePath)

    return dualProduct
2980 2981 #-------------------------------------------------------------------------- 2982
def stackImages(self, images, bestProduct, expTimes):
    """
    Creates a deep stack image from a list of component images using
    external CASU tools.

    @param images:      Component files (attributes image, conf, cat).
    @type  images:      list
    @param bestProduct: Definition of the deep product being created.
    @type  bestProduct: ProductDefinition
    @param expTimes:    Total exposure time per extension; zero entries
                        mark extensions with no usable data.
    @type  expTimes:    list(float)

    """
    imageFiles = ','.join(filePath.image for filePath in images)
    cats = ','.join(filePath.cat for filePath in images if filePath.cat)

    dodgyExt = [ii for ii, expTime in enumerate(expTimes) if expTime == 0]
    # @FIXME: This will work for bright stars, but not generally.
    # When is there a problem?
    if len(dodgyExt) > 0:
        Logger.addMessage("Some extensions (%s) with zero total exposure time. "
                          "For now these will be stacked as if there was data. "
                          % ','.join(map(str, dodgyExt)))
        # Re-derive confidence maps from the images instead of trusting
        # the supplied ones.
        confMaps = \
            ','.join(fits.getConfMap(filePath.image) for filePath in images
                     if filePath.conf)
    else:
        confMaps = \
            ','.join(filePath.conf for filePath in images if filePath.conf)

    deepFilePath = bestProduct.fileName()
    confFilePath = bestProduct.confName()
    if len(images) > 1:  # Only stack if more than one image

        stacker = os.path.join(self.casuSoftDir, self.stacker) \
            if self.casuSoftDir else self.stacker
        # --method: 0 for WSA, 1 for VSA (bool formats as int).
        cmd = stacker + " %s %s %s %s %s --method %d --expkey %s" \
            " --lthr 5 --hthr 5 --nplate 6 --seewt" % (imageFiles,
            confMaps, cats, deepFilePath, confFilePath,
            self.sysc.isVSA(), self.sysc.expTimeKeyWord)

        cmdOutput = ''.join(extp.out(cmd))
        if Logger.isVerbose or not os.path.exists(deepFilePath):
            Logger.addMessage(cmdOutput)

        if not os.path.exists(deepFilePath):
            raise Cu13.CuError("Deep stack not created. Check the disks "
                               "of the input stacks: " + imageFiles)

    if bestProduct.part:  # Part stacks need catalogues for each part
        images = self.getOBLevel(images, bestProduct)
    fits.prepHeaders(bestProduct.fileName(),
                     bestProduct.confName(), images,
                     bScale=(bestProduct.bScale if self.sysc.hasEsoRelease
                             else None))

    comps = [Cu13.ProductDetails(dbc.intDefault(), dbc.intDefault(),
                                 f.image, f.conf, f.cat)
             for f in images]

    self.createCatalogue(bestProduct, comps, isQuick=True)
3037 3038 #-------------------------------------------------------------------------- 3039
def tileImages(self, inputImages, bestProduct, isFiltered=False):
    """
    Combine the offset pawprints of a tile into a single deep tile image
    using the external CASU tiling tool, writing either the filtered or
    the unfiltered output files of bestProduct.
    """
    pawprints = ','.join(img.imageName for img in inputImages)
    confidences = ','.join(img.confName for img in inputImages
                           if img.confName)

    outImage = (bestProduct.filteredName() if isFiltered else
                bestProduct.fileName())
    outConf = (bestProduct.filtConfName() if isFiltered else
               bestProduct.confName())

    tool = (os.path.join(self.casuSoftDir, self.tiler)
            if self.casuSoftDir else self.tiler)
    command = ' '.join((tool, pawprints, confidences, outImage, outConf))

    if len(inputImages) == 6:  # Only tile if correct number of images
        stdOut = extp.out(command)  # @TODO: Try running outside shell
        Logger.addMessage(''.join(stdOut),
                          alwaysLog=not os.path.exists(outImage))

    if not os.path.exists(outImage):
        raise Cu13.CuError("Deep tile not created. Check the disks of the "
                           "input stacks: " + pawprints)
3064 3065 3066 #-------------------------------------------------------------------------- 3067
def filterImages(self, inputImage, filteredFileName):
    """
    Background-filter a single image with the external CASU filtering
    tool and return the path of the filtered file.
    """
    # Median and linear filter kernel sizes, in pixels.
    medFiltSize = "101"
    linFiltSize = "33"

    tool = (os.path.join(self.casuSoftDir, self.filterer)
            if self.casuSoftDir else self.filterer)
    command = ' '.join((tool, inputImage.imageName, inputImage.confName,
                        filteredFileName, medFiltSize, linFiltSize))

    output = extp.out(command)  # @TODO: Try running outside shell
    Logger.addMessage(''.join(output),
                      alwaysLog=not os.path.exists(filteredFileName))

    if not os.path.exists(filteredFileName):
        raise Cu13.CuError("Filtered image not created. Check the disks "
                           "of the input stack: " + inputImage.imageName)

    return filteredFileName
3086 3087 #-------------------------------------------------------------------------- 3088
def checkProductStatus(self, reqProducts, reqInputs):
    """
    Determines which products require processing.

    Splits the requested product IDs into those already made (they have a
    positive multiframe ID and reprocessing is off) and those still to be
    created.

    @return: Pair of (completed [(productID, mfID)],
                      new [(productID, frameType, images)]).
    @rtype:  tuple(list, list)
    """
    finished = []
    pending = []
    for prodID in reqProducts:
        frameType, deepMfID, componentImages = reqInputs[prodID]
        isDone = deepMfID > 0 and not self.reprocess
        if isDone:
            finished.append((prodID, deepMfID))
        else:
            pending.append((prodID, frameType, componentImages))

    return finished, pending
3102 3103 #-------------------------------------------------------------------------- 3104
def testProduct(self, bestProduct, components, calcOnly=False):
    """
    Tests magnitude limit, seeing, classifications against expectations
    based on input files. Doesn't work for products made of previous
    products. These have already been calculated ...

    @param bestProduct: Product whose image/catalogue pair is tested.
    @type  bestProduct: ProductDefinition
    @param components:  Component frames (currently unused here: the
                        seeing check that used them is disabled).
    @type  components:  list
    @param calcOnly:    If True, open files read-only and do not write
                        ABMAGLIM back into the headers.
    @type  calcOnly:    bool

    """
    filterID = bestProduct.deepProductTable["filterID"][bestProduct.index]
    filterTable = df.Table("Filter", self.archive)
    filterTable.setCurRow(filterID=filterID)
    if calcOnly:
        cathdul = fits.open(bestProduct.catName())
        imghdul = fits.open(bestProduct.fileName())
    else:
        cathdul = fits.open(bestProduct.catName(), 'update')
        imghdul = fits.open(bestProduct.fileName(), 'update')

    # @FIXME fixed: was "len(imghdul) is 1 and len(cathdul) is 2" --
    # identity comparison of ints only works by accident of CPython's
    # small-int caching; use == for value comparison.
    if len(imghdul) != len(cathdul) \
            and not (len(imghdul) == 1 and len(cathdul) == 2
                     and bestProduct.isTile() and not bestProduct.isMef()):

        raise Cu13.CuError("ProductID %d, file %s has a different number "
            "of extensions than the catalogue %s."
            % (bestProduct.deepProductTable["productID"][bestProduct.index],
               bestProduct.fileName(), bestProduct.catName()))

    # Get data for product (mag,class)
    magClassDict = dict(
        self.getFitsCatData(bestProduct, isGrt=False, noMagLim=True,
                            calcOnly=calcOnly))

    self.abMagLimit = dict()
    for ii, hdu in enumerate(cathdul):
        extNum = ii + 1
        imgInd = ii if bestProduct.isMef() else 0
        if ii > 0 and magClassDict[extNum] is not None:
            # @FIXME: Seeing calculation is dead code?
            # (The clipped-MAD seeing comparison against component frames
            # was commented out here.)

            # check mag limit
            totExpTime = hdu.header["TEXPTIME"]
            expML = (filterTable.getAttr("oneSecMlVg")
                     + 1.25 * math.log10(totExpTime)
                     if totExpTime > 0. else dbc.realDefault())

            if self.extractor == 'SEX':
                # The fields have had better sky subtraction and tend to
                # be deep extragalactic ones taken in dark time
                expML += 1
            magClass = magClassDict[extNum]
            if expML > 0 and len(magClass) > 0:
                if bestProduct.isComponent:
                    magData = magClass[:, 2]
                else:
                    # Component mags were stored without the zero-point.
                    magData = numpy.add(magClass[:, 2],
                                        hdu.header["MAGZPT"])
                magErrData = magClass[:, 3]
                mLimit = getRobustLimit(magData, magErrData, expML)
                abMagLimit = mLimit + filterTable.getAttr("vegaToAB")
            else:
                abMagLimit = dbc.realDefault()
            self.abMagLimit[extNum] = abMagLimit
            if not calcOnly:
                hdu.header.update("ABMAGLIM", abMagLimit,
                    "point source 5-sigma limiting AB magnitude")

                imghdul[imgInd].header.update("ABMAGLIM", abMagLimit,
                    "point source 5-sigma limiting AB magnitude")

            # @TODO: Saturation limit -- 1 exposure.
            # How does coaveraging work?
            # (getSaturLimit-based ABMAGSAT update was commented out
            # here.)

    # check classifications as func of mag
    if not calcOnly:
        fits.checkForUnicodeError(cathdul, bestProduct.catName())
        fits.checkForUnicodeError(imghdul, bestProduct.fileName())
    cathdul.close()
    imghdul.close()
3199 3200 #-------------------------------------------------------------------------- 3201
def checkQBFlags(self):
    """
    Check whether ppErrBitStatus is set correctly for each intermediate
    stack in list.

    Raises Cu13.CuError if any of this programme's frames still have
    detections with ppErrBitsStatus=0.
    """
    Logger.addMessage("Checking whether detection quality bit-flags need "
                      "updating...")

    # @GOTCHA: Select on programmeID as well as multiframeID: a
    # multiframe may be shared between two programmes with different
    # detection tables, and non-surveys have entries for the default
    # programmeID.  Frames without detections would show a default
    # ppErrBitsStatus of zero, hence the entries-exist test.
    detTable = self.programme.getDetectionTable()
    hasUnflagged = self.archive.queryEntriesExist(
        Join(["ProgrammeFrame", detTable], ["multiframeID"]),
        where="ppErrBitsStatus=0 AND programmeID=%s" % self.programmeID)

    if hasUnflagged:
        raise Cu13.CuError(
            "Detection quality bit flags have not been updated for this "
            "programme's intermediate stacks!")
3227 3228 #-------------------------------------------------------------------------- 3229
def classifyCat(self, catName, areZeroSizeExts):
    """
    Run the external CASU 'classify' tool on the given catalogue,
    tolerating the known errors produced by zero-size extensions.
    """
    Logger.addMessage("Classifying catalogue " + catName,
                      alwaysLog=False)
    tool = (os.path.join(self.casuSoftDir, "classify")
            if self.casuSoftDir else "classify")

    try:
        extp.run(tool + " " + catName, ignoreMsgs=self.extractIgnoreMsgs)
    except extp.Error as error:
        # NOTE(review): polarity preserved from the original -- errors
        # are tolerated when stderr DIFFERS from the known
        # missing-extension messages and zero-size extensions exist;
        # confirm this is the intended sense.
        errsDiffer = \
            set(error.stdErr) != set(self.missExtensionErrorMsgs)
        if not (errsDiffer and areZeroSizeExts):
            raise Cu13.CuError("unexpected classify errors: %s"
                               % '; '.join(error.stdErr))
3244 3245 3246 3247 #-------------------------------------------------------------------------- 3248
def extractCat(self, bestProduct, areZeroSizeExts):
    """
    Run the external CASU 'imcore' source extractor on a product image,
    tolerating the known errors produced by zero-size extensions.
    """
    nPix, thres, crowd, rCore, nBlockSize, fwhm = \
        bestProduct.getCasuExtArgs()

    isTile = bestProduct.isTile()
    confName = bestProduct.confName()
    catName = (bestProduct.preGroutCatName() if isTile else
               bestProduct.catName())
    imgName = (bestProduct.filteredName() if isTile else
               bestProduct.fileName())

    # Assemble the command-line arguments as individual tokens.
    args = [imgName, confName, catName, str(nPix), str(thres),
            "--crowd", str(crowd), "--rcore", str(rCore),
            "--nbsize", str(nBlockSize), "--noell"]
    if fwhm > 0:
        args += ["--filtfwhm", str(fwhm)]
    args += ["--cattype", str(self.sysc.casuCatType)]

    Logger.addMessage("Extracting for camera " + self.sysc.instrumentName,
                      alwaysLog=False)
    tool = (os.path.join(self.casuSoftDir, "imcore")
            if self.casuSoftDir else "imcore")
    try:
        extp.run(tool + " " + ' '.join(args),
                 ignoreMsgs=self.extractIgnoreMsgs)
    except extp.Error as error:
        # Same tolerance rule as classifyCat (polarity preserved).
        errsDiffer = \
            set(error.stdErr) != set(self.missExtensionErrorMsgs)
        if not (errsDiffer and areZeroSizeExts):
            raise Cu13.CuError("unexpected imcore errors: %s"
                               % '; '.join(error.stdErr))
3278 3279 #-------------------------------------------------------------------------- 3280
def groutCat(self, catName, finalCatName, srcPath):
    """
    Run the external CASU 'grout' tool to convert a pre-grout tile
    catalogue into the final catalogue, using srcPath for the component
    frames.
    """
    Logger.addMessage("Grouting catalogue " + finalCatName,
                      alwaysLog=False)
    tool = (os.path.join(self.casuSoftDir, "grout")
            if self.casuSoftDir else "grout")
    command = "%s %s %s --srcpath %s" % (tool, catName, finalCatName,
                                         srcPath)
    extp.run(command, ignoreMsgs=self.extractIgnoreMsgs)
3289 3290 #-------------------------------------------------------------------------- 3291
def wcsfit(self, bestProduct, threadNo, areZeroSizeExts):
    """
    Run the external CASU 'wcsfit' tool on a product, working in a
    per-thread scratch directory so parallel fits do not collide.

    @param bestProduct:     Product whose image/catalogue are fitted.
    @type  bestProduct:     ProductDefinition
    @param threadNo:        Index of the calling thread (names the
                            scratch directory).
    @type  threadNo:        int
    @param areZeroSizeExts: True if the product has zero-size extensions,
                            in which case a wcsfit failure is tolerated.
    @type  areZeroSizeExts: bool

    """
    catName = (bestProduct.preGroutCatName() if bestProduct.isTile() else
               bestProduct.catName())

    imgName = (bestProduct.filteredName() if bestProduct.isTile() else
               bestProduct.fileName())

    Logger.addMessage("WCSFIT for camera " + self.sysc.instrumentName,
                      alwaysLog=False)
    curDir = os.getcwd()
    newDir = os.path.join(curDir, "thread%s_%s" % (threadNo,
                                                   self.cuEventID))
    fitter = os.path.join(self.casuSoftDir, "wcsfit") \
        if self.casuSoftDir else "wcsfit"
    if not os.path.exists(newDir):
        os.mkdir(newDir)
    os.chdir(newDir)
    try:
        extp.run("%s %s %s --catsrc local2mass --catpath %s"
                 % (fitter, imgName, catName, self.sysc.casu2massDir),
                 ignoreMsgs=self.extractIgnoreMsgs)
    except extp.Error as error:
        # @FIXME fixed: stdErr is a list of lines, so the old test
        # (error.stdErr == 'Error: wcsfit failed') was always False and
        # the failure was never tolerated; use a membership test.  Also
        # replaced a stray debug print() with a log message.
        Logger.addMessage(str(error.stdErr))
        if not ('Error: wcsfit failed' in error.stdErr
                and areZeroSizeExts):
            raise Cu13.CuError("unexpected wcsfit errors: %s"
                               % '; '.join(error.stdErr))
    finally:
        # Always restore the working directory, even when re-raising --
        # previously an error left the process in the thread directory.
        os.chdir(curDir)
3319 3320 #-------------------------------------------------------------------------- 3321
def runCasuExtractor(self, bestProduct):
    """
    Run CASU Extractor.

    @param bestProduct: Details of the catalogue to extract.
    @type bestProduct: ProductDefinition
    @TODO: Obsolete?
    """

    nPix, thres, crowd, rCore, nBlockSize, fwhm = \
        bestProduct.getCasuExtArgs()

    cnfName = bestProduct.confName()
    catName = (bestProduct.preGroutCatName() if bestProduct.isTile() else
               bestProduct.catName())

    imgName = (bestProduct.filteredName() if bestProduct.isTile() else
               bestProduct.fileName())

    argstr = ' '.join([imgName, cnfName, catName, str(nPix), str(thres)])
    argstr += " --crowd %s --rcore %s --nbsize %s --noell" \
        % (crowd, rCore, nBlockSize)

    if bestProduct.isTile():
        # Build a scratch directory of soft links to every provenance
        # file of the tile, so that grout can find the component frames.
        tmpGroutDir = self.sysc.tempWorkPath()
        shutil.rmtree(tmpGroutDir)
        utils.ensureDirExist(tmpGroutDir)

        for filePath, _mfID, _fType \
                in pu.getWholeProv(self.archive, bestProduct.fileName()):

            if self.sysc.filtPrefix in os.path.basename(filePath):
                # Filtered frame: link both it and its unfiltered
                # original.
                origFilePath = os.path.join(os.path.dirname(filePath),
                                            os.path.basename(filePath)
                                            .replace(self.sysc.filtPrefix, ''))

                linkList = [filePath]

            else:
                origFilePath = filePath
                linkList = []

            provConfName = fits.getConfMap(origFilePath)
            provCatName = fits.getCatalogue(origFilePath)
            linkList += [origFilePath, provConfName, provCatName]
            for fileP in linkList:
                linkName = os.path.basename(fileP)
                if self.sysc.stackSuffix in fileP:
                    # Keep stacks namespaced by their parent directory
                    # to avoid link-name clashes.
                    linkName = os.path.join(
                        os.path.basename(os.path.dirname(fileP)), linkName)

                linkName = os.path.join(tmpGroutDir, linkName)

                Logger.addMessage("Creating soft link from %s to %s"
                                  % (fileP, linkName), alwaysLog=False)

                utils.ensureDirExist(os.path.dirname(linkName))
                extp.run('ln -s %s %s' % (fileP, linkName), isVerbose=False)

    if fwhm > 0:
        argstr += " --filtfwhm %s" % fwhm
    argstr += " --cattype %s" % self.sysc.casuCatType

    Logger.addMessage("Extracting for camera " + self.sysc.instrumentName,
                      alwaysLog=False)

    commands = ["imcore " + argstr,
                "wcsfit %s %s --catsrc local2mass --catpath %s"
                % (imgName, catName, self.sysc.casu2massDir)]

    if bestProduct.isTile():
        # Run usual casutool commands first, then additional ones
        for cmd in commands:
            extp.run(cmd, ignoreMsgs=self.extractIgnoreMsgs)

        components = bestProduct.findRequiredInputs()
        srcPath = tmpGroutDir  #os.path.dirname(list(components)[0].imageName)
        zpOfExt = self.calcZP(bestProduct, components, isGrt=True)
        prov = pu.Provenance(self.archive, bestProduct.fileName(),
                             components)
        updateHeaders(bestProduct, zpOfExt, prov, isGrt=True)
        finalCatName = bestProduct.catName()
        # For tiles the second pass grouts and classifies the final
        # catalogue.
        commands = [
            "grout %s %s --srcpath %s" % (catName, finalCatName, srcPath),
            "classify " + finalCatName]

    for cmd in commands:
        extp.run(cmd, ignoreMsgs=self.extractIgnoreMsgs)

    if bestProduct.isTile():  # Remove temporary pre-grouted catalogues
        os.remove(catName)
3414 3415 # Update with casutools softvers 3416 3417 #-------------------------------------------------------------------------- 3418
def getRequiredMemory(self, files):
    """
    Calculates memory required to process an image batch.

    The estimate is based on the size of the first file that has a
    confidence map; if none do (or the batch is empty), zero is returned.

    @return: Memory required in kilobytes.
    @rtype: float

    """
    if not files:
        return 0

    # First component carrying a confidence map, if any.
    sized = next((f for f in files if f.conf), None)
    if sized is None:
        return 0

    baseSize = getFileSizeKiloBytes(sized.image)

    # Include output
    if self.sysc.isVSA():
        return baseSize * math.sqrt(len(files))
    elif self.sysc.isWSA():
        return baseSize * (len(files) + 1)
    if self.sysc.isOSA():
        # @TODO: If we do OSA stacking need to determine this.
        raise Cu13.CuError("Need to test to get correct memory usage. ")
#--------------------------------------------------------------------------

@staticmethod
def getUsefulNumCPUs(numCPUs, progTable, stackOnly=False):
    """
    Takes the requested number of CPUs and returns the effective useful
    number of CPUs.

    @param numCPUs: Requested number of CPUs.
    @type numCPUs: int
    @param progTable: Programme table for current database with the current
                      row set to that of the programme being curated.
    @type progTable: df.ProgrammeTable

    @return: Effective useful number of CPUs for this script.
    @rtype: int

    """
    sysc = progTable._db.sysc
    progID = progTable.getAttr("programmeID")

    # Default to 60% of the machine's cores when nothing was requested.
    defaultCPUs = int(0.6 * multiprocessing.cpu_count())
    numCPUs = numCPUs or defaultCPUs

    # Deep tiling needs a minimum number of CPUs, except for programmes
    # (VHS, VIDEO) and modes that never tile.
    needsTiling = (sysc.hasOffsetPos
                   and progTable.getAttr("sourceProdType") == 'tile'
                   and not stackOnly
                   and progID not in [sysc.scienceProgs.get("VHS"),
                                      sysc.scienceProgs.get("VIDEO")])

    if numCPUs < SystemConstants.minNumCPUs and needsTiling:
        raise Cu13.CuError("This server has fewer CPUs available (or "
            "number of threads requested) at %s than required %s for deep "
            "tiling" % (numCPUs, SystemConstants.minNumCPUs))

    return min(defaultCPUs, numCPUs)
3479
#------------------------------------------------------------------------------

class ProductDefinition(object):
    """ Determination of the best possible configuration of a specific product.
    """
    archive = None            #: Archive DbSession (copied from the Cu13 object).
    clip = None               #: ? -- presumably a stacking clip threshold; TODO confirm
    compProductType = None    #: Product type of this product's components.
    dateRange = None          #: Date range being curated (from Cu13).
    dirDate = None            #: Date string used for output directory names.
    existingProducts = None   #: Previously created products (from Cu13).
    extDirectory = None       #: ? -- external products directory; TODO confirm
    external = False          #: ? -- True for externally supplied products; TODO confirm
    frameType = None          #: ? -- frame type of the product; TODO confirm
    fTable = None             #: Filter table for this product.
    imagePathName = None      #: Path to the component image (components only).
    inparFile = None          #: ? -- extractor input parameter file; TODO confirm
    isComponent = False       #: True if this product is a component image.
    newProductsOfType = None  #: New products per type (from Cu13).
    notDepOnly = False        #: notDepOnly flag (from Cu13).
    numStack = None           #: numStack setting (from Cu13).
    outputDir = None          #: Directory where the product is written.
    part = None               #: Part-stack identifier, if a part stack.
    problemComponents = None  #: ? -- components flagged as problematic; TODO confirm
    prodMfID = None           #: ? -- multiframe ID of the product; TODO confirm
    productType = None        #: Product type: 'stack', 'tile' or 'mosaic'.
    programme = None          #: Programme table (from Cu13).
    programmeID = None        #: ID of the programme being curated.
    releaseNum = None         #: Release number (from Cu13).
    reprocess = None          #: Reprocess flag (from Cu13).
    sysc = None               #: SystemConstants (from Cu13).
    swarpdir = None           #: SWarp working directory (sysc.swarpDir).
    useDeep = False           #: useDeep flag (from Cu13).
    wcs = None                #: ? -- WCS solution for the product; TODO confirm
    bScale = 1.               #: datascale value.
    doNotUpdate = False       #: Do not update flag
    componentOrder = None     #: Order of components.
    _dualProductID = None     #: Cached dual-filter product ID; TODO confirm
    imageProductDict = None   # Image-to-product mapping (from Cu13).
    idImageProductDict = None # Product-ID-to-image mapping (from Cu13).
    deepProdMfIDDict = None   # Deep product multiframe IDs (from Cu13).
    #--------------------------------------------------------------------------
3523 - def __init__(self, cu13obj, productID, filterTable, productType=None, 3524 imagePathName=None):
3525 """ ? 3526 """ 3527 self.archive = cu13obj.archive 3528 self.dateRange = cu13obj.dateRange 3529 self.dirDate = cu13obj.dirDate 3530 self.existingProducts = cu13obj.existingProducts 3531 self.fTable = filterTable 3532 self.imagePathName = (fits.stripServer(imagePathName) 3533 if imagePathName else imagePathName) 3534 3535 self.isComponent = self.imagePathName is not None 3536 self.newProductsOfType = cu13obj.newProductsOfType 3537 self.notDepOnly = cu13obj.notDepOnly 3538 self.numStack = cu13obj.numStack 3539 self.productType = productType or cu13obj.pType 3540 self.prodDirectory = cu13obj.prodDirectory 3541 self.programme = cu13obj.programme 3542 self.programmeID = cu13obj.programmeID 3543 self.releaseNum = cu13obj.releaseNum 3544 self.reprocess = cu13obj.reprocess 3545 self.sysc = cu13obj.sysc 3546 self.swarpdir = self.sysc.swarpDir 3547 self.useDeep = cu13obj.useDeep 3548 self._tmpWorkDir = cu13obj._tmpWorkDir 3549 self.imageProductDict = cu13obj.imageProductDict 3550 self.idImageProductDict = cu13obj.idImageProductDict 3551 self.deepProdMfIDDict = cu13obj.deepProdMfIDDict 3552 self.deepProductTable = cu13obj.deepProductTableDict[self.productType] 3553 try: 3554 self.index = self.deepProductTable["productID"].index(productID) 3555 except ValueError: 3556 self.index = None
3557 3558 3559 #-------------------------------------------------------------------------- 3560
3561 - def findRequiredInputs(self):
3562 """ 3563 Finds required components 3564 If the product is a deep pawprint, the components are single OB 3565 pawprints. If the product is a tile or mosaic, then the product 3566 links must be checked. 3567 3568 @return: List of component frames (mfID,fileName,confName,catName). 3569 @rtype: list(Cu13.ProductDetails) 3570 3571 """ 3572 if self.productType != 'stack': 3573 components = self.getCombiProductComponents() 3574 else: 3575 components = self.getStackComponents() 3576 3577 self.compProductType = self.productType 3578 if components: 3579 self.newProduct = True 3580 3581 return components
3582 3583 #-------------------------------------------------------------------------- 3584
3585 - def getCasuExtArgs(self):
3586 """ @return: Tuple of command-line arguments required for the CASU 3587 source extractor. 3588 @rtype: ? 3589 """ 3590 # @FIXME: What are these arguments for OSA 3591 microStep = self.deepProductTable["nustep"][self.index] 3592 nPixels = 4 3593 crowd = 1 3594 thresh = self.sysc.casuDetThresh[microStep] 3595 rCore = int(math.sqrt(microStep)) * self.sysc.rCoreBasic 3596 smooth = {1:2, 4:3.5, 9:5}[microStep] 3597 nBlockSize = 64 * int(math.sqrt(microStep)) 3598 3599 return nPixels, thresh, crowd, rCore, nBlockSize, smooth
3600 3601 #-------------------------------------------------------------------------- 3602
3603 - def getDualProductID(self):
3604 """ @return: productID of dual image product. 3605 @rtype: int 3606 @todo: once CU9 is finished, get rid of this 3607 """ 3608 if self._dualProductID: 3609 return self._dualProductID 3610 3611 # get filterID 3612 dualImageFilterID = self.deepProductTable["dualFilterID"][self.index] 3613 if dualImageFilterID <= 0 or self.sysc.isVSA(): 3614 return 3615 #mainProdID = self.deepProductTable["productID"][self.index] 3616 3617 # get productID 3618 # has same ra,dec as current productID, but different filterID 3619 #productInfo = self.getRowList() 3620 fieldIDP = self.deepProductTable["fieldID"][self.index] 3621 dualProdIdcs = [idx for idx in range(len(self.deepProductTable["fieldID"])) 3622 if self.deepProductTable["fieldID"][idx] == fieldIDP and 3623 self.deepProductTable["filterID"][idx] == dualImageFilterID] 3624 if len(dualProdIdcs) is 1: 3625 self._dualProductID = self.deepProductTable["productID"][dualProdIdcs[0]] 3626 return self._dualProductID
3627 3628 #-------------------------------------------------------------------------- 3629
3630 - def getRowList(self):
3631 """ TODO: This should be part of DataFactory.View, maybe it already is? 3632 """ 3633 if not self.deepProductTable: 3634 return [] 3635 3636 attrs = ["productID", "filterID", "ra", "dec"] 3637 defaultList = [dbc.intDefault(), dbc.intDefault(), dbc.realDefault(), 3638 dbc.realDefault()] 3639 3640 # TODO: There is a better way to do this with DataFactory.View! 3641 numRows = len(self.deepProductTable[self.deepProductTable.keys()[0]]) 3642 rowList = [] 3643 for rowNo in range(numRows): 3644 valueList = [] 3645 for attrNo, attr in enumerate(attrs): 3646 if self.deepProductTable[attr]: 3647 valueList.append(self.deepProductTable[attr][rowNo]) 3648 else: 3649 valueList.append(defaultList[attrNo]) 3650 3651 rowList.append(tuple(valueList)) 3652 3653 return rowList
3654 3655 #-------------------------------------------------------------------------- 3656
    def getStackComponents(self):
        """
        List of input intermediate stacks components, selected from archive.
        If only one, then it sets the prodMfID to the multiframeID of this
        single component, which will become the product.

        @return: List of component frames.
        @rtype: set(Cu13.ProductDetails)

        """
        prodID = self.deepProductTable["productID"][self.index]
        filterID = self.deepProductTable["filterID"][self.index]
        ra = self.deepProductTable["ra"][self.index]
        dec = self.deepProductTable["dec"][self.index]
        # Command-line numStack overrides the table's requested stack count.
        numStack = self.numStack \
            or self.deepProductTable["numberStk"][self.index]

        # Negative count means "use all available" for queries.getStacks.
        if numStack < 0:
            numStack = None
        Logger.addMessage("Required Stack: %s at (%s, %s) filterID: %s" %
                          (prodID, ra, dec, filterID), alwaysLog=False)
        images = self.imageProductDict[prodID]

        if not images:
            # Fall back to the secondary image dictionary; if that is also
            # empty there is nothing to stack.
            images = self.idImageProductDict[prodID]
            if not images:
                return images
            else:
                # Only select first one - use as product, no stacking
                images = [images[0]]

        Logger.addMessage("Required Stack: %s, numStack: %s" %
                          (prodID, numStack), alwaysLog=False)
        # Only constrain to an existing deep combiframe when re-using deeps.
        combID = prodID if not self.reprocess and self.useDeep else None
        # @TODO: Don't need first part of this...
        prodMfID = self.deepProdMfIDDict[prodID] if self.deepProdMfIDDict and prodID in self.deepProdMfIDDict else None
        # NOTE(review): "is 1" relies on CPython small-int caching; should be
        # "== 1".
        if len(images) is 1 and not prodMfID:
            # Single component becomes the product itself - no stacking.
            bestImages = []
            self.prodMfID = images[0]
            problemImages = None
        else:
            bestImages, problemImages = queries.getStacks(self.archive,
                self.programmeID, set(images), combID, number=numStack,
                nonDepOnly=self.notDepOnly, productMfID=prodMfID)
        if problemImages:
            # get stacking recipe info for diagnostics (kept in
            # self.problemComponents); OSA lacks the jitter/recipe columns.
            probMfIDs = ", ".join(str(img) for img in problemImages)
            nustepStr = " nustep," if self.sysc.hasMicroStep else ""
            if not self.sysc.isOSA():
                self.problemComponents = self.archive.query(
                    selectStr="multiframeID, grpNum, njitter,%s "
                              "jitterID, jitterX, jitterY, recipe" % nustepStr,
                    fromStr="Multiframe",
                    whereStr="multiframeID IN (%s)" % probMfIDs)
            else:
                self.problemComponents = self.archive.query(
                    selectStr="multiframeID",
                    fromStr="Multiframe",
                    whereStr="multiframeID IN (%s)" % probMfIDs)

        if len(bestImages) <= 1:
            # Zero or one usable frame: nothing to combine; a lone frame is
            # promoted to be the product.
            if bestImages:
                self.prodMfID = bestImages.pop().multiframeID
            return set()

        # @TODO: Does this need to be ordered.
        #        What happens if an OB is deprecated in one offset but not
        #        others?
        return set(Cu13.ProductDetails._make((dbc.intDefault(),) + bestImage)
                   for bestImage in bestImages)
3727 3728 #-------------------------------------------------------------------------- 3729
    def getCombiProductComponents(self):
        """
        Queries the database for each required combined product to see if
        component products exist or if they have been recently created.

        @return: List of stacks with components.
        @rtype: list(ProductDetails)

        """
        combProdID = self.deepProductTable["productID"][self.index]
        filterID = self.deepProductTable["filterID"][self.index]
        ra = self.deepProductTable["ra"][self.index]
        dec = self.deepProductTable["dec"][self.index]
        Logger.addMessage("Required %s: %s at (%s, %s) filterID: %s"
            % (self.productType, combProdID, ra, dec, filterID), alwaysLog=False)

        # Required component products defined by the ProductLinks table.
        reqComponents = self.archive.query(
            selectStr="intProductID, intProdType",
            fromStr="ProductLinks",
            whereStr="programmeID=%s AND combiProdType=%r AND combiProductID=%s"
            % (self.programmeID, self.productType, combProdID))

        # Check components
        components = []
        missingComponentsOfType = defaultdict(list)
        # allOld tracks whether every component pre-dates this run; if so the
        # combined product may already exist too.
        allOld = True
        # If all reqComponents are not present return None
        for prodID, prodType in reqComponents:
            # Check if external inputs (negative productID marks external)
            if prodID < 0 and prodType == self.productType:
                # EXTERNAL PRODUCT e.g. UDS mosaics
                prodFilePath = self.getExternalInputs()
                self.external = prodFilePath is not None
                if self.external:
                    allOld = False
                    components.append(prodFilePath)
                else:
                    Logger.addMessage("<Info> No external product available "
                        "for %s %s" % (self.productType, combProdID))
            else:
                # Check to see if new products have been created
                newProd = self.newProductsOfType[prodType].get(prodID)
                if newProd:
                    components.append(newProd)
                    allOld = False
                else:
                    # Check old products
                    for prod in self.existingProducts:
                        if (prod.productID == prodID
                            and prod.releaseNum == self.releaseNum
                            and self.sysc.productTypeOfFrame(prod.frameType)
                                == prodType):
                            components.append(Cu13.ProductDetails(
                                prod.productID, prod.multiframeID, prod.imgName,
                                prod.confName, prod.catName))

                            break
                    else:
                        # for-else: no existing product matched this link.
                        components = []
                        missingComponentsOfType[prodType].append(prodID)

        # Any missing component is fatal (raises on the first type found).
        for prodType in missingComponentsOfType:
            raise Cu13.CuError("No intermediate %s products with "
                "productID(s) %s found as component for %s %s" % (prodType,
                utils.numberRange(missingComponentsOfType[prodType]),
                self.productType, combProdID))

        # A tile needs every one of its offset pawprints.
        if self.productType == 'tile' and len(components) != len(reqComponents):
            components = None

        if allOld and components:
            if self.productType == 'tile':
                # No deeps need to be made, just find tile.
                mfIDs = [comp.multiframeID for comp in components
                         if comp.multiframeID > 0]

                # @NOTE: Selecting non-deprecated here, but perhaps would be
                #        better to select only tilestack frames instead/as well
                tiles = set(self.archive.query("combiframeID",
                    fromStr="Provenance AS P, Multiframe AS M",
                    whereStr="P.multiframeID IN (%s)" % ','.join(map(str, mfIDs))
                    + " AND M.multiframeID=P.combiframeID"
                    + " AND frameType like '%tile%stack'"
                      " AND (" + DepCodes.selectNonDeprecated +
                    " OR deprecated=%s)" % DepCodes.reprocWFAU))
                components = None
                self.prodMfID = tiles.pop()
                # @TODO: Set deprecated=0

                if tiles:
                    raise Cu13.CuError("Multiple CASU tiles found for %s "
                        "productID = %d, when there should only be one"
                        % (self.productType, combProdID))
                # Un-deprecate the chosen tile, its detectors, its confidence
                # frame and that frame's detectors (reprocWFAU flag only).
                if not self.doNotUpdate:
                    self.archive.update("Multiframe",
                        entryList=[("deprecated", 0)],
                        where="multiframeID=%s and deprecated=%s"
                        % (self.prodMfID, DepCodes.reprocWFAU))
                    self.archive.update("MultiframeDetector",
                        entryList=[("deprecated", 0)],
                        where="multiframeID=%s and deprecated=%s"
                        % (self.prodMfID, DepCodes.reprocWFAU))
                    self.archive.update("Multiframe",
                        entryList=[("deprecated", 0)],
                        where="multiframeID in (select confID from Multiframe "
                              "where multiframeID=%s) and deprecated=%s"
                        % (self.prodMfID, DepCodes.reprocWFAU))
                    self.archive.update("MultiframeDetector",
                        entryList=[("deprecated", 0)],
                        where="multiframeID in (select confID from Multiframe "
                              "where multiframeID=%s) and deprecated=%s"
                        % (self.prodMfID, DepCodes.reprocWFAU))
            else: # Check to see if product in ProgrammeFrame
                currProd = [prod.multiframeID for prod in self.existingProducts
                            if (prod.productID == combProdID
                                and prod.releaseNum <= self.releaseNum
                                and self.sysc.productTypeOfFrame(prod.frameType)
                                    == self.productType)]

                if currProd and not self.reprocess:
                    # Existing product is re-used; nothing to build.
                    self.prodMfID = currProd[0]
                    components = None

        if components:
            # All components must share a single product type.
            compProdTypes = set(comp.intProdType for comp in reqComponents)
            self.compProductType = compProdTypes.pop()
            if compProdTypes:
                raise Exception(
                    "%s %d is made of more than one component types: %s"
                    % (self.productType.title(),
                       self.deepProductTable["productID"][self.index],
                       ','.join(compProdTypes)))

        return components
3864 3865 #-------------------------------------------------------------------------- 3866
3867 - def getExternalInputs(self):
3868 """ 3869 """ 3870 externalProd = self.archive.query("directory, fileNameRoot", 3871 "ExternalProduct", whereStr="productType='mosaic' AND releaseNum=%s " 3872 "AND programmeID=%s" % (self.releaseNum, self.programmeID), 3873 firstOnly=True, default=False) 3874 3875 if not externalProd: 3876 raise Cu13.CuError("No %s mosaic products available for release %s" 3877 % (self.programme.getAcronym(), self.releaseNum)) 3878 3879 Logger.addMessage("About to import data from " + externalProd.directory, 3880 alwaysLog=False) 3881 3882 inputImages = self.getCurrentSwpMos(externalProd) 3883 if inputImages: 3884 inputImage, inputWeight = inputImages 3885 return (dbc.intDefault(), inputImage, inputWeight, 3886 dbc.charDefault())
3887 3888 #-------------------------------------------------------------------------- 3889
    def getImageDetails(self, images):
        """
        Takes a list of database component image multiframeID, confidence image
        multiframeID, filename entries and returns a list of paths to the
        component images, confidence images as well as the total exposure time
        summed across all images for each detector.

        @param images: Iterable of (prodID, mfID, imageName, confName,
                       catName) component entries.

        @return: Tuple of (list of fits.FileList, list of total exposure
                 times per detector).

        """
        # Detector channel known to suffer intermittency (see below).
        intermitChan = 14
        numDets = (self.sysc.maxHDUs - 1 if self.compProductType == 'stack' else
                   1)
        numOffsets = self.getNOffsets()
        # Per (offset, detector) accumulators.
        expTimes = numpy.zeros([numOffsets, numDets], numpy.float32)
        weights = numpy.zeros([numOffsets, numDets], numpy.float32)
        invBScaleSqSumArray = numpy.zeros([numOffsets, numDets], numpy.float32)
        imageFiles = []
        # Keep caller-supplied ordering if given, else sort by file name.
        images = self.componentOrder if self.componentOrder else sorted(list(images), key=itemgetter(2))
        for _prodID, mfID, imageName, confName, catName in images:
            # calculate BSCALE
            # Strip any leading "server:" prefix from the path names.
            imageName = imageName.split(':')[-1]
            confName = confName.split(':')[-1]
            catName = catName.split(':')[-1]

            if self.sysc.hasOffsetPos:
                hdulist = fits.open(imageName)
                offX, offY = (hdulist[0].header['OFFSET_X'],
                              hdulist[0].header['OFFSET_Y'])

                offsetPos = (astro.getOffSetPos(offX, offY)
                             if self.isTile() else 0)
            else:
                offsetPos = 0

            # Check each extension
            # Sort out any confidence images,totalExpTime
            if mfID > 0:
                # Frame is in the archive: read detector info from the DB.
                detectors = self.archive.query(
                    "extNum, deprecated, totalExpTime, dataScale",
                    "MultiframeDetector", "multiframeID=%s" % mfID)

            else:
                # Not ingested: read detector info from the FITS headers.
                detectors = []
                hdulist = fits.open(imageName)
                if not self.sysc.tileSuffix in imageName:
                    for ii, hdu in enumerate(hdulist):
                        if ii > 0:
                            totalExpTime = hdu.header['TEXPTIME']
                            dataScale = (hdu.header['BSCALE']
                                         if 'BSCALE' in hdu.header else 0.)

                            deprec = 0 if totalExpTime else DepCodes.missingDet
                            detectors.append(
                                (ii + 1, deprec, totalExpTime, dataScale))
                else:
                    totalExpTime = hdulist[0].header['TEXPTIME']
                    # NOTE(review): BUG? "hdu" here is a stale loop variable
                    # from a previous iteration (or undefined on the first
                    # pass); this branch presumably intends hdulist[0] -
                    # confirm and fix.
                    dataScale = (hdu.header['BSCALE']
                                 if 'BSCALE' in hdu.header else 0.)

                    deprec = 0 if totalExpTime else DepCodes.missingDet
                    detectors.append((1, deprec, totalExpTime, dataScale))

            depList = [deprecated for _eNum, deprecated, _tExpTime, _dScale in
                       detectors]

            # Any deprecated detector => blank it out in a local copy of the
            # confidence image so it carries zero weight in the combination.
            if max(depList) > 0 and not self.notDepOnly:
                # this to correct the size.

                # Create temporary confidence image.
                newConfImage = os.path.join(os.path.dirname(self.fileName()),
                                            os.path.basename(confName))
                if os.path.exists(newConfImage):
                    os.remove(newConfImage)
                #extp.run("llimcopy %s %s" % (confName, newConfImage))
                shutil.copyfile(confName, newConfImage)
                confName = newConfImage
                # uncompress
                fits.uncompressFits([confName])
                hdulist = fits.open(confName, 'update')
                for index, hdu in enumerate(hdulist):
                    for extNum, deprecated, _tExpTime, _dScale in detectors:
                        if (extNum == (index + 1) and deprecated > 0 and
                                deprecated != DepCodes.intermitDet):
                            # If extension is deprecated set confidence to 0
                            hdu.data.fill(0)
                        elif (extNum == (index + 1) and
                              deprecated == DepCodes.intermitDet):
                            # Intermittency: zero only the affected channel.
                            naxis1 = hdu.header["NAXIS1"]
                            naxis2 = hdu.header["NAXIS2"]
                            xPosInit = (intermitChan - 1) * self.sysc.channelWidth
                            xPosFin = (intermitChan * self.sysc.channelWidth +
                                       (naxis1 - self.sysc.instrumentXnPix))
                            hdu.data[0:naxis2, xPosInit:xPosFin].fill(0)
                fits.checkForUnicodeError(hdulist, confName)
                hdulist.close()

            for extNum, deprecated, totalExpTime, dScale in detectors:
                # @NOTE: dep=51: intermittency - data is still used in deep.
                if deprecated in (0, 51):
                    expTimes[offsetPos][extNum - 2] += totalExpTime
                    weights[offsetPos][extNum - 2] += 1.
                    if self.sysc.hasOffsetPos and dScale > 0:
                        invBScaleSqSumArray[offsetPos][extNum - 2] \
                            += 1 / (dScale * dScale)

            imageFiles.append(fits.FileList(imageName, confName, catName))

        # If tile -- sort out expTime
        if self.isTile():
            # Map each tile region onto the two (offset, extension) pawprint
            # positions that cover it, then average exposure where both exist.
            tileSU = numOffsets // 3
            extNumYArr = numpy.zeros([int(4 * tileSU)], numpy.int32)
            # Set up extnum position in Y direction
            for pos in range(tileSU):
                extNumYArr[numpy.arange(0, 8, 2) + pos] \
                    = numpy.arange(0, 16, 4).astype(numpy.int32)

            extNumXArr = numpy.zeros([12], numpy.int32)
            for pos in range(3):
                extNumXArr[numpy.arange(0, 12, 3) + pos] \
                    = numpy.arange(4).astype(numpy.int32)

            offSetXArr = numpy.fmod(numpy.arange(11, -1, -1), 3)
            #tiledRegExpTime=numpy.zeros([11,int(4*tileSU)],numpy.float32)
            iden = numpy.ones([11, int(4 * tileSU)])
            # Offset array 1
            offP1 = (numpy.multiply(iden, 3 * numpy.fmod(numpy.arange(
                iden.shape[1]), 2)) + numpy.transpose(numpy.multiply(
                numpy.transpose(iden), offSetXArr[:11])))
            # extNum array 1
            extP1 = numpy.add(extNumYArr, numpy.transpose(numpy.multiply(numpy.transpose(iden), extNumXArr[:11])))
            offP2 = (numpy.multiply(iden, 3 * numpy.fmod(numpy.arange(
                iden.shape[1]), 2)) + numpy.transpose(numpy.multiply(
                numpy.transpose(iden), offSetXArr[1:])))
            extP2 = numpy.add(extNumYArr, numpy.transpose(numpy.multiply(numpy.transpose(iden), extNumXArr[1:])))
            pos1 = (offP1.astype(numpy.int32), extP1.astype(numpy.int32))
            pos2 = (offP2.astype(numpy.int32), extP2.astype(numpy.int32))
            tiledRegExpTime = weights[pos1] * expTimes[pos1] + weights[pos2] * expTimes[pos2]
            tiledRegWeight = weights[pos1] + weights[pos2]
            # AVG where weight=2
            totalExpTimeList = [numpy.mean(tiledRegExpTime[numpy.nonzero(
                numpy.where(numpy.equal(tiledRegWeight, 2),
                            tiledRegExpTime, 0.))])]

            tiledRegInvSqBScale = weights[pos1] * invBScaleSqSumArray[pos1] \
                + weights[pos2] * invBScaleSqSumArray[pos2]

            tiledInvSqBScale = numpy.mean(tiledRegInvSqBScale[numpy.nonzero(
                numpy.where(numpy.equal(tiledRegWeight, 2),
                            tiledRegInvSqBScale, 0.))])
            self.bScale = [math.sqrt(1. / tiledInvSqBScale)]
        else:
            totalExpTimeList = list(expTimes.reshape(1, expTimes.size)[0])
            if self.sysc.hasOffsetPos:
                # Per-detector BSCALE from the inverse-variance-style sum.
                self.bScale = numpy.sqrt(numpy.divide(1., numpy.where(
                    numpy.greater(invBScaleSqSumArray, 0.),
                    invBScaleSqSumArray, 1.)))[0, :]

        return imageFiles, totalExpTimeList
4047 4048 #-------------------------------------------------------------------------- 4049
4050 - def getFrameType(self):
4051 """Use product type, amount of 4052 """ 4053 if self.sysc.hasMicroStep and self.deepProductTable["nustep"][self.index] > 1: 4054 frameType = 'deepleavstack' 4055 elif self.sysc.hasMicroStep and self.deepProductTable["nustep"][self.index] == 1: 4056 frameType = 'deepstack' 4057 elif self.sysc.hasMicroStep: 4058 raise Cu13.CuError("Nustep is set incorrectly in this %s %d" 4059 % (self.productType.title(), 4060 self.deepProductTable["productID"][self.index])) 4061 else: 4062 frameType = 'deepstack' 4063 if self.productType == 'stack': 4064 self.wcs = 'y' 4065 self.clip = 'y' 4066 return frameType 4067 elif self.productType == 'tile': 4068 return 'tile' + frameType 4069 elif self.productType == 'mosaic': 4070 if self.compProductType == 'tile': 4071 return 'mosaictile' + frameType 4072 else: 4073 return 'mosaic' + frameType
4074 4075 #-------------------------------------------------------------------------- 4076
4077 - def getCurrentSwpMos(self, extProduct):
4078 """ 4079 """ 4080 self.fTable.setCurRow( 4081 filterID=self.deepProductTable['filterID'][self.index]) 4082 4083 mosaicInfo = [self.deepProductTable['ra'][self.index], 4084 self.deepProductTable['dec'][self.index], 4085 self.deepProductTable['raExtent'][self.index], 4086 self.deepProductTable['decExtent'][self.index], 4087 self.deepProductTable['productID'][self.index], 4088 self.fTable.getAttr("shortName").title()] 4089 4090 mosaics = self.findExternalData(mosaicInfo, extProduct) 4091 4092 if not mosaics: 4093 Logger.addMessage("<Info> No mosaic found for productID = %s" 4094 % self.deepProductTable['productID'][self.index]) 4095 4096 self.extDirectory = extProduct.directory 4097 self.extFileRoot = extProduct.fileNameRoot 4098 self.inparFile = os.path.join(self.swarpdir, extProduct.directory, 4099 extProduct.fileNameRoot + "detCat.inpar") 4100 return mosaics
4101 4102 #-------------------------------------------------------------------------- 4103
4104 - def findExternalData(self, mosaicInfo, extProduct):
4105 """Checks directory for mosaics that match in ra,dec and filter 4106 """ 4107 ra, dec, raExtent, decExtent, productID, filtName = mosaicInfo 4108 swDir = os.path.join(self.swarpdir, extProduct.directory) 4109 4110 # Use WCS and filter data in header 4111 goodImages = [] 4112 goodWeights = [] 4113 for fName in os.listdir(swDir): 4114 prodNo = "_%s" % productID # FIXME Rob's fix means later more simple 4115 if self.sysc.mefType in fName \ 4116 and extProduct.fileNameRoot in fName and prodNo in fName: 4117 4118 # Possible FITS image. 4119 fileName = os.path.join(swDir, fName) 4120 Logger.addMessage("Checking file %s to see if it is a FITS image." 4121 % fileName, alwaysLog=False) 4122 4123 # Check number of extensions 4124 hdulist = fits.open(fileName) 4125 imageExtns = [ext for ext, hdu in enumerate(hdulist) 4126 if hdu.header['NAXIS'] == 2] 4127 4128 if len(imageExtns) is 1: 4129 4130 # get info 4131 extNo = imageExtns[0] 4132 try: 4133 raCen = hdulist[extNo].header['CRVAL1'] 4134 decCen = hdulist[extNo].header['CRVAL2'] 4135 filterName = hdulist[0].header['FILTER'].title() 4136 except KeyError: 4137 Logger.addMessage("WCS info is not in the image extension.") 4138 else: 4139 # Check for info in name 4140 # Weight 4141 isImage = 'weight' not in fName and 'conf' not in fName 4142 if isImage and filtName in filterName \ 4143 and ra - raExtent / 2 < raCen < ra + raExtent / 2 \ 4144 and dec - decExtent / 2 < decCen < dec + decExtent / 2: 4145 goodImages.append(fileName) 4146 goodWeights.append(fits.getSwarpConfMap(fileName)) 4147 4148 # Check that only whole image and weight image 4149 if len(goodImages) is 1: 4150 return goodImages[0], goodWeights[0]
4151 4152 #-------------------------------------------------------------------------- 4153
4154 - def getZeropoint(self):
4155 # Read in ZP, but compare to calculated/ 4156 4157 externalZP = self.archive.query( 4158 "zeroPoint,zpSystem", 4159 "ExternalProduct", 4160 "programmeID=%s AND releaseNum=%s AND productType='%s'" 4161 % (self.programmeID, self.releaseNum, self.productType), 4162 firstOnly=True) 4163 offset = (self.archive.query( 4164 "VegaToAB", "Filter", "filterID=%s" % 4165 self.deepProductTable["filterID"][self.index], 4166 firstOnly=True) if externalZP.zpSystem == 'AB' else 0.) 4167 return externalZP.zeroPoint - offset
4168 4169 #-------------------------------------------------------------------------- 4170
4171 - def catExists(self):
4172 """ 4173 """ 4174 return os.path.exists(self.catName())
4175 4176 #-------------------------------------------------------------------------- 4177
4178 - def catName(self):
4179 """ 4180 """ 4181 return fits.getCatalogue(self.fileName())
4182 4183 #-------------------------------------------------------------------------- 4184
4185 - def confName(self):
4186 """ 4187 """ 4188 return fits.getConfMap(self.fileName())
4189 4190 #-------------------------------------------------------------------------- 4191
4192 - def preGroutCatName(self):
4193 """ 4194 """ 4195 return self.catName().split('.')[0] + self.sysc.pregrtSuffix \ 4196 + self.sysc.catType
4197 4198 #-------------------------------------------------------------------------- 4199
4200 - def getSegmName(self):
4201 """ Segmentation image name 4202 """ 4203 # @TODO: Stick segmented image in archive. 4204 # Why have we got it here? 4205 # Why just VSA? 4206 if self.sysc.isVSA() and self.programme.getAttr("extractTool") == 'SEX': 4207 return self.fileName().split('.')[0] + '_segm' + self.sysc.mefType
4208 4209 #-------------------------------------------------------------------------- 4210
4211 - def creationDate(self):
4212 """Date of creation YYYYMMDD 4213 """ 4214 return os.path.basename(self.outputDir).split('_v')[0]
4215 4216 #-------------------------------------------------------------------------- 4217
4218 - def externalCatsExist(self):
4219 """ 4220 """ 4221 if self.getExternalCatName(): 4222 return os.path.exists(self.getExternalCatName())
4223 4224 #-------------------------------------------------------------------------- 4225
4226 - def fileName(self):
4227 """Filename 4228 """ 4229 if self.isComponent: 4230 return self.imagePathName 4231 prodID = self.deepProductTable["productID"][self.index] 4232 outputDir = (self.prodDirectory[self.productType][prodID] 4233 if prodID in self.prodDirectory[self.productType] else 4234 self.outputDir) 4235 return os.path.join(outputDir, self.imageBaseName())
4236 4237 #-------------------------------------------------------------------------- 4238
4239 - def filteredName(self):
4240 """ 4241 """ 4242 if not self.archive.isRealRun \ 4243 and self.sysc.testOutputPath() not in self.fileName(): 4244 4245 return fits.getFilteredImageName(self.fileName(), self._tmpWorkDir) 4246 4247 return fits.getFilteredImageName(self.fileName())
4248 4249 #-------------------------------------------------------------------------- 4250
4251 - def filtConfName(self):
4252 """ 4253 """ 4254 return fits.getConfMap(self.filteredName())
4255 4256 #-------------------------------------------------------------------------- 4257
4258 - def files(self):
4259 """ 4260 """ 4261 return fits.getFileList(self.fileName())
4262 4263 #-------------------------------------------------------------------------- 4264
4265 - def getAsciiCatName(self):
4266 """ 4267 """ 4268 return self.catName().replace(self.sysc.catType, '.cat')
4269 4270 #-------------------------------------------------------------------------- 4271
4272 - def getDeepExtList(self):
4273 """ 4274 """ 4275 if self.imageExists(): 4276 return fits.getExtList(self.fileName()) 4277 4278 return self.sysc.extListOfType(self.productType)
4279 4280 #-------------------------------------------------------------------------- 4281
4282 - def getExternalCatName(self):
4283 """ 4284 """ 4285 #filterID = self.deepProductTable["filterID"][self.index] 4286 #filterTable = df.Table("Filter", self.archive) 4287 #filterName = filterTable.getAttr("shortName", filterID=filterID) 4288 productID = self.deepProductTable["productID"][self.index] 4289 directory = os.path.join(self.swarpdir, self.extDirectory) 4290 posFiles = os.listdir(directory) 4291 goodFiles = [] 4292 for fName in posFiles: 4293 if self.extFileRoot + str(productID) in fName and \ 4294 fName.endswith('.cat'): 4295 goodFiles.append(os.path.join(directory, fName)) 4296 4297 if goodFiles: 4298 return goodFiles[0]
4299 4300 #-------------------------------------------------------------------------- 4301
4302 - def getInparName(self):
4303 """ 4304 """ 4305 return self.catName().replace(self.sysc.catType, '.inpar')
4306 4307 #-------------------------------------------------------------------------- 4308
4309 - def getNOffsets(self):
4310 """ 4311 """ 4312 if not self.isTile(): 4313 return 1 4314 return self.archive.queryNumRows("ProductLinks", 4315 "programmeID=%d AND combiProductID=%d AND combiProdType='tile'" 4316 " AND intProdType='stack'" % (self.programmeID, 4317 self.deepProductTable["productID"][self.index]))
4318 4319 #-------------------------------------------------------------------------- 4320
4321 - def getPointInfo(self):
4322 """ 4323 """ 4324 if self.productType == 'tile': 4325 self.nPointings = fits.open(self.fileName())[0].header['NOFFSETS'] 4326 self.nExtensions = self.sysc.maxHDUs - 1 4327 4328 if self.productType == 'mosaic': 4329 extNum = 2 4330 pixScale = fits.getPixelScale(self.fileName(), extNum) 4331 header = fits.open(self.fileName())[extNum - 1].header 4332 areaMosaic = header['NAXIS1'] * header['NAXIS2'] * (pixScale / 3600.) ** 2 4333 4334 self.nExtensions = \ 4335 len(self.sysc.extListOfType(self.compProductType)) 4336 4337 if self.compProductType == 'tile': 4338 self.nPointings = int(areaMosaic / self.sysc.tileArea) 4339 4340 elif self.compProductType == 'stack': 4341 self.nPointings = int(areaMosaic / self.sysc.pawPrintArea)
4342 4343 #-------------------------------------------------------------------------- 4344
4345 - def getPosAngle(self):
4346 if self.sysc.hasVarOrient: 4347 return self.deepProductTable["posAngle"][self.index]
4348 4349 #-------------------------------------------------------------------------- 4350
4351 - def imageBaseName(self):
4352 """ 4353 """ 4354 frameType = self.getFrameType() 4355 prodID = self.deepProductTable["productID"][self.index] 4356 dirDate = (os.path.basename(self.prodDirectory[self.productType][prodID]).split('_')[0] 4357 if prodID in self.prodDirectory[self.productType] else 4358 self.dirDate) 4359 deepFilePath = "%s%s_%05d%05d" \ 4360 % (self.sysc.wfauPrefix, dirDate, self.programmeID, prodID) 4361 4362 deepFilePath += fits.createSuffix(frameType) 4363 if self.part: 4364 deepFilePath += '_%d' % self.part 4365 4366 deepFilePath += self.sysc.mefType 4367 4368 return deepFilePath
4369 4370 #-------------------------------------------------------------------------- 4371
4372 - def imageExists(self):
4373 """ 4374 """ 4375 return os.path.exists(self.fileName())
4376 4377 #-------------------------------------------------------------------------- 4378
4379 - def isBig(self):
4380 """ 4381 """ 4382 return os.lstat(self.fileName())[6] >= 2 * self.sysc.one_gigabyte
4383 4384 #-------------------------------------------------------------------------- 4385
4386 - def isExternal(self):
4387 """ 4388 """ 4389 return self.external
4390 4391 #-------------------------------------------------------------------------- 4392
4393 - def isMef(self, fileName=None):
4394 """ 4395 """ 4396 if not fileName: 4397 return len(fits.open(self.fileName())) > 1 4398 4399 if not os.path.exists(fileName): 4400 return False 4401 4402 return len(fits.open(fileName)) > 1
4403 4404 #-------------------------------------------------------------------------- 4405
4406 - def isMosaic(self):
4407 """ 4408 """ 4409 return self.sysc.mosaicSuffix in self.fileName()
4410 4411 #-------------------------------------------------------------------------- 4412
4413 - def isTile(self):
4414 """ 4415 """ 4416 return self.sysc.tileSuffix in self.fileName() and not self.isMosaic()
4417 4418 #-------------------------------------------------------------------------- 4419
4420 - def provFile(self):
4421 """ 4422 """ 4423 if self.extDirectory: 4424 return os.path.join(self.swarpdir, self.extDirectory, "prov%s.dat" 4425 % os.path.basename(self.extDirectory))
4426 4427 #-------------------------------------------------------------------------- 4428
4429 - def sexInparFile(self):
4430 """ 4431 """ 4432 if os.path.exists(self.inparFile): 4433 return self.inparFile
4434 4435 #-------------------------------------------------------------------------- 4436
4437 - def stackPosition(self):
4438 """tuple(ra,dec) 4439 """ 4440 return (self.deepProductTable["ra"][self.index], 4441 self.deepProductTable["dec"][self.index])
4442
#------------------------------------------------------------------------------

class Provenance(object):
    """ Collates provenance information for a single deep product: the list
        of input stacks parsed from its provenance file, plus photometric
        and positional constants, backed by the archive database.
    """
    # Class-level defaults; some are overridden per instance in __init__.
    expTime = 1.        # exposure time placeholder
    background = 0.     # background level placeholder
    sd = 0.             # background standard deviation placeholder
    intProdType = None  # product type of the intermediate input frames
    posAngle = None     # position angle; only set when sysc.hasVarOrient
    #--------------------------------------------------------------------------

    def __init__(self, cu13Obj, bestProd):
        """Selects important header data from intermediate stacks.

           @param cu13Obj:  Running CU13 session; supplies the archive
                            connection, system constants and programme ID.
           @param bestProd: Deep product whose provenance file is parsed.
        """
        # Base position and filter of the product, from the deep product
        # table (RA converted from degrees to hours).
        self.raBase = bestProd.deepProductTable['ra'][bestProd.index] / 15.
        self.decBase = bestProd.deepProductTable['dec'][bestProd.index]
        self.filterID = bestProd.deepProductTable['filterID'][bestProd.index]
        bestProd.fTable.setCurRow(filterID=self.filterID)
        self.filterName = bestProd.fTable.getAttr("shortName").upper()
        self.raExt = bestProd.deepProductTable['raExtent'][bestProd.index]
        self.decExt = bestProd.deepProductTable['decExtent'][bestProd.index]
        if cu13Obj.sysc.hasMicroStep:
            self.nustep = bestProd.deepProductTable['nustep'][bestProd.index]
        self.fieldID = bestProd.deepProductTable['fieldID'][bestProd.index]
        if cu13Obj.sysc.hasVarOrient:
            self.posAngle = bestProd.deepProductTable['posAngle'][bestProd.index]
        self.db = cu13Obj.archive
        self.sysc = cu13Obj.sysc
        self.programmeID = cu13Obj.programmeID
        self.prodName = bestProd.fileName()
        self.doNotUpdate = bestProd.doNotUpdate
        # add in totalExpTime, detector by detector

        # UDS uses combination of different microstepped data
        # make sure no cock ups.
        # One whitespace-separated record per input frame in the prov file.
        ProvData = namedtuple("ProvData", "fieldID filterID fName utDate"
            " dateObs dateEnd mjdObs ra dec deprecated"
            " extensionList totExpTime isReprocess")

        self.provenanceData = []
        prodTypes = set()
        # NOTE(review): file() is the Python 2 builtin - open() under Py3.
        for line in file(bestProd.provFile()).readlines():
            entry = ProvData(*line.split())
            # Only keep records for this product's filter and field.
            if (int(entry.filterID) == self.filterID and
                int(entry.fieldID) == self.fieldID):
                self.provenanceData.append((entry.fName,
                    utils.makeDateTime(entry.utDate),
                    utils.makeDateTime(entry.dateObs),
                    utils.makeDateTime(entry.dateEnd), float(entry.mjdObs),
                    float(entry.ra), float(entry.dec), int(entry.filterID),
                    int(entry.deprecated), entry.extensionList,
                    float(entry.totExpTime)))

                prodTypes.add(self.db.sysc.productTypeOfFrame(
                    fits.determineFrameType(entry.fName)))

        # NOTE(review): pop() raises KeyError if no provenance record
        # matched this filter/field - confirm callers guarantee a match.
        self.intProdType = prodTypes.pop()
        if prodTypes:
            raise Cu13.CuError("Multiple input types for this product")

    #--------------------------------------------------------------------------

    def getInputStacks(self):
        """ @return: Provenance tuples sorted by MJD of observation
                     (element 4 of each tuple).
            @rtype:  list(tuple)
        """
        return sorted(self.provenanceData, key=itemgetter(4))

    #--------------------------------------------------------------------------

    def getComponents(self):
        """ @return: Product details for each provenance record, matched
                     against candidate stacks queried from the archive.
            @rtype:  list
        """
        possFrames = self.db.query(
            "m1.multiframeID,m1.fileName as fileName,"
            "m2.fileName as confName,m1.catName as catName,m1.deprecated",
            "Multiframe as m1,Multiframe as m2,ProgrammeFrame as p",
            "m1.confID=m2.multiframeID and m1.multiframeID=p.multiframeID and "
            "p.programmeID=%d and m1.frameType like '%%stack' "
            "and m1.filterID=%d"
            % (self.programmeID, self.filterID))

        # @FIXME: This list could contain None values, which will break
        #         calcZP() - this needs to be accounted for somewhere.

        return [self.getCompFromFName(provLine[0], possFrames)
                for provLine in self.provenanceData]

        # Better to do one query in SQL and use python to find correct info
        #return [self.getCompFromFName(provLine[0])
        #        for provLine in self.provenanceData]

    #--------------------------------------------------------------------------

    def getCompFromFName(self, fileName, possFrames):
        """ Match a provenance file name against the candidate frames;
            deprecated components (within the relevant code range) are
            re-flagged in the database as external-product components.

            @return: Cu13.ProductDetails for the matching frame, or None if
                     no acceptable match is found.
        """
        # Compare on the trailing dir/file portion only: files may move.
        fileName = os.path.join(os.path.basename(os.path.dirname(fileName)),
                                os.path.basename(fileName))
        for frame in possFrames:
            if frame.fileName.endswith(fileName):
                if DepCodes.nonDep < frame.deprecated < DepCodes.reprocCASU and not self.doNotUpdate and frame.deprecated != DepCodes.extProdComp:
                    Logger.addMessage("Component, mfID=%d, of %s is deprecated"
                        " with code %d: Changing deprecation code to %d"
                        % (frame.multiframeID, self.prodName, frame.deprecated,
                           DepCodes.extProdComp))

                    self.db.update("Multiframe",
                        "deprecated=%d" % DepCodes.extProdComp,
                        "multiframeID=%d and deprecated!=%s" % (frame.multiframeID, DepCodes.extProdComp))

                    self.db.update("MultiframeDetector",
                        "deprecated=%d" % DepCodes.extProdComp,
                        "multiframeID=%d AND deprecated!=%d and deprecated!=%d"
                        % (frame.multiframeID, DepCodes.nonDep, DepCodes.extProdComp))

                if frame.deprecated < DepCodes.reprocCASU:
                    return Cu13.ProductDetails(dbc.intDefault(),
                        frame.multiframeID, frame.fileName, frame.confName,
                        frame.catName)

    #--------------------------------------------------------------------------

    def getConstDict(self):
        """ @return: FITS header key -> metadata value dictionary for a
                     typical (first) input stack, or None when there are no
                     input stacks.
            @rtype:  dict(str: value)
        """
        # files may move on disk.
        if self.getInputStacks():
            typStackFName = self.getInputStacks()[0][0].split(':')[-1]
            typStackFName = os.path.join(
                os.path.basename(os.path.dirname(typStackFName)),
                os.path.basename(typStackFName))
            tables = ["Multiframe", "MultiframeDetector", "CurrentAstrometry"]
            if self.db.sysc.hasEsoRelease:
                tables.append("MultiframeEsoKeys")

            hdrValues = self.db.query(metadataAttrs(self.db),
                fromStr=Join(tables, "multiframeID"),
                whereStr="MultiframeDetector.extNum=CurrentAstrometry.extNum"
                " AND Multiframe.deprecated<128"
                " AND fileName LIKE '%%%s'" % typStackFName,
                firstOnly=True)

            return dict(zip(fitsKeys(self.db), hdrValues))

    #--------------------------------------------------------------------------

    def getProvInfo(self):
        """ @return: Dictionary of provenance header values (position,
                     filter, exposure, and observation date/MJD range when
                     input stacks are available).
            @rtype:  dict
        """
        # @FIXME: No nustep in OSA
        provDict = {'raBase': self.raBase,
                    'decBase': self.decBase,
                    'background': self.background,
                    'sd': self.sd,
                    'expTime': self.expTime,
                    'filterID': self.filterID,
                    'filterName': self.filterName,
                    'posAngle': self.posAngle}
        if self.sysc.hasMicroStep:
            provDict['nustep'] = self.nustep

        # Date range spans from the earliest to latest input stack (the
        # list returned by getInputStacks() is sorted by MJD).
        inputStacks = self.getInputStacks()
        if inputStacks:
            provDict['dateObs'] = fits.formatDateTime(inputStacks[0][2])
            provDict['dateEnd'] = fits.formatDateTime(
                inputStacks[len(inputStacks) - 1][3])
            utd = inputStacks[len(inputStacks) - 1][1]
            provDict['utDate'] = '%4u%02u%02u' % (utd.year, utd.month, utd.day)
            provDict['mjdObs'] = inputStacks[0][4]

        return provDict
4615
#------------------------------------------------------------------------------

def dualFilterOrder(pointings):
    """ Order pointing indices so that, within each dual-image-mode group,
        the master (dual) filter image comes before its dependent images.
    """
    ordered = []
    for masterID in set(p.dualID for p in pointings):

        # The master filter image must be its own dual master.
        isBadMaster = any(p.filterID == masterID and p.dualID != masterID
                          for p in pointings)
        if isBadMaster:
            raise Cu13.CuError("Master image in dual image mode is not it's "
                               "own master: %d" % masterID)

        masterIndices = [p.index for p in pointings
                         if p.filterID == masterID]

        if masterID > 0 and not masterIndices:
            raise Cu13.CuError("Dual filter image, %d, for pointing RA=%f, "
                               "Dec=%f is not among data products"
                               % (masterID, pointings[0].ra, pointings[0].dec))

        # Masters first, then every other image in this dual group.
        ordered += masterIndices
        ordered += [p.index for p in pointings
                    if p.filterID != masterID and p.dualID == masterID]

    return ordered
4643
#------------------------------------------------------------------------------

def getFileSizeKiloBytes(fileName, oneExtOnly=False):
    """ Estimate the (uncompressed) pixel-data size of a FITS file in
        kilobytes by summing NAXIS1 * NAXIS2 * |BITPIX|/8 over its HDUs.
        Unlike os.path.getsize() this gives the in-memory size, which
        differs from the on-disk size for tile-compressed files.

        @param fileName:   FITS file to measure.
        @type  fileName:   str
        @param oneExtOnly: If True, return the size of just the last
                           non-empty HDU rather than the total.
        @type  oneExtOnly: bool

        @return: Size in kilobytes.
        @rtype:  float
    """
    # Open the file once; previously it was opened twice (for the
    # single-extension test and again for the HDU loop).
    hduList = fits.open(fileName)
    isSF = SystemConstants.tileSuffix in fileName and len(hduList) == 1
    memList = []
    for hdu in hduList:
        try:
            if isSF:
                mem = (hdu.header['NAXIS1'] * hdu.header['NAXIS2'] *
                       abs(hdu.header['BITPIX'] / 8))
            else:
                # Tile-compressed images carry their true dimensions in the
                # ZNAXISn / ZBITPIX keywords of a BINTABLE extension.
                # NOTE(review): a primary HDU lacks XTENSION, so its size
                # counts as 0 here - confirm that is intended.
                key = ('Z' if hdu.header['XTENSION'] == 'BINTABLE' else '')
                mem = (hdu.header[key + 'NAXIS1'] * hdu.header[key + 'NAXIS2'] *
                       abs(hdu.header[key + 'BITPIX'] / 8))
        except KeyError:
            mem = 0

        if mem > 0:
            memList.append(mem)

    if oneExtOnly:
        return memList.pop() / SystemConstants.one_kilobyte if memList else 0.

    return sum(memList) / SystemConstants.one_kilobyte
4677
#------------------------------------------------------------------------------

def updateHeaders(bestProd, newZPDict, prov, isGrt=False):
    """ Apply the new zero-point and provenance headers to the product's
        catalogue, confidence map and image files.
    """
    if isGrt:
        catName = bestProd.preGroutCatName()
    else:
        catName = bestProd.catName()

    # Catalogue first (isCat=True enables the catalogue-specific keywords),
    # then the confidence map and the science image.
    updateHead(catName, bestProd, newZPDict, prov, isCat=True)
    updateHead(bestProd.confName(), bestProd, newZPDict, prov)
    updateHead(bestProd.fileName(), bestProd, newZPDict, prov)
4687
#------------------------------------------------------------------------------

def updateHead(imgName, bestProd, newZPDict, prov, isCat=False):
    """ Update photometric calibration and provenance keywords in every HDU
        of the given FITS file (image, confidence map or catalogue),
        in place.

        @param imgName:   FITS file to update.
        @param bestProd:  Product being calibrated; supplies sysc and the
                          deep extension list.
        @param newZPDict: Zero-point solution per extension number:
                          extNum -> (zeroPoint, zpError, numStandards).
        @param prov:      Provenance object supplying MJD / exposure values.
        @param isCat:     True when imgName is a catalogue file.
    """
    deepExtList = bestProd.getDeepExtList()
    sysc = bestProd.sysc
    fitsFile = fits.open(imgName, "update")
    isPawPrint = not (sysc.tileSuffix in imgName or
                      sysc.mosaicSuffix in imgName)
    # BUG FIX: the primary-HDU tests below previously used "extNum is 0",
    # an identity comparison with an int literal that is implementation
    # dependent; replaced with "extNum == 0".
    for extNum, hdu in enumerate(fitsFile):
        if extNum > 0:
            # Zero-point solution for this extension, or defaults when the
            # extension is not in the deep extension list.
            if (extNum + 1) in deepExtList:
                zeroP, zpErr, numZP = newZPDict[extNum + 1]
            else:
                zeroP, zpErr, numZP = (dbc.realDefault(), dbc.realDefault(),
                                       dbc.intDefault())
            if (zeroP < 0 or zpErr < 0) and hdu.header['TEXPTIME'] > 0:
                Logger.addMessage(
                    "<WARNING> The zero-point is %s and the "
                    "zero-point error is %s for ext: %d in %s" %
                    (zeroP, zpErr, extNum, imgName))
            fixHistory(hdu)
            hdu.header.update('MAGZPT', round(zeroP, 4),
                              'Photometric ZP (mags) for default extinction')

            hdu.header.update('MAGZRR', round(zpErr, 4),
                              'Photometric ZP error (mags)')

            hdu.header.update('NUMZPT', numZP, 'Number of standards used')
            hdu.header.update('EXTINCT', Cu13.extinct,
                              'Extinction coefficient (mags)')

            hdu.header.update('NICOMB', prov.getNoRawFrames(),
                              'Number of normal images in this stack')

            if isCat and not sysc.hasEsoRelease:
                hdu.header.update(sysc.amStart, Cu13.airmass,
                                  'Airmass at start of observation')

                hdu.header.update(sysc.amEnd, Cu13.airmass,
                                  'Airmass at end of observation')

            if isCat and sysc.pregrtSuffix in imgName:
                # Edit Provenance
                provNo = 0
                provKey = 'PROV%04d' % provNo
                while provKey in hdu.header:
                    # @FIXME: Always update primary header for each HDU?
                    #         Surely this should be done once?
                    fitsFile[0].header.update(provKey,
                                              hdu.header[provKey],
                                              'Card # %s' % (provNo))
                    provNo += 1
                    provKey = 'PROV%04d' % provNo

            if isCat:
                hdu.header.update('MJD-OBS', prov.getMjdObs(),
                                  "Start MJD of observation")
                hdu.header.update('MJD-END', prov.getMjdEnd(),
                                  "End MJD of observation")
                # @TODO: in final WSA release also add in MEANMJD
                if isPawPrint and sysc.hasEsoRelease:
                    hdu.header.update('MEANMJD', prov.getMeanMjdObs(extNum + 1),
                                      'Mean MJD of all images comprising this image')

        if extNum == 0 and (sysc.hasEsoRelease or not isCat):
            hdu.header.update(sysc.amStart, Cu13.airmass,
                              'Airmass at start of observation')

            hdu.header.update(sysc.amEnd, Cu13.airmass,
                              'Airmass at end of observation')

            # @TODO: not in tiles
            for kywrd in ['MAGZPT', 'MAGZRR', 'EXTINCT']:
                try:
                    del hdu.header[kywrd]
                except KeyError:
                    pass

        if extNum == 0 and isCat and sysc.hasEsoRelease:
            comment = 'Corrected to ndit' if sysc.isVSA() else 'Exposure Time'
            hdu.header.update(sysc.expTimeKeyWord,
                              fitsFile[1].header[sysc.expTimeKeyWord],
                              comment)
            posAngle = bestProd.getPosAngle()
            hdu.header.update("HIERARCH ESO TEL POSANG",
                              posAngle, "ESO POSITION ANGLE")
        if extNum == 0 and not isCat:
            hdu.header.update('MJD-OBS', prov.getMjdObs(),
                              "Start MJD of observation")
            hdu.header.update('MJD-END', prov.getMjdEnd(),
                              "End MJD of observation")
            if not isPawPrint:
                hdu.header.update('TEXPSUM', prov.getSumTotalExpTime(),
                                  'Total integration time of all exposures (s)')
    fits.checkForUnicodeError(fitsFile, imgName)
    fitsFile.close()
4787
#------------------------------------------------------------------------------

def convertSFtoMEF(singleFITSName, MEFName, fileType, extractTool, normFac):
    """ Convert a single-extension FITS file into a two-HDU MEF file,
        optionally rescaling CASU weight maps to 8-bit unsigned integers,
        and return the original primary header.
    """
    srcList = fits.open(singleFITSName)
    srcHeader = srcList[0].header
    srcData = srcList[0].data
    if fileType == 'weight' and extractTool == 'CASU':
        # CASU weight maps are normalised then stored as unsigned bytes.
        srcData = numpy.multiply(srcData, normFac).astype(numpy.uint8)
    srcList.close()

    newList = pyfits.HDUList([pyfits.PrimaryHDU(), pyfits.ImageHDU(srcData)])
    newList.writeto(MEFName)

    return srcHeader
4804
#------------------------------------------------------------------------------

def createNewHeader(header, extType):
    """Remove values, so that you don't mess up.

       Builds a fresh pyfits header containing only the structural keywords
       (XTENSION, BITPIX, NAXIS, NAXIS1, NAXIS2, PCOUNT, GCOUNT) copied from
       the given header, in that canonical FITS order.

       @param header:  Source header; only consulted for extension headers.
       @param extType: 'primary' for a default primary header, anything
                       else for an image-extension header.
    """
    # Structural keywords to preserve, in required FITS card order.
    keeplist = ['XTENSION', 'BITPIX', 'NAXIS', 'NAXIS1', 'NAXIS2', 'PCOUNT', 'GCOUNT']
    if extType == 'primary':
        # Primary headers get pyfits defaults only (no XTENSION card exists
        # to copy from a primary header anyway).
        newHeader = pyfits.PrimaryHDU().header
    else:
        cards = header.ascard
        newHeader = pyfits.ImageHDU().header
        for kk in range(len(keeplist)):
            kywrd = keeplist[kk]
            crd = cards[kywrd]
            if kk > 0:
                # Insert each card immediately after the previously copied
                # one to preserve the canonical ordering.
                after = keeplist[kk - 1]
                newHeader.update(crd.key, crd.value, crd.comment, after=after)
            else:
                newHeader.update(crd.key, crd.value, crd.comment, before='BITPIX')

    return newHeader
4826
#------------------------------------------------------------------------------

def fixHistory(hdu):
    """ Edits CASU histories, so that they conform to new WFAU standard.

        CASU writes each history entry as a pair of HISTORY cards: a
        time-stamp line (beginning with today's date) followed by a
        program-info line. Each pair is merged into one HISTORY card.
    """
    cards = hdu.header.ascard
    tStamps = []
    historyInfo = []
    cardNos = []
    today = utils.makeDateTime().strftime("%Y%m%d")

    for card in range(len(cards) - 1):
        # Match a time-stamp HISTORY card (dated today) followed by a
        # program-info HISTORY card (which does NOT start with a date).
        # BUG FIX: the final test previously re-checked cards[card]
        # (always False given the second test), so no pair ever matched;
        # it now checks cards[card + 1] as intended.
        if (cards[card].key == 'HISTORY'
            and cards[card].value.split()[0] == today
            and cards[card + 1].key == 'HISTORY'
            and cards[card + 1].value.split()[0] != today):

            tStamps.append(cards[card].value)
            _init, casuProg, casuVNum, progDate, progTime, curator, _final1, _fina2 = \
                cards[card + 1].value.split()

            historyInfo.append((casuProg, casuVNum, progDate, progTime, curator))
            cardNos.append(card)
            cardNos.append(card + 1)

    for cc in range(len(cardNos)):
        # Indices shift down by one for each card already deleted.
        del hdu.header[cardNos[cc] - cc]

    for hh in range(len(tStamps)):
        historyString = '%s %s %s %s at %s' % (historyInfo[hh][0],
            historyInfo[hh][1], historyInfo[hh][2], historyInfo[hh][3], tStamps[hh])

        hdu.header.add_history(historyString)
4861
#------------------------------------------------------------------------------

def getNormalisationConst(mosaicObj):
    """ Find normalisation factor for weight image. """
    hduList = fits.open(mosaicObj.imageName)
    dataExt = 1 if mosaicObj.isMef(mosaicObj.imageName) else 0
    pixels = hduList[dataExt].data

    return 100 / stats.clippedMADHistogram(pixels)[0]
4871
#------------------------------------------------------------------------------
# CU13 Mosaic Constants

def fitsKeys(db):
    """ @return: Mosaic FITS header keys.
        @rtype: list(str)
    """
    if db.sysc.isWSA():
        return ['PROJECT', 'TELESCOP', 'INSTRUME', 'DHSVER', 'HDTFILE', 'OBSERVER',
            'USERID', 'OBSREF', 'RECIPE', 'TRACKSYS', 'NEXP', 'NINT', 'READINT',
            'NREADS', 'READMODE', 'DETECTOR', 'DETECTID', 'DROWS', 'DCOLUMNS',
            'RDOUT_X1', 'RDOUT_X2', 'RDOUT_Y1', 'RDOUT_Y2', 'PIXLSIZE', 'GAIN',
            'HDTFILE2', 'READNOIS', 'DECURTN', 'CRUNIT1', 'CRUNIT2']

    elif db.sysc.isVSA():
        return ['PROJECT', 'TELESCOP', 'INSTRUME', 'OBSERVER', 'RECIPE', 'DETECTOR',
            'DETECTID', 'DROWS', 'DCOLUMNS', 'GAINCOR',
            'HIERARCH ESO DET CHIP NO', 'CRUNIT1', 'CRUNIT2',
            'HIERARCH ESO OBS PROG ID']
    elif db.sysc.isOSA():
        # @TODO: If mosaics needed
        # BUG FIX: the exception was constructed but never raised, so the
        # OSA case silently returned None.
        raise Cu13.CuError("Check which of these is needed")
4894 #------------------------------------------------------------------------------
def metadataAttrs(db):
    """ @return: Names of the database table attributes that provide the Mosaic
                 FITS metadata.
        @rtype: str
    """
    if db.sysc.isWSA():
        return \
            "Multiframe." + ", Multiframe.".join(
            ["project", "telescope", "instrument", "dhsVersion", "hdtFile",
             "observer", "userid", "obsref", "recipe", "trackSys", "numExp",
             "numInts", "readInterval", "numReads"]) + ", "\
            "MultiframeDetector." + ", MultiframeDetector.".join(
            ["readMode", "detectorType", "deviceID", "detRows", "detCols",
             "startCol", "endCol", "startRow", "endRow", "pixelScale", "gain",
             "hdtFileExt", "readNoise", "decurtFlag"]) + ", "\
            "CurrentAstrometry." + ", CurrentAstrometry.".join(
            ["crunit1", "crunit2"])

    elif db.sysc.isVSA():
        return \
            "Multiframe." + ", Multiframe.".join(
            ["project", "telescope", "instrument", "observer", "recipe"]) + ", "\
            "MultiframeDetector." + ", MultiframeDetector.".join(
            ["detectorType", "deviceID", "detRows", "detCols", "gainCor", "camNum"]) + ", "\
            "CurrentAstrometry." + ", CurrentAstrometry.".join(
            ["crunit1", "crunit2"]) + ", MultiframeEsoKeys." + ", MultiframeEsoKeys.".join(
            ["obsProgID"])
    elif db.sysc.isOSA():
        # @TODO: If mosaics needed
        # BUG FIX: the exception was constructed but never raised, so the
        # OSA case silently returned None.
        raise Cu13.CuError("Check which of these is needed")
4926
#------------------------------------------------------------------------------

def astrometry(imghdr, inputHeader, constDict):
    """Fix up astrometry keywords.

       Copies the WCS keywords from inputHeader into imghdr; coordinate
       units fall back from CUNITn to CRUNITn to constDict, and the
       optional PV2_n distortion terms are copied only when present.
    """
    directCopies = [
        ('CTYPE1', 'Algorithm type for axis 1'),
        ('CTYPE2', 'Algorithm type for axis 2'),
        ('CRPIX1', 'Dither offset X'),
        ('CRPIX2', 'Dither offset Y'),
        ('CRVAL1', '[deg] Right ascension at the reference pixel'),
        ('CRVAL2', '[deg] Declination at the reference pixel')]

    for key, comment in directCopies:
        imghdr.update(key, inputHeader[key], comment)

    unitComments = {'1': 'Unit of right ascension co-ordinates',
                    '2': 'Unit of declination co-ordinates'}
    for axis in ('1', '2'):
        try:
            unitValue = inputHeader['CUNIT' + axis]
        except KeyError:
            try:
                unitValue = inputHeader['CRUNIT' + axis]
            except KeyError:
                unitValue = constDict['CRUNIT' + axis]
        imghdr.update('CRUNIT' + axis, unitValue, unitComments[axis])

    for key in ('CD1_1', 'CD1_2', 'CD2_1', 'CD2_2'):
        imghdr.update(key, inputHeader[key], 'Transformation matrix element')

    optionalTerms = [('PV2_1', 'Coefficient for r term'),
                     ('PV2_2', 'Coefficient for r**2 term '),
                     ('PV2_3', 'Coefficient for r**3 term ')]
    for key, comment in optionalTerms:
        try:
            imghdr.update(key, inputHeader[key], comment)
        except KeyError:
            pass

    return imghdr
4973
#------------------------------------------------------------------------------

def checkList(keyword):
    """Make sure header keyword should not populate image header.

       @return: 0 if the (whitespace-stripped) keyword is a structural or
                astrometric keyword, 1 otherwise.
       @rtype:  int
    """
    astromKeys = ['CTYPE1', 'CTYPE2', 'CRPIX1', 'CRPIX2', 'CRVAL1', 'CRVAL2', 'CUNIT1',
        'CUNIT2', 'CRUNIT1', 'CRUNIT2', 'CD1_1', 'CD1_2', 'CD2_1', 'CD2_2', 'NAXIS',
        'NAXIS1', 'NAXIS2']

    # First whitespace-delimited token is the keyword proper.
    return 0 if keyword.split()[0] in astromKeys else 1
4989
#------------------------------------------------------------------------------

def getComment(wrds):
    """ returns comment string

        Joins the words following the last '/' separator (or all of the
        words when no separator is present) with single spaces.
    """
    start = 0
    for pos, wrd in enumerate(wrds):
        if wrd == '/':
            start = pos + 1

    return ' '.join(wrds[start:])
5006
#------------------------------------------------------------------------------

def dec2sex(decimalValue, noDecPlc):
    """Converts a decimal value to a sexagesimal 'HH MM SS[.s]' string.

       @param decimalValue: Value to convert (e.g. hours or degrees).
       @type  decimalValue: float
       @param noDecPlc:     Number of decimal places on the seconds field
                            (0 gives an integer seconds field).
       @type  noDecPlc:     int

       @return: Sexagesimal string, '-' prefixed for negative input.
       @rtype:  str
    """
    # @FIXME: This duplicates EsoRelease function that also should be
    #         in Utilities!
    # BUG FIX: sign was int(value / abs(value)), which raised
    # ZeroDivisionError for an input of exactly zero.
    sign = -1 if decimalValue < 0 else 1
    decimalValue = abs(decimalValue)
    hrs = int(decimalValue)
    mnValue = 60. * (decimalValue - hrs)
    mns = int(mnValue)
    scValue = 60. * (mnValue - mns)
    scs = round(scValue, noDecPlc)
    # NOTE(review): seconds that round up to exactly 60 are not carried
    # into the minutes field - confirm whether callers rely on this.
    if noDecPlc > 0:
        sfmt = "%0" + str(3 + noDecPlc) + "." + str(noDecPlc) + "f"
    else:
        sfmt = "%02d"
    stValue = str("%02d" % (hrs)) + ' ' + str("%02d" % (mns)) + ' ' + str(sfmt % (scs))
    if sign == -1:
        stValue = '-' + stValue

    return stValue
5030
#------------------------------------------------------------------------------

def addAdditionalPadding(header, minNoItems):
    """Adds padding to fill up header, so is not too short.
    """
    paddingText = "WFAU added block padding"
    # Count missing cards once up front; a negative count pads nothing.
    numMissing = minNoItems - len(header.ascard)
    for _count in range(numMissing):
        header.add_comment(paddingText)

    return header
5040
#------------------------------------------------------------------------------

def reOrder(productList, dualImages):
    """ Reorders productList such that dualImages are always first, and
        order within these two categories is by productID.
    """
    uniqueProds = set(productList)
    dualFirst = sorted(uniqueProds.intersection(dualImages))
    remainder = sorted(uniqueProds.difference(dualImages))

    return dualFirst + remainder
5052
#------------------------------------------------------------------------------

def getRobustLimit(magData, magErrData, expML):
    """Find best fit function.

       Estimates the robust 5-sigma magnitude limit: selects objects within
       [expML-8, expML+2], bins them by magnitude, computes the median/MAD
       of the magnitude errors per bin, fits a polynomial to error vs.
       magnitude and returns the fit root nearest the bin where the error
       first crosses the 5-sigma RMS limit.

       @param magData:    Array of magnitudes.
       @param magErrData: Array of magnitude errors (same order as magData).
       @param expML:      Expected magnitude limit, used to window the data.

       @return: Robust magnitude limit, or the database default real value
                when there are too few objects or the fit fails.
    """
    sigma = 5.  # 5 sigma limit.
    # Error corresponding to a 5-sigma flux detection: 2.5/(5*ln 10).
    rmsLimit = 2.5 / (sigma * math.log(10))
    minMag, maxMag = (expML - 8.), (expML + 2.)
    indices = numpy.nonzero(numpy.logical_and(
        numpy.greater_equal(magData, minMag),
        numpy.less_equal(magData, maxMag)))
    selData = numpy.zeros([indices[0].size],
        dtype=[('mag', numpy.float64), ('magErr', numpy.float64)])
    selData['mag'] = magData[indices]
    selData['magErr'] = magErrData[indices]
    selData = numpy.sort(selData, order='mag')
    # Between 10 and 50 equal-population magnitude bins.
    nBins = min(max(selData.shape[0] // Cu13.numObjsBin, 10), 50)
    nObjsBin = selData.shape[0] // nBins
    nearBin = 0
    if selData.shape[0] > 20:
        param = [0.0, 1.0, 0.]  # initial quadratic fit parameters
        binData = []
        alreadyPassed = False
        for magBin in range(nBins):
            subSelData = selData[magBin * nObjsBin : (magBin + 1) * nObjsBin]
            bestMag = numpy.median(subSelData['mag'])
            medErr, madErr = stats.calcMAD(subSelData['magErr'], 0.000001)
            # Remember the first bin whose median error crosses the limit.
            if (medErr) > rmsLimit and not alreadyPassed:
                nearBin = magBin
                alreadyPassed = True
            binData.append((float(bestMag), float(medErr),
                (1.48 * float(madErr)) / math.sqrt(nObjsBin - 1)))

            # If never crossed, track the bin closest to the limit.
            if not alreadyPassed and (medErr / rmsLimit) > 0.85:
                nearBin = magBin
        nearMagValue = binData[nearBin][0]
        # Find bins around nearBin
        firstBin = max(nearBin - 2, 0)
        lastBin = min(firstBin + 5, nBins)
        firstBin = lastBin - 5
        binData = [data for ii, data in enumerate(binData)
                   if ii >= firstBin and ii < lastBin]

        maxErr = max([err for _mag, _rms, err in binData])
        minMag = binData[0][0] - 20 * maxErr
        maxMag = binData[-1][0] + 20 * maxErr
        if maxMag < (expML + 0.5):
            maxMag = expML + 0.5
        fit = LSQ.polynomialLeastSquaresFit(param, binData)
        #nDof=len(binData)-3
    else:
        # Too few objects for a fit: default parameters and chi-square.
        fit = ((dbc.realDefault(), dbc.realDefault(), dbc.realDefault()),
               dbc.realDefault())
        #nDof= dbc.intDefault()
    # Now find where
    # NOTE(review): nearMagValue is only bound on the >20-objects path;
    # this relies on fit[1] (chi-square) never being positive otherwise.
    if fit[1] > 0.:
        soln = fit[0]
        # Solve p(mag) = rmsLimit by shifting the constant term.
        soln[0] -= rmsLimit
        # numpy.roots expects highest-order coefficient first.
        soln2 = [soln[len(soln) - ii - 1] for ii, _pc in enumerate(soln)]
        roots = numpy.roots(soln2)
        # Find solution in range
        nearestRootList = [(root, abs(root - nearMagValue)) for root in roots]
        nearestRoot = min(nearestRootList, key=itemgetter(1))[0]
        # if root > minMag and root < maxMag:
        #     return root
        return nearestRoot
    return dbc.realDefault()
#------------------------------------------------------------------------------

# @TODO: This function is incomplete...

#def getSaturLimit(satLevel, fluxData, magData, classData):
#    """
#    """
#    selData = numpy.zeros([fluxData.size], dtype=[('flux', numpy.float64),
#        ('mag', numpy.float64), ('cls', numpy.int16)])
#
#    selData['flux'] = fluxData
#    selData['mag'] = magData
#    selData['cls'] = classData
#    indices = numpy.nonzero(numpy.equal(selData['cls'], -9))
#
#    medM, minM, maxM = \
#        stats.clippedMADHistogram(selData[indices]['mag'], minSd=0.00001,
#                                  retValues="med,min,max")
#
#    indices = numpy.nonzero(numpy.logical_and(
#        numpy.greater_equal(selData['flux'], (0.85 * satLevel)),
#        numpy.less_equal(selData['flux'], (1.15 * satLevel))))
#
#    satMag = stats.clippedMADHistogram(selData[indices]['mag'], minSd=0.00001,
#                                       retValues="med")
#
#    return maxM

#------------------------------------------------------------------------------
# Entry point for CU13

# Allow module to be imported as well as executed from the command line
if __name__ == "__main__":

    # Define specific command-line interface settings required by CU13
    CLI.progArgs += [
        CLI.Argument("programmeID", "DXS"),
        CLI.Argument("begin_date", "05A", isValOK=CLI.isDateOK),
        CLI.Argument("end_date", "05A", isValOK=CLI.isDateOK),
        CLI.Argument("release_number", '3', isValOK=lambda val: val.isdigit())]

    CLI.progOpts += [
        CLI.Option('C', "casuToolsDir",
            "CASUTOOLS code directory to use. Default is current soft-linked directory",
            "NAME"),

        CLI.Option('E', 'extrTool',
            "manually override extraction tool choice",
            "NAME", isValOK=lambda x: x.upper() == 'CASU' or x.upper() == 'SEX'),

        CLI.Option('F', "stackOnly",
            "Do stacks only"),

        CLI.Option('N', "num_parts",
            "Split stacking into this number of parts",
            "NUMBER", isValOK=lambda val: val.isdigit()),

        CLI.Option('P', "reuse",
            "use already produced stack in this stack"),

        CLI.Option('R', "restart",
            "restart a test in given directory from where it got to",
            "NAME"),

        CLI.Option('S', "stacker",
            "code to use for stacking",
            "FILE", Cu13.stacker),

        CLI.Option('f', "file_list",
            "save list of deep stacks created to this file path",
            "FILE"),

        CLI.Option('l', "products",
            "comma separated list of highest layer of products e.g. mosaic "
            "products if stacks and mosaics, tile products if stacks and tiles or "
            "stacks if just stacks",
            "LIST"),

        CLI.Option('m', "multi_stage",
            "Divide stacking into multiple stages to save memory"),

        CLI.Option('n', "minProdID",
            "start at this productID in highest layer of products",
            "NUMBER", isValOK=lambda val: val.isdigit()),

        CLI.Option('o', "dir_date",
            "output directory date",
            "DATE", Cu13.dirDate,
            isValOK=lambda val: len(val) >= 8 and CLI.isDateOK(val)),

        CLI.Option('p', "reprocess",
            "Comma separated list of reprocess product types; e.g. stack,tile", "LIST"),

        CLI.Option('q', "notDepOnly",
            "Do not use deprecated extensions"),

        CLI.Option('r', "redo_header",
            "Do not reproduce stack - just update the header"),

        CLI.Option('s', "num_stack",
            "number of intermediate stacks to use",
            "NUMBER", isValOK=lambda val: val.isdigit()),

        CLI.Option('v', "verbose",
            "more verbose logging"),

        CLI.Option('x', "maxProdID",
            "finish at this productID in highest layer of products",
            "NUMBER", isValOK=lambda val: val.isdigit()),

        CLI.Option('y', "threads",
            "maximum number of processors to use (minimum allowed is %s)"
            % SystemConstants.minNumCPUs,
            "NUMBER", str(Cu13.numCPUs), isValOK=lambda val: val.isdigit()),

        CLI.Option('z', "uncompressed",
            "Leave deep stacks uncompressed")]

    cli = CLI(Cu13, "$Revision: 10077 $")
    Logger.isVerbose = cli.getOpt("verbose")
    Logger.addMessage(cli.getProgDetails())
    CLI.check64bitServer()
    cu = Cu13(cli.getArg("programmeID"), cli=cli)
    # Validate the requested observation date range before proceeding.
    try:
        cu.dateRange = cu.sysc.obsCal.dateRange(cli.getArg("begin_date"),
                                                cli.getArg("end_date"))
    except Exception as error:
        eType = "Invalid Option"
        Logger.addExceptionMessage(error, eType)
        raise SystemExit(eType + ": see log " + cu._log.pathName)

    # Output directory date: normalised to YYYYMMDD form.
    cu.dirDate = \
        utils.makeDateTime(cli.getOpt("dir_date")).date.replace('-', '')

    # Transfer optional settings onto the CU13 session object; numeric
    # options are converted from their string form here.
    if cli.getOpt('extrTool'):
        cu.extractor = cli.getOpt('extrTool').upper()

    if cli.getOpt("maxProdID"):
        cu.maxProdID = int(cli.getOpt("maxProdID"))

    if cli.getOpt("minProdID"):
        cu.minProdID = int(cli.getOpt("minProdID"))

    if cli.getOpt("multi_stage") and cli.getOpt("num_parts"):
        cu.numParts = int(cli.getOpt("num_parts"))

    if cli.getOpt("num_stack"):
        cu.numStack = int(cli.getOpt("num_stack"))

    if cli.getOpt("products"):
        cu.reqProdIDs = csv.values(cli.getOpt("products"))

    if cli.getOpt("reprocess"):
        cu.reprocessTypes = csv.values(cli.getOpt("reprocess"))

    cu.breakIntoParts = cli.getOpt("multi_stage")
    cu.casuSoftDir = cli.getOpt("casuToolsDir")
    cu.fileList = cli.getOpt("file_list")
    cu.isCompressed = not cli.getOpt("uncompressed")
    cu.notDepOnly = cli.getOpt("notDepOnly")
    cu.numCPUs = int(cli.getOpt("threads"))
    cu.redoHeader = cli.getOpt("redo_header")
    cu.releaseNum = int(cli.getArg("release_number"))
    cu.restartDir = cli.getOpt("restart")
    cu.stacker = cli.getOpt("stacker")
    cu.useDeep = cli.getOpt("reuse")
    cu.stackOnly = cli.getOpt("stackOnly")
    cu.run()

#------------------------------------------------------------------------------