
Source Code for Module invocations.monitoring.createCalendar

#! /usr/bin/env python
#------------------------------------------------------------------------------
#$Id: createCalendar.py 10243 2014-03-11 17:15:00Z EckhardSutorius $
"""
   Create semester-wise web pages containing an overview of CUs 1 to 4.

   @author: E. Sutorius
   @org:    WFAU, IfA, University of Edinburgh
"""
#------------------------------------------------------------------------------
from __future__ import division, print_function
from   collections import defaultdict, namedtuple
import mx.DateTime     as mxTime
from   numpy       import array
import os
import re
import threading, Queue
import time
import sys

from   wsatools.CLI                 import CLI
import wsatools.CSV                     as csv
import wsatools.DbConnect.DbConstants   as dbc
from   wsatools.DbConnect.DbSession import DbSession, Join
from   wsatools.FitsUtils           import FitsList
from   wsatools.File                import HTMLFile, PickleFile
from   wsatools.Logger              import Logger
import mx.ODBC.unixODBC                 as odbc
from   wsatools.SystemConstants     import SystemConstants
import wsatools.Utilities               as utils
#------------------------------------------------------------------------------
# Generally used variables
Programmes = namedtuple("Programmes", "fullList, description")
AdditDbSessions = namedtuple("AdditDbSessions", "name, connection, versions")
dateFormat = "%04d%02d%02d"
dateVersFormat = "%4d%02d%02d_v%s"
sqlFormat = "%4d-%02d-%02d"
deprecationComp = "<128"  # "<128" # "=0"
verbose = 0
#------------------------------------------------------------------------------
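# --- Illustrative note (not part of the original module) ---
# A minimal sketch of how the date-format strings above are filled in by the
# code below, assuming a date of 2010-03-05 and a hypothetical version "1.3":
#
#     dateFormat % (2010, 3, 5)               # -> "20100305"
#     dateVersFormat % (2010, 3, 5, "1.3")    # -> "20100305_v1.3"
#     sqlFormat % (2010, 3, 5)                # -> "2010-03-05"
#------------------------------------------------------------------------------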
def fixPointEight(dateVersStr):
    if dateVersStr.endswith("v0.8"):
        dateVersStr = dateVersStr.rpartition('.')[0]
    elif dateVersStr.endswith("v0"):
        dateVersStr = dateVersStr + ".8"
    return dateVersStr

def createStatsFile(dirName):
    fileName = os.path.join(dirName, ".stats_%s" % os.path.basename(dirName))
    return PickleFile(fileName)

def compare(value, comparison):
    test = False
    compStr = ''
    valStr = ''
    for x in comparison:
        if x in "<>=!":
            compStr += x
        elif x in "+-.":
            valStr += x
        elif x.isdigit():
            valStr += x
    try:
        valType = (float if '.' in valStr else int)
        compVal = valType(valStr)
    except ValueError:
        raise SystemExit("Wrong comparison: %s [%s, %s]" % (
            comparison, compStr, valStr))

    if compStr in ("=", "=="):
        return value == compVal
    if compStr == "!=":
        return value != compVal
    if compStr == ">":
        return value > compVal
    if compStr == "<":
        return value < compVal
    if compStr == ">=":
        return value >= compVal
    if compStr == "<=":
        return value <= compVal

#------------------------------------------------------------------------------
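# --- Illustrative usage sketch (not part of the original module) ---
# How the helpers above behave, assuming hypothetical inputs:
#
#     fixPointEight("20100305_v0")      # -> "20100305_v0.8"
#     fixPointEight("20100305_v0.8")    # -> "20100305_v0"
#     fixPointEight("20100305_v1.3")    # -> unchanged
#
#     compare(64, "<128")               # -> True  (used with deprecationComp)
#     compare(0, "=0")                  # -> True
#     compare(130, "<128")              # -> False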
class WorkerThread(threading.Thread):
    """ A worker thread that takes directory names from a queue, finds all
        files in them recursively and reports the result.

        Input is done by placing directory names (as strings) into the
        Queue passed in dir_q.

        Output is done by placing tuples into the Queue passed in result_q.
        Each tuple is (thread name, dirname, {file numbers by pattern}).

        Ask the thread to stop by calling its join() method.
    """
    def __init__(self, dir_q, result_q, pattern):
        super(WorkerThread, self).__init__()
        self.dir_q = dir_q
        self.result_q = result_q
        self.REpattern = pattern
        self.stoprequest = threading.Event()

    def run(self):
        """ Run the thread.
        """
        while not self.stoprequest.isSet():
            try:
                dirName = self.dir_q.get(True, 0.05)
                fileCounts = self._files_in_dir(dirName)
                self.result_q.put((self.name, dirName, fileCounts))
            except Queue.Empty:
                continue

    def join(self, timeout=None):
        self.stoprequest.set()
        super(WorkerThread, self).join(timeout)

    def _files_in_dir(self, dirName):
        """ Given a directory name, returns the number of all files matching
            the given regex patterns contained in this directory.
        """
        statsFile = createStatsFile(dirName)
        # check if stats file is older than dir modification
        if statsFile.exists() and \
           os.stat(statsFile.name).st_mtime >= os.stat(dirName).st_mtime:
            fileCount = list(statsFile.pickleRead())[0]
        else:
            fileCount = defaultdict(int)
            for path, dirs, files in os.walk(dirName):
                # prune dirs
                for dotDir in (".svn", ".emacs_backups"):
                    if dotDir in dirs:
                        dirs.remove(dotDir)
                # get file count
                for fileName in files:
                    for pattern in self.REpattern:
                        if self.REpattern[pattern].search(fileName):
                            fileCount[pattern] += 1
            statsFile.pickleWrite(fileCount)
        return fileCount

#------------------------------------------------------------------------------
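# --- Illustrative usage sketch (not part of the original module) ---
# A minimal sketch of the dir_q/result_q pattern that CuStats._getFileCount()
# below builds on, assuming a hypothetical directory path and a single regex
# pattern keyed "fit":
#
#     dir_q, result_q = Queue.Queue(), Queue.Queue()
#     patterns = {"fit": re.compile(r'.*\.fit.*')}
#     pool = [WorkerThread(dir_q, result_q, patterns) for _ in range(4)]
#     for thread in pool:
#         thread.start()
#     dir_q.put("/data/20100305_v1.3")              # hypothetical path
#     threadName, dirName, counts = result_q.get()  # blocks until a result
#     for thread in pool:
#         thread.join()          # sets the stop request, then joins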
class CuStats(object):
    """ Trawls database to produce CU statistics.
    """
    archive = None                  #: Connection to database to query.
    cuStats = defaultdict(list)     #: CU statistics.
    surveys = None                  #: List of surveys depending on archive.
    obsCal = None                   #: Observation dates.
    versNums = []                   #: List of version numbers.
    excludedFileTypes = ["_list"]   #: exclude these files
    remStatsCU = '0'                #: Remove the stats file for this CU (1/2)
    verbose = 0
    #--------------------------------------------------------------------------
    def __init__(self):
        """
        Initialise.

        """
        self.archive.enableDirtyRead()
        #self.versStep = self.archive.sysc.versStep
        #self.versFormat = self.archive.sysc.versFormat
        self.Statistics = namedtuple("Statistics",
            ', '.join(CuStats.surveys.fullList) + ", all")
        self.additArchives = {}
        if self.archive.sysc.isVSA():
            if self.archive.database == "VSA":
                if self.archive.server == "ramses2":
                    self.additArchives["vvv"] = AdditDbSessions(
                        "VSAVVV11", DbSession(
                            database="ramses12.VSAVVV",
                            userName=dbc.loadServerRwUsername()),
                        ["1.1", "1.2"])
                else:
                    self.additArchives["vvv"] = AdditDbSessions(
                        "VSAVVV13", DbSession(
                            database="ramses14.VSAVVV",
                            userName=dbc.loadServerRwUsername()),
                        ["1.3"])
        self.allArchives = self.additArchives
        if self.archive.sysc.isVSA():
            if self.archive.server == "ramses2":
                self.allArchives["vsa"] = AdditDbSessions(
                    "VSA12", DbSession(
                        database="ramses2.VSA",
                        userName=dbc.loadServerRwUsername()),
                    ["1.1", "1.2", "1.3"])
            else:
                self.allArchives["vsa"] = AdditDbSessions(
                    "VSA", DbSession(
                        database="ramses9.VSA",
                        userName=dbc.loadServerRwUsername()),
                    ["1.3"])

        elif self.archive.sysc.isOSA():
            self.allArchives["osa"] = AdditDbSessions(
                "OSA", DbSession(
                    database="ramses1.OSA",
                    userName=dbc.loadServerRwUsername()),
                ["0.9", "1.0"])

        else:
            self.allArchives["wsa"] = AdditDbSessions(
                "WSA", DbSession(
                    database="ramses1.WSA",
                    userName=dbc.loadServerRwUsername()),
                ['1', '2', '3', '4', '5'])

        if "test" in self.archive.database.lower():
            versions = {"VSA": ["1.3"], "WSA": ['5'], "OSA": ["1.0"]}
            self.allArchives[self.archive.sysc.loadDatabase.lower()] = \
                AdditDbSessions(self.archive.database,
                                DbSession(database="%s.%s" % (
                                    self.archive.server, self.archive.database),
                                    userName=dbc.loadServerRwUsername()),
                                versions[self.archive.sysc.loadDatabase])

            if self.archive.sysc.isVSA():
                self.allArchives["vvv"] = AdditDbSessions(
                    "VSAVVV13", DbSession(
                        database="%s.%s" % (
                            self.archive.server, self.archive.database),
                        userName=dbc.loadServerRwUsername()),
                    versions[self.archive.sysc.loadDatabase])

    #--------------------------------------------------------------------------
    def _getFileCount(self, directories, REpattern):
        # Create a single input and a single output queue for all threads.
        dir_q = Queue.Queue()
        result_q = Queue.Queue()
        fileCounts = defaultdict(lambda : defaultdict(int))

        # Create the "thread pool"
        pool = [WorkerThread(dir_q=dir_q, result_q=result_q, pattern=REpattern)
                for i in range(8)]

        # Start all threads
        for thread in pool:
            thread.start()

        # Give the workers some work to do
        print('Assigning %s dirs to workers' % len(directories))
        work_count = 0
        for direc in directories:
            if os.path.exists(direc):
                work_count += 1
                dir_q.put(direc)

        # Now get all the results
        while work_count > 0:
            # Blocking 'get' from a Queue.
            result = result_q.get()
            if verbose > 5:
                print('From thread %s: No. of files found in dir %s: %r' % (
                    result[0], result[1], result[2]))
            if result:
                print('.', sep='', end='')  #result
                sys.stdout.flush()
            for pattern in REpattern:
                fileCounts[pattern][os.path.basename(result[1])] += \
                    result[2][pattern]
            work_count -= 1
        print()

        # Ask threads to die and wait for them to do it
        for thread in pool:
            thread.join()

        return fileCounts

    #--------------------------------------------------------------------------
277 - def getCu1Stats(self, semesters):
278 """ 279 CU1Stats are {date: (pixNum, catNum, tileNum, fileNum)} 280 281 @param semesters: List of semesters. 282 @type semesters: list 283 284 """ 285 Logger.addMessage("Calculating CU1 statistics...") 286 cu1Files = FitsList(self.archive.sysc, prefix="mon_") 287 cu1Files.createFitsDateDict() 288 cu1Stats = defaultdict(dict) 289 290 dbFileDict = defaultdict(int) 291 for sem in semesters: 292 startDate, endDate = self.obsCal.getDates(sem) 293 dateStrList = [] 294 for mjd in range(int(startDate.mjd), int(endDate.mjd) + 1): 295 date = mxTime.DateTimeFromMJD(mjd) 296 for versNum in self.versNums[sem]: 297 dateStrList.append(fixPointEight(dateVersFormat % ( 298 date.year, date.month, date.day, versNum))) 299 300 for con in [self.archive]: 301 # enable dirty read 302 con.enableDirtyRead() 303 304 where = " AND ".join([ 305 "fileName not like '%e20%'", 306 "multiframeID < 1000000000000", 307 "dateVersStr in ('%s')" % "','".join(dateStrList)]) 308 309 results = con.query( 310 selectStr="dateVersStr,count(*)", 311 fromStr="FlatFileLookUp", 312 whereStr=where, 313 groupBy="dateVersStr", orderBy="dateVersStr") 314 315 for entry in results: 316 dbFileDict[entry[0]] += entry[1] 317 318 if self.verbose > 5: 319 print("finished querying FlatFileLookUp:" 320 " %d entries in dbFileDict" % len(dbFileDict)) 321 for sem in semesters: 322 startDate, endDate = self.obsCal.getDates(sem) 323 allDirPaths = [] 324 for mjd in xrange(int(startDate.mjd), int(endDate.mjd) + 1): 325 date = mxTime.DateTimeFromMJD(mjd) 326 if date not in self.obsCal.notObsDates()[sem]: 327 dateStr = dateFormat % (date.year, date.month, date.day) 328 for versNum in self.versNums[sem]: 329 dateVersStr = ("%s_v%s") % (dateStr, versNum) 330 if dateVersStr in cu1Files.invFitsDateDict: 331 diskPath = cu1Files.invFitsDateDict[dateVersStr] 332 if type(diskPath) == type([]): 333 diskPath = diskPath[0] 334 allDirPaths.append( 335 os.path.join(diskPath, dateVersStr)) 336 if self.verbose > 5: 337 print("allDirPaths: %r" % allDirPaths) 338 fileNumDict = defaultdict() 339 patternDict = {"fit": re.compile(r'.*\.fit.*'), 340 "cat": re.compile(r'.*%s\.fits\b' % \ 341 self.archive.sysc.catSuffix), 342 "pix": re.compile(r'%s.*\.fit\b' % \ 343 self.archive.sysc.casuPrefix), 344 "tl": re.compile(r'%s.*tl.*\.fit\b' % \ 345 self.archive.sysc.casuPrefix)} 346 for fileType in self.excludedFileTypes: 347 patternDict[fileType] = re.compile(r'.*%s.*' % fileType) 348 if self.archive.sysc.isOSA(): 349 patternDict["fix"] = re.compile(r'.*%s\.fits\b' % \ 350 self.archive.sysc.fixcatSuffix) 351 352 if '1' in self.remStatsCU: 353 for dirPath in allDirPaths: 354 statsFile = createStatsFile(dirPath) 355 statsFile.remove() 356 del statsFile 357 358 Logger.addMessage("Getting file counts...") 359 fileNumDict = self._getFileCount(allDirPaths, patternDict) 360 361 for mjd in xrange(int(startDate.mjd), int(endDate.mjd) + 1): 362 date = mxTime.DateTimeFromMJD(mjd) 363 if date not in self.obsCal.notObsDates()[sem]: 364 dateStr = dateFormat % (date.year, date.month, date.day) 365 for versNum in self.versNums[sem]: 366 cu1Stats[date.date][versNum] = (0, 0, 0, 0, 0) 367 dateVersStr = ("%s_v%s") % (dateStr, versNum) 368 if dateVersStr in cu1Files.invFitsDateDict: 369 fileNum = fileNumDict["fit"][dateVersStr] 370 catNum = fileNumDict["cat"][dateVersStr] 371 pixNum = fileNumDict["pix"][dateVersStr] 372 tileNum = fileNumDict["tl"][dateVersStr] 373 fixcatNum = 0 374 excludedNum = 0 375 376 # remove counts for excluded file types 377 for fileType in self.excludedFileTypes: 378 
excludedNum += fileNumDict[fileType][dateVersStr] 379 fileNum -= excludedNum 380 381 if self.archive.sysc.isOSA(): 382 fixcatNum =fileNumDict["fix"][dateVersStr] 383 tileNum = catNum - fixcatNum 384 fileNum -= tileNum 385 catNum = fixcatNum 386 387 cu1Stats[date.date][versNum] = \ 388 (pixNum, catNum, tileNum, fileNum, 389 dbFileDict[dateVersStr]) 390 else: 391 cu1Stats[date.date][versNum] = \ 392 (0, 0, 0, 0, dbFileDict[dateVersStr]) 393 394 self.cuStats["cu1"].append(cu1Stats)
395 396 397 #-------------------------------------------------------------------------- 398
399 - def getCu2Stats(self, semesters):
400 """ 401 CU2Stats are {date: (fileNum, fileNum/maxHDU)} 402 403 @param semesters: List of semesters. 404 @type semesters: list 405 406 """ 407 Logger.addMessage("Calculating CU2 statistics...") 408 cu2Files = FitsList(self.archive.sysc, prefix="mon_") 409 cu2Files.createFitsDateDict(ingestDirectory="products/jpgs", 410 forceLists=True) 411 cu2Stats = defaultdict(dict) 412 dbJpgsDict = defaultdict(int) 413 for sem in semesters: 414 startDate, endDate = self.obsCal.getDates(sem) 415 dateStrList = [] 416 for mjd in range(int(startDate.mjd), int(endDate.mjd) + 1): 417 date = mxTime.DateTimeFromMJD(mjd) 418 for versNum in self.versNums[sem]: 419 dateStrList.append(fixPointEight(dateVersFormat % ( 420 date.year, date.month, date.day, versNum))) 421 422 for con in [self.archive]: 423 # enable dirty read 424 con.enableDirtyRead() 425 if self.verbose > 5: 426 print("Querying %s.%s ..." % (con.server, con.database)) 427 428 where = " AND ".join([ 429 "compfile not like 'NONE'", 430 "fileName not like '%e20%'", 431 "deprecated < 128", 432 "dateVersStr in ('%s')" % "','".join(dateStrList)]) 433 434 results = con.query( 435 selectStr="dateVersStr,count(*)", 436 fromStr=Join(["MultiframeDetector", "FlatFileLookUp"], 437 ["multiframeID"]), 438 whereStr=where, 439 groupBy="dateVersStr", orderBy="dateVersStr") 440 441 if self.verbose > 5: 442 print("got %d entries in MultiframeDetector" % len(results)) 443 for entry in results: 444 dbJpgsDict[entry[0]] += entry[1] 445 446 if self.verbose > 5: 447 print("finished querying MultiframeDetector:" 448 " %d entries in dbJpgsDict" % len(dbJpgsDict)) 449 450 for sem in semesters: 451 startDate, endDate = self.obsCal.getDates(sem) 452 # get all paths to jpg directories 453 allDirPaths = [] 454 for mjd in xrange(int(startDate.mjd), int(endDate.mjd) + 1): 455 date = mxTime.DateTimeFromMJD(mjd) 456 dateStr = dateFormat % (date.year, date.month, date.day) 457 for versNum in self.versNums[sem]: 458 dateVersStr = ("%s_v%s") % (dateStr, versNum) 459 if dateVersStr in cu2Files.invFitsDateDict: 460 for diskPath in cu2Files.invFitsDateDict[dateVersStr]: 461 if type(diskPath) == type([]): 462 diskPath = diskPath[0] 463 allDirPaths.append( 464 os.path.join(diskPath, dateVersStr)) 465 fileNumDict = defaultdict() 466 patternDict = {"jpg": re.compile(r'.*\.jpg\b'), 467 "e20": re.compile(r'e20.*\.jpg\b'), 468 "tl": re.compile(r'.*tl.*\.jpg\b')} 469 470 if '2' in self.remStatsCU: 471 for dirPath in allDirPaths: 472 statsFile = createStatsFile(dirPath) 473 statsFile.remove() 474 del statsFile 475 476 Logger.addMessage("Getting JPG counts...") 477 fileNumDict = self._getFileCount(allDirPaths, patternDict) 478 479 for mjd in xrange(int(startDate.mjd), int(endDate.mjd) + 1): 480 date = mxTime.DateTimeFromMJD(mjd) 481 dateStr = dateFormat % (date.year, date.month, date.day) 482 for versNum in self.versNums[sem]: 483 cu2Stats[date.date][versNum] = (0, 0, 0) 484 dateVersStr = ("%s_v%s") % (dateStr, versNum) 485 if dateVersStr in cu2Files.invFitsDateDict: 486 fileNum = fileNumDict["jpg"][dateVersStr] 487 eFileNum = fileNumDict["e20"][dateVersStr] 488 tileNum = fileNumDict["tl"][dateVersStr] 489 fileNum -= eFileNum 490 fileNum -= tileNum 491 492 try: 493 jpgsInDb = dbJpgsDict[fixPointEight(dateVersStr)] / ( 494 fileNum + tileNum) 495 except KeyError: 496 jpgsInDb = -1 497 pass 498 except ZeroDivisionError: 499 jpgsInDb = -2 500 pass 501 502 if fileNum > 0: 503 cu2Stats[date.date][versNum] = \ 504 (fileNum + tileNum, 505 int(fileNum / (self.archive.sysc.maxHDUs - 1)) \ 506 + tileNum, 507 
jpgsInDb) 508 else: 509 cu2Stats[date.date][versNum] = None 510 511 self.cuStats["cu2"].append(cu2Stats)
512 513 #-------------------------------------------------------------------------- 514
515 - def getCu3Stats(self, semesters):
516 """ 517 CU3Stats are {date: (pixNum, catNum, tileNum, fileNum)} 518 519 @param semesters: List of semesters. 520 @type semesters: list 521 522 """ 523 Logger.addMessage("Calculating CU3 statistics...") 524 cu3Files = FitsList(self.archive.sysc, prefix="mon_") 525 cu3Files.fitsDateDict = defaultdict(list) 526 cu3Stats = defaultdict(dict) 527 for sem in semesters: 528 startDate, endDate = self.obsCal.getDates(sem) 529 dateStrList = [] 530 for mjd in range(int(startDate.mjd), int(endDate.mjd) + 1): 531 date = mxTime.DateTimeFromMJD(mjd) 532 dateStr = dateFormat % (date.year, date.month, date.day) 533 for versNum in self.versNums[sem]: 534 dateVersStr = ("%s_v%s") % ( 535 dateStr, versNum) 536 dateStrList.append(fixPointEight(dateVersStr)) 537 538 for con in [self.archive]: 539 # enable dirty read 540 con.enableDirtyRead() 541 results = utils.unpackList(con.query( 542 selectStr="Multiframe.fileName, catName", 543 fromStr=Join(["Multiframe", "FlatFileLookUp"], 544 ["multiframeID"]), 545 whereStr=" AND ".join([ 546 "dateVersStr in ('%s')" % "','".join(dateStrList), 547 "Multiframe.fileName NOT LIKE '%e20%'", 548 "Multiframe.filename NOT LIKE '%deprecated%'"]))) 549 550 for entry in results: 551 if entry != dbc.charDefault(): 552 dateStr = os.path.basename(os.path.dirname(entry)) 553 cu3Files.fitsDateDict[dateStr].append(entry) 554 for dateStr in cu3Files.fitsDateDict: 555 cu3Files.fitsDateDict[dateStr] = list(set( 556 cu3Files.fitsDateDict[dateStr])) 557 558 for sem in semesters: 559 startDate, endDate = self.obsCal.getDates(sem) 560 for mjd in range(int(startDate.mjd), int(endDate.mjd) + 1): 561 date = mxTime.DateTimeFromMJD(mjd) 562 dateStr = dateFormat % (date.year, date.month, date.day) 563 for versNum in self.versNums[sem]: 564 cu3Stats[date.date][versNum] = (0, 0, 0, 0) 565 dateVersStr = ("%s_v%s") % ( 566 dateStr, versNum) 567 if fixPointEight(dateVersStr) in cu3Files.fitsDateDict: 568 fileNum, pixNum, catNum, tileNum = \ 569 cu3Files.countFileTypes(cu3Files.fitsDateDict[ 570 fixPointEight(dateVersStr)]) 571 if sum([fileNum, pixNum, catNum, tileNum]) > 0: 572 cu3Stats[date.date][versNum] = \ 573 (pixNum, catNum, tileNum, fileNum) 574 else: 575 cu3Stats[date.date][versNum] = None 576 self.cuStats["cu3"].append(cu3Stats)
577 578 #-------------------------------------------------------------------------- 579
580 - def doStuff(self, cu4Files, existDetTabs, progIdDict, conDict, detTab, 581 scienceFitsPrefix, dateStrDict, monthDict, resultsMeta):
582 """ Automatically generated method. 583 """ 584 DetRawDataResults = namedtuple( 585 "DetRawDataResults", "dateVers, mfID, deprecated, dets") 586 587 DetDataResults = namedtuple( 588 "DetDataResults", "dateVers, mfID, dets") 589 590 for archiveName, versNumList in conDict[detTab]: 591 con = self.allArchives[archiveName].connection # enable dirty read 592 con.enableDirtyRead() 593 conServName = '.'.join([con.server, con.database]) 594 for versNum in versNumList: 595 metaMfidDict = defaultdict(set) 596 metaMfidDict128 = defaultdict(set) 597 for x in resultsMeta[versNum]: 598 if (x.dateVers in dateStrDict[versNum] and \ 599 x.progID in progIdDict and \ 600 progIdDict[x.progID] == detTab): 601 if compare(x.deprecated, deprecationComp): 602 metaMfidDict[x.dateVers].add(x.mfID) 603 else: 604 metaMfidDict128[x.dateVers].add(x.mfID) 605 606 dateVersList = sorted(set(metaMfidDict.keys()).union( 607 metaMfidDict128.keys())) 608 609 if verbose > 3: 610 print("#days/mfids: %d/%d :: %r" % ( 611 len(metaMfidDict.values()), 612 len(list(utils.unpackList(metaMfidDict.values()))), 613 dateVersList)) 614 resultsRaw = [] 615 resultsPhoto = [] 616 resultsAstro = [] 617 if (len(list(utils.unpackList( 618 metaMfidDict.values()))) > 0 or 619 len(list(utils.unpackList( 620 metaMfidDict128.values()))) > 0): 621 # query detectionRaw table 622 Logger.addMessage("%s ...%sRaw v%s" % (conServName, 623 detTab, versNum)) 624 for month in sorted(monthDict[versNum]): 625 detectTab = ("%sRaw%s" % (detTab, month) 626 if "vvv" in detTab else "%sRaw" % detTab) 627 if not ("vvv" in detTab and \ 628 detectTab not in existDetTabs): 629 resultsRaw.extend(self.queryTableR( 630 con, detectTab, monthDict[versNum][month], 631 scienceFitsPrefix, DetRawDataResults)) 632 633 if verbose > 4: 634 print("RAW:", resultsRaw) 635 print(">>", len(resultsRaw)) 636 if verbose > 3 and not "uhs" in detTab: 637 print("RAW:", resultsRaw) 638 639 raw0MfidDict = defaultdict(set) 640 for x in resultsRaw: 641 if (x.dateVers in dateStrDict[versNum] and 642 compare(x.deprecated, deprecationComp)): 643 raw0MfidDict[x.dateVers].add(x.mfID) 644 645 # query detectionPhotometry table 646 Logger.addMessage("%s ...%sPhotometry v%s" % ( 647 len(conServName) * ' ', detTab, versNum)) 648 for month in sorted(monthDict[versNum]): 649 detectTab = ("%sPhotometry%s" % (detTab, month) 650 if "vvv" in detTab else "%sPhotometry" % detTab) 651 if not ("vvv" in detTab and 652 detectTab not in existDetTabs): 653 resultsPhoto.extend(self.queryTablePA( 654 con, detectTab, monthDict[versNum][month], 655 scienceFitsPrefix, DetDataResults)) 656 657 if verbose > 4: 658 print("PHOTO:", resultsPhoto) 659 print(">", len(resultsPhoto)) 660 661 # query detectionAstrometry table 662 Logger.addMessage("%s ...%sAstrometry v%s" % ( 663 len(conServName) * ' ', detTab, versNum)) 664 for month in sorted(monthDict[versNum]): 665 detectTab = ("%sAstrometry%s" % (detTab, month) 666 if "vvv" in detTab 667 else "%sAstrometry" % detTab) 668 if not ("vvv" in detTab and 669 detectTab not in existDetTabs): 670 resultsAstro.extend(self.queryTablePA( 671 con, detectTab, monthDict[versNum][month], 672 scienceFitsPrefix, DetDataResults)) 673 674 if verbose > 4: 675 print("ASTRO:", resultsAstro) 676 print(">", len(resultsAstro)) 677 else: 678 if verbose > 0: 679 Logger.addMessage("%s ...%s v%s ---" % ( 680 conServName, detTab.replace("Detection", ''), 681 versNum)) 682 683 # calculate sums for all data sets 684 for dateVersStr in sorted(dateStrDict[versNum]): 685 if dateVersStr in dateVersList: 686 
detSumMeta = sum( 687 [x.fitsRows for x in resultsMeta[versNum] if 688 x.dateVers == dateVersStr and 689 x.progID in progIdDict and 690 progIdDict[x.progID] == detTab and 691 compare(x.deprecated, deprecationComp)]) 692 if verbose > 3 and "vik" in detTab \ 693 and not "uhs" in detTab: 694 for x in resultsMeta[versNum]: 695 if x.dateVers == dateVersStr and \ 696 x.progID in progIdDict and \ 697 progIdDict[x.progID] == detTab and \ 698 compare(x.deprecated, deprecationComp): 699 Logger.addMessage( 700 "%s: META: %s :: %d : %d" % ( 701 dateVersStr, progIdDict[x.progID], 702 x.mfID, x.fitsRows)) 703 detSumMeta128 = sum( 704 [x.fitsRows for x in resultsMeta[versNum] if 705 x.dateVers == dateVersStr and 706 x.progID in progIdDict and 707 progIdDict[x.progID] == detTab and 708 not compare(x.deprecated, deprecationComp)]) 709 detSumRaw0 = sum( 710 [x.dets for x in resultsRaw if 711 x.dateVers == dateVersStr and 712 compare(x.deprecated, deprecationComp)]) 713 detSumRaw128 = sum( 714 [x.dets for x in resultsRaw if 715 x.dateVers == dateVersStr and 716 not compare(x.deprecated, deprecationComp)]) 717 detSumPhoto = sum( 718 [x.dets for x in resultsPhoto if 719 x.dateVers == dateVersStr and 720 x.mfID in raw0MfidDict[x.dateVers]]) 721 detSumAstro = sum( 722 [x.dets for x in resultsAstro if 723 x.dateVers == dateVersStr and 724 x.mfID in raw0MfidDict[x.dateVers]]) 725 else: 726 detSumMeta = 0 727 detSumMeta128 = 0 728 detSumRaw0 = 0 729 detSumRaw128 = 0 730 detSumPhoto = 0 731 detSumAstro = 0 732 733 cu4Files.fitsDateDict[fixPointEight(dateVersStr)]\ 734 .append((detTab.replace("Detection", ''), 735 (detSumRaw0, detSumPhoto, detSumAstro, 736 detSumMeta, detSumRaw128, detSumMeta128)))
737 738 #-------------------------------------------------------------------------- 739
740 - def getCu4Stats(self, semesters):
741 """ 742 CU4Stats are {date: (rawStats[0,128], photoStats, fitsStats)} 743 744 @param semesters: List of semesters. 745 @type semesters: list 746 747 """ 748 Logger.addMessage("Calculating CU4 statistics...") 749 cu4Files = FitsList(self.archive.sysc, prefix="mon_") 750 cu4Files.createFitsDateDict() 751 cu4Stats = defaultdict(lambda : defaultdict()) 752 cu4Db = (self.archive.database if self.archive.database == "VSA_v1_3" 753 else self.archive.sysc.loadDatabase) 754 for sem in semesters: 755 doneCU3 = defaultdict() 756 doneCU4 = defaultdict() 757 for versNum in self.versNums[sem]: 758 doneCU3[versNum] = False 759 doneCU4[versNum] = False 760 761 startDate, endDate = self.obsCal.getDates(sem) 762 if sem == "2007A": 763 startDate = startDate - 4 764 for mjd in range(int(startDate.mjd), int(endDate.mjd) + 1): 765 date = mxTime.DateTimeFromMJD(mjd) 766 dateStr = dateFormat % (date.year, date.month, date.day) 767 for versNum in self.versNums[sem]: 768 dateVersStr = ("%s_v%s") % (dateStr, versNum) 769 #if fixPointEight(dateVersStr) \ 770 # in cu4Files.invFitsDateDict: 771 # diskPath = cu4Files.invFitsDateDict[ 772 # fixPointEight(dateVersStr)] 773 if dateVersStr in cu4Files.invFitsDateDict: 774 diskPath = cu4Files.invFitsDateDict[dateVersStr] 775 if type(diskPath) == type([]): 776 diskPath = diskPath[0] 777 dirPath = os.path.join(diskPath, dateVersStr) 778 if os.path.exists(os.path.join( 779 dirPath, "CU03ED_%s" % cu4Db)): 780 doneCU3[versNum] = True 781 if any([os.path.exists(os.path.join(dirPath, 782 "CU04ED_%s" % cu4Db)), 783 os.path.exists(os.path.join(dirPath, 784 "CU04ED_%sVVV" % cu4Db))]): 785 doneCU4[versNum] = True 786 787 if any(doneCU3.values()): 788 dbTables = set() 789 existDetTabs = set() 790 for con in [self.allArchives[archiveName].connection 791 for archiveName in self.allArchives]: 792 Logger.addMessage( 793 "Getting detection table names from %s..." 
% 794 '.'.join([con.server, con.database])) 795 # enable dirty read 796 con.enableDirtyRead() 797 where = " AND ".join([ 798 "name LIKE '%Detection'", 799 "name NOT LIKE '%[_]%'", 800 ]) 801 dbTables.update(con.query( 802 "name", "sysobjects", whereStr=where)) 803 existDetTabs.update(con.query( 804 "name", "sysobjects", "name like 'vvvDetection%'")) 805 806 minProgID = (100 if self.archive.sysc.isWSA() else 1) 807 tmpProgIdDict = dict(self.archive.query( 808 "programmeID, detectionTable", "Programme", 809 "detectionTable like '%%Detection%%' AND programmeID>=%d" % minProgID)) 810 if self.archive.sysc.isVSA() and "SV" not in semesters: 811 del tmpProgIdDict[100] # remove SV-Orion 812 del tmpProgIdDict[101] # remove SV-NGC253 813 814 theDetSurvs = [x.replace("Detection", '') 815 for x in tmpProgIdDict.values()] 816 tmpProgIdDict.update(utils.invertDict(tmpProgIdDict)) 817 818 progIdDict = defaultdict() 819 theSurvs = [] 820 for surv in theDetSurvs: 821 if self.archive.sysc.isVSA(): 822 if surv.startswith(('s', 'u', 'v')) \ 823 and surv in self.surveys.fullList: 824 theSurvs.append(surv + "Detection") 825 elif len(filter(str.isdigit, surv)) > 0 \ 826 and "ddt_ns" in self.surveys.fullList: 827 theSurvs.append(surv + "Detection") 828 elif surv in ["cal", "tech", "maint", "comm"] \ 829 and "cal_comm" in self.surveys.fullList: 830 theSurvs.append(surv + "Detection") 831 elif self.archive.sysc.isWSA(): 832 if surv in ["dxs", "gcs", "gps", "las", "uds", "uhs"] \ 833 and surv in self.surveys.fullList: 834 theSurvs.append(surv + "Detection") 835 elif len(filter(str.isdigit, surv)) > 0 \ 836 and surv in ["cal", "comm", "pts"] \ 837 and "cal_ns" in self.surveys.fullList: 838 theSurvs.append(surv + "Detection") 839 else: 840 theSurvs.append(surv + "Detection") 841 842 for detName in theSurvs: 843 progIdDict[detName] = tmpProgIdDict[detName] 844 progIdDict[tmpProgIdDict[detName]] = detName 845 846 detTablesDict = {} 847 conDict = defaultdict(list) 848 archives = defaultdict(list) 849 for archiveName in self.allArchives: 850 if "vvv" in archiveName: 851 archives["vvv"].append(archiveName) 852 else: 853 archives["other"].append(archiveName) 854 855 versions = [v for v in self.allArchives[archiveName].versions 856 if self.obsCal.minVers(sem) <= v <= self.obsCal.maxVers(sem) 857 and v in self.versNums[sem]] 858 859 for detTab in dbTables.intersection(progIdDict): 860 detTablesDict[detTab] = progIdDict[detTab] 861 if "vvv" in detTab: 862 for archiveName in archives["vvv"]: 863 conDict[detTab].append( 864 (archiveName, tuple(versions))) 865 else: 866 for archiveName in archives["other"]: 867 conDict[detTab].append( 868 (archiveName, tuple(versions))) 869 if verbose > 4: 870 print("dbt:", dbTables) 871 print("pid:", progIdDict) 872 print("dtd:", detTablesDict) 873 print(">cd:", conDict) 874 875 MetaDataResults = namedtuple( 876 "MetaDataResults", 877 "dateVers, mfID, fileName, progID, deprecated, fitsRows") 878 scienceFitsPrefix = cu4Db[0].lower() + "20" 879 880 startDate, endDate = self.obsCal.getDates(sem) 881 dateStrDict = defaultdict(list) 882 monthDict = defaultdict(dict) 883 for mjd in range(int(startDate.mjd), int(endDate.mjd) + 1): 884 date = mxTime.DateTimeFromMJD(mjd) 885 for versNum in self.versNums[sem]: 886 dateStrDict[versNum].append(fixPointEight( 887 dateVersFormat % ( 888 date.year, date.month, date.day, versNum))) 889 yearMonth = "%4d%02d" % (date.year, date.month) 890 if yearMonth in monthDict[versNum]: 891 monthDict[versNum][yearMonth].append( 892 fixPointEight(dateVersFormat % ( 893 
date.year, date.month, date.day, 894 versNum))) 895 else: 896 monthDict[versNum][yearMonth] = [ 897 fixPointEight(dateVersFormat % ( 898 date.year, date.month, date.day, 899 versNum))] 900 901 resultsMeta = defaultdict(set) 902 for archiveName, versNumList in set( 903 utils.unpackList(conDict.values())): 904 con = self.allArchives[archiveName].connection 905 # enable dirty read 906 con.enableDirtyRead() 907 Logger.addMessage( 908 "Getting metadata from %s..." % 909 '.'.join([con.server, con.database])) 910 for versNum in versNumList: 911 metaSelect = "dateVersStr, f.multiframeID, "\ 912 "f.fileName, p.programmeID, d.deprecated, "\ 913 "sum(cast(tableRows as bigint))" 914 metaFrom = "Multiframe m, FlatFileLookUp f, "\ 915 "MultiframeDetector d, ProgrammeFrame p" 916 metaWhere = ' AND '.join([ 917 "m.multiframeID=f.multiframeID", 918 "m.multiframeID=d.multiframeID", 919 "m.multiframeID=p.multiframeID", 920 "p.programmeID>=%d" % minProgID, 921 "f.dateVersStr in ('%s')" % "','".join( 922 dateStrDict[versNum]), 923 "m.fileName like '%%%s%%'" % scienceFitsPrefix, 924 "m.catName not like '%empty_cat%'", 925 "f.fileName NOT LIKE '%e20%'"]) 926 metaGroup = ' '.join([ 927 " GROUP BY dateVersStr, f.multiframeID,", 928 "f.fileName, p.programmeID, d.deprecated", 929 "ORDER BY dateVersStr, f.multiframeID,", 930 "f.fileName, p.programmeID"]) 931 resultsMeta[versNum].update(con.query( 932 selectStr=metaSelect, 933 fromStr=metaFrom, 934 whereStr=metaWhere + metaGroup, 935 ResultsTuple=MetaDataResults)) 936 if verbose > 4: 937 print("ResultsMeta:", len(resultsMeta[versNum])) 938 939 Logger.addMessage("Getting counts from detection tables...") 940 941 for detTab in sorted(conDict): 942 self.doStuff(cu4Files, existDetTabs, progIdDict, 943 conDict, detTab, scienceFitsPrefix, 944 dateStrDict, monthDict, resultsMeta) 945 946 for archiveName in self.additArchives: 947 try: 948 self.additArchives[archiveName].connection.goOffline() 949 except DbSession.DisconnectError: 950 pass 951 952 Logger.addMessage("Creating CU4 stats.....") 953 zeroStats = self.Statistics._make((len(self.surveys.fullList) + 1) * [0]) 954 955 for sem in semesters: 956 startDate, endDate = self.obsCal.getDates(sem) 957 for mjd in range(int(startDate.mjd), int(endDate.mjd) + 1): 958 date = mxTime.DateTimeFromMJD(mjd) 959 dateStr = dateFormat % (date.year, date.month, date.day) 960 for versNum in self.versNums[sem]: 961 #zeroStats = zeroStats._replace(versNum=versNum) 962 963 dateVersStr = ("%s_v%s") % (dateStr, versNum) 964 if dateVersStr in cu4Files.fitsDateDict: 965 stats = dict(cu4Files.fitsDateDict[dateVersStr]) 966 967 # calc stats for raw0 data 968 values = self.calcValues(sysc, stats, 0) 969 rawStats = self.Statistics(**values) 970 del values 971 972 # calc stats for raw128 data 973 values = self.calcValues(sysc, stats, 4) 974 rawStats128 = self.Statistics(**values) 975 del values 976 977 # calc stats for photometry data 978 values = self.calcValues(sysc, stats, 1) 979 photoStats = self.Statistics(**values) 980 del values 981 982 # calc stats for astrometry data 983 values = self.calcValues(sysc, stats, 2) 984 astroStats = self.Statistics(**values) 985 del values 986 987 # calc stats for catalogue counts 988 values = self.calcValues(sysc, stats, 3) 989 fitsStats = self.Statistics(**values) 990 del values 991 992 # calc stats for deprecated catalogue counts 993 values = self.calcValues(sysc, stats, 5) 994 fitsStats128 = self.Statistics(**values) 995 del values 996 997 # update cu4Stats 998 cu4Stats[date.date][versNum] = ( 999 (rawStats, 
photoStats, astroStats, fitsStats, 1000 rawStats128, fitsStats128)) 1001 1002 # no data available 1003 if rawStats.all == 0 and doneCU4[versNum]: 1004 cu4Stats[date.date][versNum] = ( 1005 (zeroStats, zeroStats, zeroStats, fitsStats, 1006 rawStats128, fitsStats128)) 1007 self.cuStats["cu4"].append(cu4Stats)
1008 1009 #-------------------------------------------------------------------------- 1010
    def calcValues(self, sysc, stats, idx):
        values = dict.fromkeys(
            ["all"] + self.surveys.fullList, 0)
        for survey in stats:
            if survey in values:
                values[survey] = stats[survey][idx]
            elif sysc.isVSA() and \
                    survey.startswith(("d2", "n0")):
                values["ddt_ns"] += stats[survey][idx]
            elif sysc.isOSA() and \
                    survey.startswith(("tech", "n0")):
                values["tech_ns"] += stats[survey][idx]
            else:
                values[self.surveys.fullList[-1]] += \
                    stats[survey][idx]
            values["all"] += stats[survey][idx]
        return values

    #--------------------------------------------------------------------------
    def queryTableR(self, con, tableName, dateList, sciencePrefix,
                    resultsTuple):
        """
        Query given detection table.

        @param con: DB connection.
        @type con: DbSession object
        @param tableName: detection table to query.
        @type tableName: str
        @param dateList: List of dateVersStr.
        @type dateList: list
        @param sciencePrefix: Prefix of science data FITS files.
        @type sciencePrefix: str
        @param resultsTuple: Named tuple to be used for the result.
        @type resultsTuple: namedtuple

        """
        try:
            sqlSelect = "dateVersStr, f.multiframeID, "\
                        "d.deprecated, count(d.multiframeID)"
            sqlFrom = "FlatFileLookUp f, %s d" % tableName
            sqlWhere = ' AND '.join([
                "f.multiframeID=d.multiframeID",
                "f.filename like '%" + sciencePrefix + "%'",
                "f.dateVersStr in ('%s')" % "','".join(dateList),
                "d.seqnum>0",
                "d.deprecated<128"])
            sqlGroup = " GROUP BY dateVersStr, f.multiframeID, d.deprecated"
            sqlOption = " OPTION (FORCE ORDER)"

            result = con.query(
                selectStr=sqlSelect, fromStr=sqlFrom,
                whereStr=sqlWhere + sqlGroup + sqlOption,
                ResultsTuple=resultsTuple)
        except odbc.ProgrammingError as error:
            Logger.addMessage(": ".join(
                [con.database, ("%s" % error).split('\n')[0]]))
            result = []
        return result

    #--------------------------------------------------------------------------
    def queryTablePA(self, con, tableName, dateList, sciencePrefix,
                     resultsTuple):
        """
        Query given detection table.

        @param con: DB connection.
        @type con: DbSession object
        @param tableName: detection table to query.
        @type tableName: str
        @param dateList: List of dateVersStr.
        @type dateList: list
        @param sciencePrefix: Prefix of science data FITS files.
        @type sciencePrefix: str
        @param resultsTuple: Named tuple to be used for the result.
        @type resultsTuple: namedtuple

        """
        try:
            sqlSelect = "dateVersStr, f.multiframeID, count(d.multiframeID)"
            sqlFrom = "FlatFileLookUp f, %s d" % tableName
            sqlWhere = ' AND '.join([
                "f.multiframeID=d.multiframeID",
                "f.filename like '%" + sciencePrefix + "%'",
                "f.dateVersStr in ('%s')" % "','".join(dateList),
                "d.seqnum>0"])
            sqlGroup = " GROUP BY dateVersStr, f.multiframeID"
            sqlOption = " OPTION (FORCE ORDER)"
            if verbose > 7:
                Logger.addMessage("SELECT %s FROM %s WHERE %s %s %s" % (
                    sqlSelect, sqlFrom, sqlWhere, sqlGroup, sqlOption))

            result = con.query(
                selectStr=sqlSelect, fromStr=sqlFrom,
                whereStr=sqlWhere + sqlGroup + sqlOption,
                ResultsTuple=resultsTuple)
        except odbc.ProgrammingError as error:
            Logger.addMessage(": ".join(
                [con.database, ("%s" % error).split('\n')[0]]))
            result = []
        return result

#------------------------------------------------------------------------------
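# --- Illustrative note (not part of the original module) ---
# A minimal sketch of the statement queryTableR() composes, assuming a
# hypothetical table name "lasDetection", science prefix "w20" and a single
# dateVersStr "20100305_v1":
#
#     SELECT dateVersStr, f.multiframeID, d.deprecated, count(d.multiframeID)
#     FROM FlatFileLookUp f, lasDetection d
#     WHERE f.multiframeID=d.multiframeID
#       AND f.filename like '%w20%'
#       AND f.dateVersStr in ('20100305_v1')
#       AND d.seqnum>0 AND d.deprecated<128
#     GROUP BY dateVersStr, f.multiframeID, d.deprecated
#     OPTION (FORCE ORDER)
#
# queryTablePA() differs only in dropping the deprecated column and filter,
# and in grouping by dateVersStr and multiframeID alone.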
class CalendarHTML(HTMLFile):
    """ HTML file containing ingest information.
    """
    sysc = SystemConstants()
    surveys = None
    archive = None

1123 - def createLegend(self):
1124 """ Write the legend into the HTML file. 1125 """ 1126 tableBeg = ''.join([ 1127 "<center><h1>Legend</h1></center>\n", 1128 "<TABLE border=0 bgcolor=#888888 width='100%' cellspacing=1", 1129 " cellpadding=1><col width='40px'>", "<col width='760px'>"]) 1130 tableLine = ''.join([ 1131 "<tr>", 1132 "<td class=h align='center'><a><img src='calimg/bluemag_t.gif'", 1133 " alt='Ready for inspection' width='25px' border='0'", 1134 " title='Ready for inspection'></a></td>" 1135 "<td class=v>This column shows whether the data are ready", 1136 " for inspection, ie. data are transferred, JPEGs exist,", 1137 " and image metadata are ingested.</td></tr>\n", 1138 "<tr>", 1139 "<td class=v align='center'><a></a></td>" 1140 "<td class=v>No tickmark means no metadata are ingested yet,", 1141 " so flat file access is not available for this date.</td></tr>\n", 1142 "<tr>", 1143 "<td class=v align='center'><a><img src='calimg/redtick_t.gif'", 1144 " alt='only flat files' width='20px' border='0'", 1145 " title='only flat files available'></td>" 1146 "<td class=v>No JPEGs have been generated yet, but image", 1147 " metadata are ingested so flat file access is available for", 1148 " this date.</td></tr>\n", 1149 "<tr>", 1150 "<td class=v align='center'><a><img src='calimg/yelltick_t.gif'", 1151 " alt='partially ingested' width='20px' border='0'", 1152 " title='JPEGs partially ingested'></a></td>", 1153 "<td class=v>Not all JPEGs have been generated,", 1154 " but image metadata are ingested so flat file access is", 1155 " available for this date.</td></tr>\n", 1156 "<tr>", 1157 "<td class=v align='center'><a><img src='calimg/limetick_t.gif'", 1158 " alt='partially ingested' width='20px' border='0'", 1159 " title='JPEGs partially ingested'></a></td>", 1160 "<td class=v>Not all JPEGs have been ingested,", 1161 " but image metadata are ingested so flat file access is", 1162 " available for this date.</td></tr>\n", 1163 "<tr>", 1164 "<td class=v align='center'><a><img src='calimg/greentick_t.gif'", 1165 " alt='ingested' width='20px' border='0' title='JPEGs ingested'>", 1166 "</a>", "</td>" 1167 "<td class=v>All JPEGs are calculated and ingested, image", 1168 " metadata are ingested.</td></tr>\n", 1169 "<tr height='3'></tr>", 1170 "<tr>", 1171 "<td class='nodata'>&nbsp;</td><td class='nodata'>No " 1172 " %s data taken.</td></tr>\n" % self.sysc.instrumentName, 1173 "<tr>", 1174 "<td class='h2' align='center'>CU1</td>", 1175 "<td class='v'>Number of files transferred from CASU. Subdivided", 1176 " by types: science frames, catalogue files,", 1177 (" (thereof tiles)," if self.sysc.isVSA() else ( 1178 " (uncorrected catalogues)," if self.sysc.isOSA() else '')), 1179 " all files (incl. calibration frames).", 1180 ("<span class='dred'> (less fixed catalogues than uncorrected)" 1181 "</span>" if self.sysc.isOSA() else ''), 1182 "</td></tr>\n", 1183 "<tr>", 1184 "<td class='h1' align='center'>CU2</td><td class='v'>Number", 1185 " of calculated JPEGs. Subdivided by number of JPEGs and", 1186 " number of FITS files. Normally each FITS file has", 1187 " %d JPEGs associated, one per" % (self.sysc.maxHDUs - 1), 1188 " extension.</td></tr>\n", 1189 "<tr>", 1190 "<td class='h2' align='center'>CU3</td>", 1191 "<td class='v'>Number of FITS files that have image metadata", 1192 " ingested. 
Subdivided by types: pixel data files,", 1193 " catalogue files", 1194 (" (thereof tiles)," if self.sysc.isVSA() else ( 1195 " (+uncorrected catalogues)," if self.sysc.isOSA() else ',')), 1196 " all files.</td></tr>\n", 1197 "<tr>", 1198 "<td class='h1' align='center'>CU4</td>", 1199 "<td class='v'>Number of catalogue data objects that have been", 1200 " ingested: <span class='dred'>no Raw data ingested yet</span>;", 1201 " <span class='orange'>mismatch beetween Photometry and Raw ingests</span>;", 1202 " <span class='yellow'>mismatch between Raw and number suggested by cat size</span>;", 1203 " <span class='orchid'>mismatch in Raw deprecation</span>;", 1204 " <span class='cyan'>mismatch between MultiframeDetector and Raw deprecation</span>." 1205 " Subdivided by survey: %s, " % ', '.join(self.surveys.description), 1206 " and the sum of the catalogue data objects of these surveys.</td></tr>" 1207 ]) 1208 tableEnd = "</TABLE>" 1209 self.writetheline(tableBeg) 1210 self.writetheline(tableLine) 1211 self.writetheline(tableEnd)
1212 1213 #-------------------------------------------------------------------------- 1214
1215 - def getCu1Line(self, date, versStr, cuStats):
1216 """ 1217 Create HTML for CU1 data. 1218 1219 @param date: The date of the data for this line. 1220 @type date: datetime 1221 @param versStr: The version of the data for this line. 1222 @type versStr: str 1223 @param cuStats: Dictionary containing the data. 1224 @type cuStats: dict 1225 1226 @return: The HTML code for this line. 1227 @rtype: str 1228 1229 """ 1230 cuNum = 1 1231 try: 1232 data = cuStats["cu%d" % cuNum][0][date.date][versStr] 1233 self.totalData["cu%d" % cuNum][versStr] += array(data) 1234 # pixel and catalogues 1235 dataLine = ''.join( 1236 ["<td class='border'>", 1237 "</td><td class=%s align='right'>" % self.rowStyle[cuNum % 2], 1238 ("<div class='padd'>%s</div></td>") % versStr, 1239 "<td class=%s align='right'>" % self.rowStyle[cuNum % 2], 1240 "<div class='padd'>%d</div></td>" % data[0], 1241 "<td class=%s align='right'>" % self.rowStyle[cuNum % 2], 1242 "<div class='padd'>%d</div></td>" % data[1]]) 1243 # tiles or uncorrected catalogues 1244 if self.sysc.isVSA(): 1245 dataLine = ''.join( 1246 [dataLine, 1247 "<td class=%s align='right'>" % self.rowStyle[cuNum % 2], 1248 "<div class='padd'>%d</div></td>" % data[2]]) 1249 elif self.sysc.isOSA(): 1250 catOutData = str(data[2]) 1251 if data[1] == data[2]: 1252 catSubTableStyle = 'green' 1253 elif data[1] < data[2]: 1254 catSubTableStyle = 'dred' 1255 catOutData += " (%+d)" % (data[2] - data[1]) 1256 else: 1257 catSubTableStyle = 'dgreen' 1258 catOutData += " (%+d)" % (data[2] - data[1]) 1259 dataLine = ''.join( 1260 [dataLine, 1261 "<td class=%s align='right'>" % catSubTableStyle, 1262 "<div class='padd'>%s</div></td>" % catOutData]) 1263 # all files 1264 dataLine = ''.join( 1265 [dataLine, 1266 "<td class=%s align='right'>" % self.rowStyle[cuNum % 2], 1267 "<div class='padd'>%d " % data[3], 1268 "<font color='#7d7d7d'>[%d]</font></div></td>" % data[4]]) 1269 1270 except (LookupError, TypeError): 1271 if float(versStr) > 0: 1272 raise 1273 try: 1274 cu2Data = cuStats["cu2"][0][date.date][versStr] 1275 dataLine = ''.join( 1276 ["<td class='border'></td>", 1277 "</td><td class=%s align='right'>" % 1278 self.rowStyle[cuNum % 2], 1279 ("<div class='padd'>%s</div></td>") % versStr, 1280 "<td class=%s align='right'>" % self.rowStyle[cuNum % 2], 1281 "<div class='padd'>0</div></td>", 1282 "<td class=%s align='right'>" % self.rowStyle[cuNum % 2], 1283 "<div class='padd'>0</div></td>", 1284 "<td class=%s align='right'>" % self.rowStyle[cuNum % 2], 1285 "<div class='padd'>0</div></td>"]) 1286 except: 1287 if float(versStr) > 0: 1288 raise 1289 dataLine = ''.join( 1290 ["<td class='border'></td>", 1291 "<td class=%s>&nbsp;</td>" % self.rowStyle[cuNum % 2], 1292 "<td class=%s>&nbsp;</td>" % self.rowStyle[cuNum % 2], 1293 "<td class=%s>&nbsp;</td>" % self.rowStyle[cuNum % 2], 1294 "<td class=%s>&nbsp;</td>" % self.rowStyle[cuNum % 2]]) 1295 if self.sysc.isVSA() or self.sysc.isOSA(): 1296 dataLine = ''.join( 1297 [dataLine, 1298 "<td class=%s>&nbsp;</td>" % self.rowStyle[cuNum % 2]]) 1299 return dataLine
1300 1301 #-------------------------------------------------------------------------- 1302
1303 - def getCu2Line(self, date, versStr, cuStats):
1304 """ 1305 Create HTML for CU2 data. 1306 1307 @param date: The date of the data for this line. 1308 @type date: datetime 1309 @param versStr: The version of the data for this line. 1310 @type versStr: str 1311 @param cuStats: Dictionary containing the data. 1312 @type cuStats: dict 1313 1314 @return: The HTML code for this line. 1315 @rtype: str 1316 1317 """ 1318 cuNum = 2 1319 try: 1320 subTableStyle = self.rowStyle[cuNum % 2] 1321 data = cuStats["cu%d" % cuNum][0][date.date][versStr] 1322 outData = str(data[1]) 1323 self.totalData["cu%d" % cuNum][versStr] += array(data[:2]) 1324 try: 1325 cu1Data = cuStats["cu1"][0][date.date][versStr] 1326 #for cu1d in cu1Data: 1327 #if data[0] == cu1Data[0]: 1328 if data[1] == cu1Data[3] - cu1Data[1]: 1329 subTableStyle = 'green' 1330 elif data[1] > cu1Data[3] - cu1Data[1]: 1331 subTableStyle = 'dgreen' 1332 outData += " (%+.2f)" % (data[1] - cu1Data[3] + cu1Data[1]) 1333 else: 1334 subTableStyle = 'red' 1335 outData += " (%+.2f)" % (data[1] - cu1Data[3] + cu1Data[1]) 1336 except (LookupError, TypeError): 1337 pass 1338 1339 try: 1340 dataInDBcu2 = cuStats["cu2"][0][date.date][versStr][2] 1341 if 0.5 < dataInDBcu2 < 1: 1342 print(date.date, versStr, cuStats["cu2"][0][date.date][ 1343 versStr]) 1344 except LookupError: 1345 dataInDBcu2 = -1 1346 1347 try: 1348 if cuStats["cu3"][0][date.date][versStr]: 1349 dataInDBcu3 = True 1350 else: 1351 dataInDBcu3 = False 1352 except LookupError: 1353 dataInDBcu3 = False 1354 1355 dataLineTickTempl = ''.join([ 1356 "<td class=%s align='center'><img src='%s' alt='%s'", 1357 " width='20px' border='0' title='%s'></a></td>"]) 1358 1359 if dataInDBcu2 == 1 and dataInDBcu3: 1360 if subTableStyle == 'red': 1361 tickImg = "calimg/yelltick_t.gif" 1362 tickTitle = "JPGs partially generated" 1363 else: 1364 tickImg = "calimg/greentick_t.gif" 1365 tickTitle = "JPGs ingested" 1366 dataLineTick = dataLineTickTempl % ( 1367 self.rowStyle[2], tickImg, tickTitle, tickTitle) 1368 1369 elif 0 <= dataInDBcu2 < 1 and dataInDBcu3: 1370 tickImg = "calimg/limetick_t.gif" 1371 tickTitle = "JPGs partially ingested (%.2f)" % dataInDBcu2 1372 dataLineTick = dataLineTickTempl % ( 1373 self.rowStyle[2], tickImg, tickTitle, tickTitle) 1374 1375 elif dataInDBcu2 < 0 and dataInDBcu3: 1376 if subTableStyle != self.rowStyle[cuNum % 2]: 1377 tickImg = "calimg/yelltick_t.gif" 1378 tickTitle = "JPGs partially generated" 1379 dataLineTick = dataLineTickTempl % ( 1380 self.rowStyle[2], tickImg, tickTitle, tickTitle) 1381 else: 1382 dataLineTick = ''.join([ 1383 "<td class=%s>&nbsp;</td>" % self.rowStyle[2]]) 1384 1385 else: 1386 dataLineTick = ''.join([ 1387 "<td class=%s>&nbsp;</td>" % self.rowStyle[2]]) 1388 1389 dataLine = ''.join( 1390 ["<td class='border'></td>", 1391 "<td class=%s align='right'>" % self.rowStyle[cuNum % 2], 1392 ("<div class='padd'>%s</div></td>") % versStr, 1393 "<td class=%s align='right'>" % self.rowStyle[cuNum % 2], 1394 "<div class='padd'>%d</div></td>" % data[0], 1395 "<td class=%s align='right'>" % subTableStyle, 1396 "<div class='padd'>%s</div></td>" % outData]) 1397 except (LookupError, TypeError): 1398 try: 1399 data = cuStats["cu3"][0][date.date][versStr] 1400 if data: 1401 tickImg = "calimg/redtick_t.gif" 1402 tickTitle = "no JPGs generated" 1403 dataLineTick = ''.join([ 1404 "<td class=%s align='center'>" % self.rowStyle[2], 1405 "<img src='%s' alt='%s'" % (tickImg, tickTitle), 1406 " width='20px' border='0' title='%s'></a>" % tickTitle, 1407 "</td>"]) 1408 else: 1409 dataLineTick = ''.join([ 1410 "<td 
class=%s>&nbsp;</td>" % self.rowStyle[2]]) 1411 1412 except LookupError: 1413 dataLineTick = ''.join([ 1414 "<td class=%s>&nbsp;</td>" % self.rowStyle[2]]) 1415 try: 1416 cu1Data = cuStats["cu1"][0][date.date][versStr] 1417 dataLine = ''.join( 1418 ["<td class='border'></td>", 1419 "</td><td class=%s align='right'>" % 1420 self.rowStyle[cuNum % 2], 1421 ("<div class='padd'>%s</div></td>") % versStr, 1422 "<td class=%s align='right'>" % self.rowStyle[cuNum % 2], 1423 "<div class='padd'>0</div></td>", 1424 "<td class=%s align='right'>" % self.rowStyle[cuNum % 2], 1425 "<div class='padd'>0</div></td>"]) 1426 except: 1427 dataLine = ''.join( 1428 ["<td class='border'></td>", 1429 "<td class=%s>&nbsp;</td>" % self.rowStyle[cuNum % 2], 1430 "<td class=%s>&nbsp;</td>" % self.rowStyle[cuNum % 2], 1431 "<td class=%s>&nbsp;</td>" % self.rowStyle[cuNum % 2]]) 1432 return dataLineTick, dataLine
1433 1434 #-------------------------------------------------------------------------- 1435
1436 - def getCu3Line(self, date, versStr, cuStats):
1437 """ 1438 Create HTML for CU3 data. 1439 1440 @param date: The date of the data for this line. 1441 @type date: datetime 1442 @param versStr: The version of the data for this line. 1443 @type versStr: str 1444 @param cuStats: Dictionary containing the data. 1445 @type cuStats: dict 1446 1447 @return: The HTML code for this line. 1448 @rtype: str 1449 1450 """ 1451 cuNum = 3 1452 try: 1453 pixSubTableStyle = self.rowStyle[cuNum % 2] 1454 catSubTableStyle = self.rowStyle[cuNum % 2] 1455 tileSubTableStyle = self.rowStyle[cuNum % 2] 1456 data = cuStats["cu%d" % cuNum][0][date.date][versStr] 1457 pixOutData = str(data[0]) 1458 catOutData = str(data[1]) 1459 tileOutData = str(data[2]) 1460 self.totalData["cu%d" % cuNum][versStr] += array(data) 1461 try: 1462 cu1Data = cuStats["cu1"][0][date.date][versStr] 1463 #if data[0] == cu1Data[0]: 1464 # pix 1465 if data[0] == cu1Data[3] - cu1Data[1]: 1466 pixSubTableStyle = 'green' 1467 elif data[0] > cu1Data[3] - cu1Data[1]: 1468 pixSubTableStyle = 'dgreen' 1469 pixOutData += " (%+d)" % (data[0] - cu1Data[3] + cu1Data[1]) 1470 else: 1471 pixSubTableStyle = 'red' 1472 pixOutData += " (%+d)" % (data[0] - cu1Data[3] + cu1Data[1]) 1473 # cat 1474 if data[1] == cu1Data[1]: 1475 catSubTableStyle = 'green' 1476 elif data[1] > cu1Data[1]: 1477 catSubTableStyle = 'dgreen' 1478 catOutData += " (%+d)" % (data[1] - cu1Data[1]) 1479 else: 1480 catSubTableStyle = 'red' 1481 catOutData += " (%+d)" % (data[1] - cu1Data[1]) 1482 # tile 1483 if data[2] == cu1Data[2]: 1484 tileSubTableStyle = 'green' 1485 elif data[2] > cu1Data[2]: 1486 tileSubTableStyle = 'dgreen' 1487 tileOutData += " (%+d)" % (data[2] - cu1Data[2]) 1488 else: 1489 tileSubTableStyle = 'red' 1490 tileOutData += " (%+d)" % (data[2] - cu1Data[2]) 1491 except (LookupError, TypeError): 1492 pass 1493 dataLine = ''.join( 1494 ["<td class='border'></td>", 1495 "<td class=%s align='right'>" % self.rowStyle[cuNum % 2], 1496 ("<div class='padd'>%s</div></td>") % versStr, 1497 "<td class=%s align='right'>" % pixSubTableStyle, 1498 "<div class='padd'>%s</div></td>" % pixOutData, 1499 "<td class=%s align='right'>" % catSubTableStyle, 1500 "<div class='padd'>%s</div></td>" % catOutData]) 1501 if self.sysc.isVSA(): 1502 dataLine = ''.join( 1503 [dataLine, 1504 "<td class=%s align='right'>" % tileSubTableStyle, 1505 "<div class='padd'>%s</div></td>" % tileOutData]) 1506 dataLine = ''.join( 1507 [dataLine, 1508 "<td class=%s align='right'>" % self.rowStyle[cuNum % 2], 1509 "<div class='padd'>%d</div></td>" % data[3]]) 1510 except (LookupError, TypeError): 1511 try: 1512 cu1Data = cuStats["cu1"][0][date.date][versStr] 1513 dataLine = ''.join( 1514 ["<td class='border'></td>", 1515 "</td><td class=%s align='right'>" % 1516 self.rowStyle[cuNum % 2], 1517 ("<div class='padd'>%s</div></td>") % versStr, 1518 "<td class=%s align='right'>" % self.rowStyle[cuNum % 2], 1519 "<div class='padd'>0</div></td>", 1520 "<td class=%s align='right'>" % self.rowStyle[cuNum % 2], 1521 "<div class='padd'>0</div></td>", 1522 "<td class=%s align='right'>" % self.rowStyle[cuNum % 2], 1523 "<div class='padd'>0</div></td>"]) 1524 except: 1525 dataLine = ''.join( 1526 ["<td class='border'></td>", 1527 "<td class=%s>&nbsp;</td>" % self.rowStyle[cuNum % 2], 1528 "<td class=%s>&nbsp;</td>" % self.rowStyle[cuNum % 2], 1529 "<td class=%s>&nbsp;</td>" % self.rowStyle[cuNum % 2], 1530 "<td class=%s>&nbsp;</td>" % self.rowStyle[cuNum % 2]]) 1531 if self.sysc.isVSA(): 1532 dataLine = ''.join( 1533 [dataLine, 1534 "<td class=%s>&nbsp;</td>" % 
self.rowStyle[cuNum % 2]]) 1535 return dataLine
1536 1537 #-------------------------------------------------------------------------- 1538
1539 - def getCu4Line(self, date, versStr, cuStats):
1540          """
1541          Create HTML for CU4 data.
1542  
1543          @param date: The date of the data for this line.
1544          @type date: datetime
1545          @param versStr: The version of the data for this line.
1546          @type versStr: str
1547          @param cuStats: Dictionary containing the data.
1548          @type cuStats: dict
1549  
1550          @return: The HTML code for this line.
1551          @rtype: str
1552          """
1553          cuNum = 4
1554          try:
1555              progSubTableStyle = dict.fromkeys(
1556                  ["versNum", "all"] + self.surveys.fullList,
1557                  self.rowStyle[cuNum % 2])
1558              dataRaw = cuStats["cu%d" % cuNum][0][date.date][versStr][0]
1559              dataRaw128 = cuStats["cu%d" % cuNum][0][date.date][versStr][4]
1560              self.totalData["cu%d" % cuNum][versStr] += array(dataRaw)
1561              dataPhoto = cuStats["cu%d" % cuNum][0][date.date][versStr][1]
1562              dataAstro = cuStats["cu%d" % cuNum][0][date.date][versStr][2]
1563              dataNumRows = cuStats["cu%d" % cuNum][0][date.date][versStr][3]
1564              dataNumRows128 = cuStats["cu%d" % cuNum][0][date.date][versStr][5]
1565              progOutData = dict.fromkeys(
1566                  ["versNum", "all"] + self.surveys.fullList, 0)
1567  
1568              try:
1569                  #if dataRaw.versNum == dataPhoto.versNum \
1570                  #        == dataAstro.versNum == dataNumRows.versNum:
1571                  progOutData["versNum"] = versStr
1572                  for fname in dataRaw._fields:
1573                      #mouseOver = "R: %d\nP: %d\nA: %d\nF: %d" % (
1574                      #    getattr(dataRaw, fname),
1575                      #    getattr(dataPhoto,fname),
1576                      #    getattr(dataAstro,fname),
1577                      #    getattr(dataNumRows,fname))
1578                      if getattr(dataRaw, fname) \
1579                              == getattr(dataPhoto, fname) \
1580                              == getattr(dataAstro, fname) \
1581                              == getattr(dataNumRows, fname):
1582                          #mouseOver = ""
1583                          #if fname != "versNum":
1584                          progSubTableStyle[fname] = 'green'
1585                          progOutData[fname] = "%d" % getattr(
1586                              dataRaw, fname)
1587                          #else:
1588                          #    progOutData[fname] = getattr(dataRaw,fname)
1589                      elif getattr(dataRaw, fname) < getattr(dataPhoto, fname)\
1590                              and getattr(dataPhoto, fname) == getattr(dataAstro, fname)\
1591                              and getattr(dataRaw128, fname) > 0:
1592                          progSubTableStyle[fname] = 'orchid'
1593                          progOutData[fname] = "%d (%+d/%+d) (P%+d A%+d N%+d)" % (
1594                              getattr(dataRaw, fname),
1595                              getattr(dataRaw, fname) - getattr(dataPhoto, fname),
1596                              getattr(dataRaw, fname) - getattr(dataRaw128, fname),
1597                              getattr(dataPhoto, fname),
1598                              getattr(dataAstro, fname),
1599                              getattr(dataNumRows, fname))
1600                      elif getattr(dataRaw, fname) != getattr(dataPhoto, fname)\
1601                              or getattr(dataRaw, fname) != getattr(dataAstro, fname)\
1602                              or getattr(dataPhoto, fname) != getattr(dataAstro, fname):
1603                          progSubTableStyle[fname] = 'orange'
1604                          #progOutData[fname] = "%d (%+d)" % (
1605                          progOutData[fname] = "%d (%+d) (P%+d A%+d N%+d)" % (
1606                              getattr(dataRaw, fname),
1607                              getattr(dataRaw, fname) - getattr(dataPhoto, fname),
1608                              getattr(dataPhoto, fname),
1609                              getattr(dataAstro, fname),
1610                              getattr(dataNumRows, fname))
1611                      elif getattr(dataRaw, fname) == 0 \
1612                              and getattr(dataNumRows, fname) > 0:
1613                          progSubTableStyle[fname] = 'dred'
1614                          progOutData[fname] = "%d (%+d)" % (
1615                              getattr(dataRaw, fname),
1616                              getattr(dataRaw, fname) - getattr(dataNumRows, fname))
1617                      elif getattr(dataRaw, fname) < getattr(dataNumRows, fname):
1618                          progSubTableStyle[fname] = 'yellow'
1619                          progOutData[fname] = "%d (%+d)" % (
1620                              getattr(dataRaw, fname),
1621                              getattr(dataRaw, fname) - getattr(dataNumRows, fname))
1622                      elif getattr(dataRaw, fname) > getattr(dataNumRows, fname)\
1623                              and getattr(dataRaw, fname) \
1624                              == getattr(dataPhoto, fname) \
1625                              == getattr(dataAstro, fname):
1626                          progSubTableStyle[fname] = 'cyan'
1627                          progOutData[fname] = "%d (P%+d A%+d N%+d)" % (
1628                              getattr(dataRaw, fname),
1629                              getattr(dataPhoto, fname),
1630                              getattr(dataAstro, fname),
1631                              getattr(dataNumRows, fname))
1632                      else:
1633                          progSubTableStyle[fname] = 'dgreen'
1634                          progOutData[fname] = "%d (P%+d A%+d N%+d)" % (
1635                              getattr(dataRaw, fname),
1636                              getattr(dataPhoto, fname),
1637                              getattr(dataAstro, fname),
1638                              getattr(dataNumRows, fname))
1639                      if getattr(dataNumRows128, fname) > 0:
1640                          progOutData[fname] = progOutData[fname] + "{D%+d}" % (
1641                              getattr(dataNumRows128, fname))
1642              except (LookupError, TypeError):
1643                  pass
1644  
1645              # for fname in dataRaw._fields:
1646              #     print fname,":",progSubTableStyle,"::",mouseOver,"::",progOutData
1647              #
1648              # dataLine = "<td class='border'></td>" + ''.join(
1649              #     ["<td class=%s align='right'><div %s class='padd'>%s</td>" %
1650              #      (progSubTableStyle[fname], mouseOver[fname],
1651              #       progOutData[fname])
1652  
1653              dataLine = ''.join([
1654                  "<td class='border'></td>",
1655                  "<td class=%s align='right'><div class='padd'>%s</td>" %
1656                  (progSubTableStyle["versNum"], progOutData["versNum"]),
1657                  ''.join(
1658                      ["<td class=%s align='right'><div class='padd'>%s</td>" %
1659                       (progSubTableStyle[fname],
1660                        progOutData[fname])
1661                       for fname in dataRaw._fields])])
1662          except (LookupError, TypeError):
1663              dataLine = "<td class='border'></td>" + ''.join(
1664                  ["<td class=%s>&nbsp;</td>" % self.rowStyle[cuNum % 2]] *
1665                  (len(self.surveys.fullList) + 2))
1666          return dataLine
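Note: the cell colouring above boils down to comparing four per-programme counts (raw detections, photometry, astrometry and ingested rows). The following self-contained sketch is illustrative only (the Counts tuple, the classifyCounts helper and the reduced rule set are assumptions, not part of this module), but it shows the kind of mapping getCu4Line applies when choosing a CSS class for a calendar cell.

# Illustrative sketch only: a simplified version of the per-column
# consistency check performed in getCu4Line() above.
from collections import namedtuple

Counts = namedtuple("Counts", "raw photo astro numRows")

def classifyCounts(c):
    """Return an illustrative CSS class for one programme's counts."""
    if c.raw == c.photo == c.astro == c.numRows:
        return 'green'      # everything agrees
    if c.raw != c.photo or c.raw != c.astro or c.photo != c.astro:
        return 'orange'     # raw/photometry/astrometry disagree
    if c.raw == 0 and c.numRows > 0:
        return 'dred'       # rows ingested but no raw detections
    if c.raw < c.numRows:
        return 'yellow'     # fewer raw detections than ingested rows
    if c.raw > c.numRows and c.raw == c.photo == c.astro:
        return 'cyan'       # consistent, but not all rows ingested yet
    return 'dgreen'

if __name__ == "__main__":
    print(classifyCounts(Counts(10, 10, 10, 10)))   # green
    print(classifyCounts(Counts(10, 9, 10, 10)))    # orange

The real method additionally folds in the deprecated-row counts (dataRaw128, dataNumRows128) and appends a {D...} marker when deprecated rows are present.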
1667  
1668      #--------------------------------------------------------------------------
1669  
1670 - def createBigTable(self, cuList, semester, cuStatistics):
1671 """Create and fill table for all CUs. 1672 1673 @param cuList: List of CUs the table is filled for. 1674 @type cuList: list 1675 @param semester: The semester the HTML is created for. 1676 @type semester: str 1677 @param cuStatistics: The collected information for the CUs. 1678 @type cuStatistics: cuStats object 1679 1680 """ 1681 monthOld = 0 1682 startDate, endDate = cuStatistics.obsCal.getDates(semester) 1683 self.totalData = {} 1684 1685 for cuNum in range(1, 5): 1686 self.totalData["cu" + str(cuNum)] = {} 1687 1688 # look up available versions and create list 1689 versDict = defaultdict(list) 1690 for mjd in xrange(int(startDate.mjd), int(endDate.mjd) + 1): 1691 versSet = set() 1692 date = mxTime.DateTimeFromMJD(mjd) 1693 versDict[date] = ['-1'] 1694 1695 for versStr in cuStatistics.versNums[semester]: 1696 dataAvail = 0 1697 for cuNum in [1, 3]: 1698 try: 1699 if cuStatistics.cuStats["cu%d" % cuNum][0][ 1700 date.date][versStr]: 1701 statidx = (-1 if "VSA" in self.archive else 0) 1702 dataAvail += cuStatistics.cuStats["cu%d" % cuNum][ 1703 0][date.date][versStr][statidx] 1704 except LookupError: 1705 pass 1706 if dataAvail > 0: 1707 versSet.add(versStr) 1708 if date not in cuStatistics.obsCal.notObsDates()[semester] \ 1709 and versSet: 1710 versDict[date] = sorted(versSet) 1711 versList = [x for x in sorted(set(utils.unpackList(versDict.values()))) 1712 if x != '-1'] 1713 1714 versDateDict = utils.invertDict(versDict, True) 1715 # initialise totalData 1716 for versStr in versList: 1717 if versStr not in self.totalData["cu1"]: 1718 self.totalData["cu1"][versStr] = array([0, 0, 0, 0, 0]) 1719 if versStr not in self.totalData["cu2"]: 1720 self.totalData["cu2"][versStr] = array([0, 0]) 1721 if versStr not in self.totalData["cu3"]: 1722 self.totalData["cu3"][versStr] = array([0, 0, 0, 0]) 1723 if versStr not in self.totalData["cu4"]: 1724 self.totalData["cu4"][versStr] = array([0] * ( 1725 len(self.surveys.fullList) + 1)) 1726 1727 for mjd in xrange(int(startDate.mjd), int(endDate.mjd) + 1): 1728 date = mxTime.DateTimeFromMJD(mjd) 1729 if date in cuStatistics.obsCal.notObsDates()[semester]: 1730 self.rowStyle = ['nodata', 'nodata', 'nodata'] 1731 else: 1732 self.rowStyle = ['v1', 'v2', 'v'] 1733 headStyle = ['h1', 'h2', 'h'] 1734 if monthOld != date.month: 1735 if monthOld != 0: 1736 tableEnd = "</TABLE>" 1737 self.writetheline(tableEnd) 1738 monthOld = date.month 1739 titleLine = ''.join(["<center><h1>", 1740 mxTime.Month[date.month], "&nbsp;", 1741 str(date.year), "&nbsp;(", semester, 1742 ")</h1></center>"]) 1743 self.writetheline(titleLine) 1744 if self.sysc.isVSA(): 1745 globColSpan = {1: [5, 0], 2: [3, 0], 3: [5, 0], 1746 4: [len(self.surveys.fullList) + 2, 0]} 1747 elif self.sysc.isOSA(): 1748 globColSpan = {1: [5, 0], 2: [3, 0], 3: [4, 0], 1749 4: [len(self.surveys.fullList) + 2, 0]} 1750 else: 1751 globColSpan = {1: [4, 0], 2: [3, 0], 3: [4, 0], 1752 4: [len(self.surveys.fullList) + 2, 0]} 1753 1754 for cuNum in globColSpan: 1755 globColSpan[cuNum][1] = globColSpan[cuNum][0] * [70] 1756 globColSpan[cuNum][1][0] -= 20 1757 if cuNum == 1: 1758 globColSpan[cuNum][1][-1] += 30 1759 1760 totalWidth = 64 1761 for cuNum in cuList: 1762 totalWidth += sum(globColSpan[cuNum][1]) + 1 1763 tableBeg = ''.join(["<TABLE border=0 bgcolor=#888888", 1764 " width=%dpx'" % totalWidth, 1765 " cellspacing=1", " cellpadding=1>", 1766 "<col width='42px'>", "<col width='22px'>"]) 1767 for cuNum in cuList: 1768 colSpan = ''.join(["<col width='%dpx'>" % x 1769 for x in globColSpan[cuNum][1]]) 1770 
tableBeg = ','.join([tableBeg, ''.join([ 1771 "<col width='1px'>", "<colgroup>", 1772 colSpan, "</colgroup>"])]) 1773 if cuNum == 4: 1774 tableBeg += "<col width='42px'>" 1775 tableBeg = ''.join([tableBeg, 1776 "\n<tr><th class=%s>" % headStyle[2], 1777 "<div class='padd'>Date</div></th>", 1778 "<th class=%s" % headStyle[2], 1779 " align='center'><a>", 1780 "<img src='calimg/bluemag_t.gif'", 1781 " alt='Ready for inspection' width='20px'", 1782 " border='0'", 1783 " title='Ready for inspection'>", 1784 "</a></th>"]) 1785 for cuNum in cuList: 1786 if cuNum == 1: 1787 tableBeg += ''.join([ 1788 "<th></th><th class=%s" % headStyle[cuNum % 2], 1789 " colspan=%d>" % globColSpan[cuNum][0], 1790 "<div class='padd'>CU1 (transfer from CASU)", 1791 "<br>(version | #sci | #cat |", 1792 (" (#tile) |" if self.sysc.isVSA() else ( 1793 " (#rawcat) |" if self.sysc.isOSA() else '')), 1794 " #all <font color='#7d7d7d'>[FFLU]</font>)<br>", 1795 "</div></th>"]) 1796 elif cuNum == 2: 1797 tableBeg += ''.join([ 1798 "<th></th><th class=%s" % headStyle[cuNum % 2], 1799 " colspan=%d>" % globColSpan[cuNum][0], 1800 "<div class='padd'>CU2 (JPEGs calculated)", 1801 "<br>(version | #jpgs | #files)<br>", 1802 "</div></th>"]) 1803 elif cuNum == 3: 1804 tableBeg += ''.join([ 1805 "<th></th><th class=%s" % headStyle[cuNum % 2], 1806 " colspan=%d>" % globColSpan[cuNum][0], 1807 "<div class='padd'>CU3 (Image metadata ingested)", 1808 "<br>(version | #pix | #cat |", 1809 (" (#tile) |" if self.sysc.isVSA() else ''), 1810 " #all)<br>", 1811 "</div></th>"]) 1812 elif cuNum == 4: 1813 headProgs = ' | '.join(["#%s" % key.replace('_', '&amp;') 1814 for key in self.surveys.fullList]) 1815 tableBeg += ''.join([ 1816 "<th></th><th class=%s" % headStyle[cuNum % 2], 1817 " colspan=%d>" % globColSpan[cuNum][0], 1818 "<div class='padd'>CU4 (Detections ingested)<br>", 1819 "(version | %s | #all)" % headProgs, 1820 "<br></div></th>", "<th class=%s>" % headStyle[2], 1821 "<div class='padd'>Date</div></th>"]) 1822 else: 1823 tableBeg += ''.join([ 1824 "<th class=%s>" % headStyle[2], 1825 "<div class='padd'>CU%d" % cuNum, 1826 "</div></th>"]) 1827 tableBeg += "</tr>" 1828 self.writetheline(tableBeg) 1829 1830 # set the rowspan of the date 1831 rowspan = (1 if date in cuStatistics.obsCal.notObsDates()[semester] 1832 else len(versDict[date])) 1833 1834 dateCol = ''.join(["<tr class=%s>" % self.rowStyle[2], 1835 "<td class=%s" % self.rowStyle[2], 1836 " rowspan=%d" % rowspan, 1837 " align='right'><div class='padd'>", 1838 str(date.day), "&nbsp;</div></td>"]) 1839 self.writetheline(dateCol) 1840 dataLineTick = ''.join(["<td class=", self.rowStyle[2], ">", 1841 "&nbsp;", "</td>"]) 1842 dataLineCU = {} 1843 for cuNum in cuList: 1844 dataLineCU[cuNum] = '' 1845 1846 for versStr in versDict[date]: 1847 for cuNum in cuList: 1848 if cuNum == 1: 1849 dataLineCU[1] = self.getCu1Line( 1850 date, versStr, cuStatistics.cuStats) 1851 elif cuNum == 2: 1852 dataLineTick, dataLineCU[2] = self.getCu2Line( 1853 date, versStr, cuStatistics.cuStats) 1854 elif cuNum == 3: 1855 dataLineCU[3] = self.getCu3Line( 1856 date, versStr, cuStatistics.cuStats) 1857 elif cuNum == 4: 1858 dataLineCU[4] = self.getCu4Line( 1859 date, versStr, cuStatistics.cuStats) 1860 1861 # write date 1862 dateCu4Col = ( 1863 ''.join([ 1864 "<td class=%s rowspan=%d" % (self.rowStyle[2], rowspan), 1865 " align='right'>", 1866 "<div class='padd'>%d&nbsp;</div></td>" % date.day]) 1867 if versStr == min(versDict[date]) 1868 or versStr == '-1' else '') 1869 1870 self.writetheline(dataLineTick) 1871 
for cuNum in cuList: 1872 self.writetheline(dataLineCU[cuNum]) 1873 if cuNum == 4: 1874 self.writetheline(dateCu4Col) 1875 self.writetheline("</tr>") 1876 1877 self.writetheline("<tr></tr>") 1878 1879 # write total amount 1880 self.rowStyle = ['v1', 'v2', 'v'] 1881 dateCol = ''.join(["<tr class=", headStyle[2], ">", 1882 "<th class=", headStyle[2], 1883 " rowspan=%d" % len(versList), 1884 " align='right'><div class='padd'>", 1885 "total", "&nbsp;</div></th>"]) 1886 self.writetheline(dateCol) 1887 1888 dataLineTotal = {} 1889 for versStr in versList: 1890 data = list(self.totalData["cu1"][versStr]) 1891 tickCol = ''.join([ 1892 "<th class=", headStyle[2], ">", 1893 "%dd" % (len(versDateDict[versStr])), 1894 "</th>"]) 1895 dataLineTotal[1] = ''.join( 1896 ["<td class='border'></td>", 1897 "<td class=", self.rowStyle[1], 1898 " align='right'><div class='padd'>", 1899 versStr, "</div></td>", 1900 "<td class=", self.rowStyle[1], 1901 " align='right'><div class='padd'>", 1902 str(int(data[0])), "</div></td>", 1903 "<td class=", self.rowStyle[1], 1904 " align='right'><div class='padd'>", 1905 str(int(data[1])), "</div></td>"]) 1906 if self.sysc.isVSA() or self.sysc.isOSA(): 1907 dataLineTotal[1] = ''.join( 1908 [dataLineTotal[1], 1909 "<td class=", self.rowStyle[1], 1910 " align='right'><div class='padd'>", 1911 str(int(data[2])), "</div></td>"]) 1912 dataLineTotal[1] = ''.join( 1913 [dataLineTotal[1], 1914 "<td class=", self.rowStyle[1], 1915 " align='right'><div class='padd'>", 1916 str(int(data[3])), "</div></td>"]) 1917 data = list(self.totalData["cu2"][versStr]) 1918 dataLineTotal[2] = ''.join( 1919 ["<td class='border'></td>", 1920 "<td class=", self.rowStyle[0], 1921 " align='right'><div class='padd'>", 1922 versStr, "</div></td>", 1923 "<td class=", self.rowStyle[0], 1924 " align='right'><div class='padd'>", 1925 str(data[0]), "</div></td>", 1926 "<td class=", self.rowStyle[0], 1927 " align='right'><div class='padd'>", 1928 str(data[1]), "</div></td>"]) 1929 data = list(self.totalData["cu3"][versStr]) 1930 dataLineTotal[3] = ''.join( 1931 ["<td class='border'></td>", 1932 "<td class=", self.rowStyle[1], 1933 " align='right'><div class='padd'>", 1934 versStr, "</div></td>", 1935 "<td class=", self.rowStyle[1], 1936 " align='right'><div class='padd'>", 1937 str(data[0]), "</div></td>", 1938 "<td class=", self.rowStyle[1], 1939 " align='right'><div class='padd'>", 1940 str(data[1]), "</div></td>"]) 1941 if self.sysc.isVSA(): 1942 dataLineTotal[3] = ''.join( 1943 [dataLineTotal[3], 1944 "<td class=", self.rowStyle[1], 1945 " align='right'><div class='padd'>", 1946 str(data[2]), "</div></td>"]) 1947 dataLineTotal[3] = ''.join( 1948 [dataLineTotal[3], 1949 "<td class=", self.rowStyle[1], 1950 " align='right'><div class='padd'>", 1951 str(data[3]), "</div></td>"]) 1952 data = list(self.totalData["cu4"][versStr]) 1953 dataLineTotal[4] = \ 1954 "<td class='border'></td>" + \ 1955 "<td class=%s align='right'><div class='padd'>%s</td>" % ( 1956 self.rowStyle[0], versStr) + ''.join( 1957 ["<td class=%s align='right'><div class='padd'>%s</td>" % ( 1958 self.rowStyle[0], str(data[i])) 1959 for i in range(0, len(self.surveys.fullList) + 1)]) 1960 self.writetheline(tickCol) 1961 for cuNum in cuList: 1962 self.writetheline(dataLineTotal[cuNum]) 1963 self.writetheline("</tr>") 1964 tableEnd = "</TABLE>" 1965 self.writetheline(tableEnd)
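Note: createBigTable first maps each date of the semester to the list of release versions that actually have data, then inverts that mapping to obtain the number of days covered by each version (the "%dd" entries in the totals row). A minimal standalone sketch of that grouping step, with invertVersions standing in for the wsatools.Utilities.invertDict call used above (function name and sample data are illustrative assumptions), might look like this:

# Illustrative sketch only: grouping dates by version, as used for the
# totals row of the calendar table.
from collections import defaultdict

def invertVersions(versDict):
    """Return {version: [dates]} from {date: [versions]}."""
    inverted = defaultdict(list)
    for date, versions in versDict.items():
        for vers in versions:
            if vers != '-1':            # '-1' marks dates without data
                inverted[vers].append(date)
    return inverted

if __name__ == "__main__":
    versDict = {"20100101": ["1.1"], "20100102": ["1.1", "1.2"],
                "20100103": ["-1"]}
    versDateDict = invertVersions(versDict)
    for vers in sorted(versDateDict):
        print("%s: %dd" % (vers, len(versDateDict[vers])))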
1966 1967 #------------------------------------------------------------------------------ 1968 # Entry point for script 1969 1970 # Allow module to be imported as well as executed from the command line 1971 if __name__ == "__main__": 1972 # Define additional command-line interface options for CreateCalendar 1973 CLI.progOpts.remove("test") 1974 CLI.progOpts += [ 1975 CLI.Option('D', "daily", 1976 "produce daily HTML"), 1977 CLI.Option('R', "remstats", 1978 "remove stats files for the given CU (1/2)", "NUMBER", '0', 1979 isValOK=lambda x: x.isdigit() and x in "12"), 1980 CLI.Option('S', "surveys", 1981 "Surveys to calculate CU4 for (exclude surveys with prefix 'x-')", "LIST", 'all'), 1982 CLI.Option('o', "outdir", 1983 "directory where the output files are created", 1984 "DIR", os.path.join(SystemConstants.developDisks[0], "monitoring")), 1985 CLI.Option('s', "semester", 1986 "semester for which a calendar should be produced", 1987 "SEMESTER"), 1988 CLI.Option('v', "versions", 1989 "list of version numbers for which a calendar should be produced", 1990 "LIST"), 1991 CLI.Option('x', "exclude", 1992 "CUs to be excluded from processing", 1993 "LIST")] 1994 1995 cli = CLI("CreateCalendar", "$Revision: 10243 $", __doc__) 1996 Logger.isVerbose = False 1997 Logger.addMessage(cli.getProgDetails()) 1998 1999 dataSubDir = "data" 2000 htmlSubDir = "html" 2001 outDir = cli.getOpt("outdir") 2002 utils.ensureDirExist(outDir) 2003 utils.ensureDirExist(os.path.join(outDir, dataSubDir)) 2004 utils.ensureDirExist(os.path.join(outDir, htmlSubDir)) 2005 archive = cli.getArg("database").split('.')[-1] 2006 sysc = SystemConstants(archive) 2007 if not cli.getOpt("semester"): 2008 obsSem = sorted(sysc.obsCal.semDates) 2009 else: 2010 obsSem = sysc.obsCal.checkSem(cli.getOpt("semester")) 2011 if not obsSem: 2012 Logger.addMessage("No semester", cli.getOpt("semester")) 2013 raise SystemExit 2014 2015 timestamp = utils.makeTimeStamp().replace(' ', '_') 2016 timestampShort = timestamp[:timestamp.find('_')] 2017 2018 # Ordered list of all programmes ; ensure that cal* is last. 
2019 if sysc.isWSA(): 2020 surveys = Programmes( 2021 ["dxs", "gcs", "gps", "las", "uds", "cal_ns"], 2022 ["DXS", "GCS", "GPS", "LAS", "UDS", 2023 "CALNS (CAL &amp; Non-Surveys)"]) 2024 # add UHS for 2012+ 2025 if any(int(sem[:-1]) > 2011 for sem in obsSem): 2026 surveys.fullList.insert(5, "uhs") 2027 surveys.description.insert(5, "UHS") 2028 elif sysc.isVSA(): 2029 if "SV" in obsSem: 2030 surveys = Programmes( 2031 ["ultravista", "vhs", "video", "viking", 2032 "vmc", "vvv", "svOrion", "svNgc253", "ddt_ns", "cal_comm"], 2033 ["ULTRAVISTA", "VHS", "VIDEO", "VIKING", "VMC", "VVV", 2034 "SVORION", "SVNGC253", "DDT", "CAL &amp; COMM"]) 2035 else: 2036 surveys = Programmes( 2037 ["ultravista", "vhs", "video", "viking", 2038 "vmc", "vvv", "ddt_ns", "cal_comm"], 2039 ["ULTRAVISTA", "VHS", "VIDEO", "VIKING", "VMC", "VVV", 2040 "DDT &amp; NS", "CAL &amp; COMM"]) 2041 elif sysc.isOSA(): 2042 surveys = Programmes( 2043 ["atlas", "kids", "vphas", "tech_ns", "cal"], 2044 ["ATLAS", "KIDS", "VPHAS+", "TECH &amp; NS", "CAL"]) 2045 if any(sysc.obsCal.getDates(sem)[0] >= sysc.obsCal.getDates("P89")[0] 2046 for sem in obsSem): 2047 surveys.fullList.insert(5, "n089a0483") 2048 surveys.description.insert(5, "U_Chile") 2049 2050 if cli.getOpt("surveys") != "all": 2051 optionalSurveys = [x for x in cli.getOpt("surveys").lower().split(',') 2052 if not x.startswith("x-")] 2053 if not optionalSurveys: 2054 optionalSurveys = surveys.fullList 2055 notTheSurveys = [x.replace("x-", '') 2056 for x in cli.getOpt("surveys").lower().split(',') 2057 if x.startswith("x-")] 2058 notTheSurveys.extend(x.lower() for x in surveys.fullList 2059 if x not in optionalSurveys) 2060 notTheSurveys = list(set(notTheSurveys)) 2061 2062 while notTheSurveys: 2063 surv = notTheSurveys.pop() 2064 surveys.description.remove(surveys.description[ 2065 surveys.fullList.index(surv)]) 2066 surveys.fullList.remove(surv) 2067 2068 excludeCus = \ 2069 (csv.values(cli.getOpt("exclude")) if cli.getOpt("exclude") else ()) 2070 2071 cuList = [1, 2, 3, 4] 2072 2073 CuStats.surveys = surveys 2074 CuStats.obsCal = sysc.obsCal 2075 CuStats.archive = DbSession(cli.getArg("database"), 2076 userName=cli.getOpt("user")) 2077 mainServer = CuStats.archive.server 2078 2079 versions = ("1.3" if mainServer == "ramses9" and not cli.getOpt("versions") 2080 else cli.getOpt("versions")) 2081 2082 tmpVersNums = defaultdict(list) 2083 if versions: 2084 versionList = versions.split(',') 2085 for sem in CuStats.obsCal.versNums: 2086 for vers in sorted(versionList): 2087 if vers in CuStats.obsCal.versNums[sem]: 2088 tmpVersNums[sem].append(vers) 2089 CuStats.versNums = tmpVersNums 2090 else: 2091 versionList = [] 2092 CuStats.versNums = CuStats.obsCal.versNums 2093 2094 cuStats = CuStats() 2095 cuStats.remStatsCU = cli.getOpt("remstats") 2096 2097 if '1' not in excludeCus: 2098 cuStats.getCu1Stats(obsSem) 2099 if '2' not in excludeCus: 2100 cuStats.getCu2Stats(obsSem) 2101 if '3' not in excludeCus: 2102 cuStats.getCu3Stats(obsSem) 2103 if '4' not in excludeCus: 2104 cuStats.getCu4Stats(obsSem) 2105 CuStats.archive = None 2106 2107 # initialize the HTML file 2108 Logger.addMessage("Creating web page...") 2109 timeStampAttr = (''.join(['_', timestampShort]) 2110 if cli.getOpt("daily") else '') 2111 if sysc.isVSA(): 2112 archive = ("VSA_v1_3" if mainServer == "ramses9" 2113 else "VSA_v1_2") 2114 if surveys.fullList == ["vvv"]: 2115 archive = "VSAVVV_v1_3" 2116 2117 htmlFileName = (''.join([archive, "Monitor", '_', obsSem[0], 2118 timeStampAttr, ".html"]) 2119 if len(obsSem) == 1 
2120 else ''.join([archive, "Monitor", timeStampAttr, ".html"])) 2121 2122 htmlFile = CalendarHTML(os.path.join(outDir, htmlSubDir, htmlFileName)) 2123 htmlFile.sysc = sysc 2124 htmlFile.archive = archive 2125 htmlFile.surveys = cuStats.surveys 2126 htmlFile.Statistics = cuStats.Statistics 2127 htmlFile.wopen() 2128 title = "VISTA" if sysc.isVSA() else sysc.instrumentName 2129 title += " Science Archive Curation Statistics" 2130 title += " (%s)" % (obsSem[0] if len(obsSem) == 1 else timeStampAttr) 2131 htmlFile.writeHeader(title, "wsacalendar.css", 2132 baseUrl=sysc.surveyBaseUrl()) 2133 htmlFile.writetheline( 2134 "<div id=\"dhtmltooltip\"></div>\n" 2135 "<script type=\"text/JavaScript\" src=\"tooltip.js\">\n" 2136 "document.onmousemove=positiontip</script>\n") 2137 2138 for sem in obsSem: 2139 htmlFile.writeTimestamp( 2140 sem + (" [v1.3]" if "1.3" in versionList else ''), 2141 utils.makeTimeStamp(), sysc.loadDatabase) 2142 htmlFile.createLegend() 2143 htmlFile.createBigTable(cuList, sem, cuStats) 2144 2145 # close the HTML file 2146 htmlFile.writeFoot() 2147 htmlFile.close() 2148 Logger.addMessage("Web page: %s" % htmlFile.name) 2149 Logger.addMessage("...finished.") 2150 2151 #------------------------------------------------------------------------------ 2152 # Change log: 2153 # 2154 # 11-May-2007, ETWS: First version 2155 # 23-May-2007, ETWS: Refactored. 2156 # 24-May-2007, ETWS: Fixed bugs, included legend. 2157 # 01-Jun-2007, JB: Updated list of dates with no data for 06B. 2158 # 06-Jun-2007, ETWS: Fixed bugs, included CU4 output, included timestamp 2159 # of file generation. 2160 # Updated list of dates with no data for 06A. 2161 # 07-Jun-2007, ETWS: Fixed ingestion tickmark bug. 2162 # 27-Jun-2007, ETWS: Extended legend. 2163 # 28-Jun-2007, ETWS: Included handling of files observed out of 2007A bounds. 2164 # 25-Jul-2007, ETWS: Fixed output for CU4; changed DB read to dirty read; 2165 # updated 07A/B dates. 2166 # 08-Aug-2007, ETWS: Included CAL and Non-Surveys in CU4 output. 2167 # 24-Aug-2007, ETWS: Updated 06B un-processed days. 2168 # 28-Aug-2007, ETWS: Included handling of files observed out of 2006B bounds. 2169 # 21-Sep-2007, ETWS: Fixed output issues. 2170 # 25-Oct-2007, ETWS: Updated list of dates with no data for 07B. 2171 # 28-Jan-2008, ETWS: Another update of dates with no data for 07B; 2172 # included total sums of each column. 2173 # 7-Feb-2008, ETWS: Included possibility to exclude CUs from output. 2174 # 8-Feb-2008, ETWS: Updated queries to use FlatFileLookUp table. 2175 # 19-Feb-2008, ETWS: Fixed CU4 query. 2176 # 3-Mar-2008, ETWS: Updated list of dates with no data for 07B; 2177 # included 2008A main dates. 2178 # 4-Mar-2008, ETWS: Fixed unnecessary calls to WfcamCal initialization. 2179 # 5-Mar-2008, ETWS: Speeded up CU4 queries; tidied up defaultdict usage. 2180 # 23-Apr-2008, ETWS: Updated list of dates with no data for 08A. 2181 # 30-May-2008, ETWS: Included creation of WFCAMPROPRIETY monitor pages; 2182 # enhanced CU4 output for better overview of photometry 2183 # ingests. 2184 # 3-Jun-2008, ETWS: Included date on the right side if CU4 data is available. 2185 # 18-Jul-2008, ETWS: Replaced numarray with numpy. 2186 # 1-Aug-2008, ETWS: Excluded deprecated directories. 2187 # 4-Mar-2009, RSC: Updated to use DbSession and CLI modules. 2188 # 11-Mar-2009, ETWS: Fixed bug that omitted data for the last day of a semester 2189 # for versions greater than 1. 2190 # 7-Sep-2009, ETWS: Fixed code to reflect changes to cPickle in Python 2.6. 2191
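Note: the --surveys option handled in the entry point above accepts an include list plus exclusions prefixed with "x-", falling back to the full survey list when nothing is explicitly included. A standalone sketch of that filtering (filterSurveys and the sample lists are illustrative assumptions, not the module's code):

# Illustrative sketch only: the include/"x-" exclude logic applied to the
# survey list before the CU4 statistics are gathered.
def filterSurveys(optionValue, fullList):
    tokens = [t.strip().lower() for t in optionValue.split(',') if t.strip()]
    included = [t for t in tokens if not t.startswith("x-")] or list(fullList)
    excluded = set(t[2:] for t in tokens if t.startswith("x-"))
    excluded.update(s for s in fullList if s.lower() not in included)
    return [s for s in fullList if s not in excluded]

if __name__ == "__main__":
    full = ["ultravista", "vhs", "video", "viking", "vmc", "vvv"]
    print(filterSurveys("x-vvv", full))       # everything except vvv
    print(filterSurveys("vhs,viking", full))  # only vhs and viking

The script itself also removes the matching entries from surveys.description, so the table headers stay in step with the programme list.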