1
2
3 """
4 Automatic programme setup tools to determine the correct curation settings.
5 Updates the curation tables in the database and creates the necessary
6 schema files.
7
8 @author: N.J.G. Cross
9 @org: WFAU, IfA, University of Edinburgh
10
11 @newfield contributors: Contributors, Contributors (Alphabetical Order)
12 @contributors: R.S. Collins
13
14 @todo: Make sure the new pointingString and the old pointingString refer to
15 the same positions.
16 @todo: Replace the remaining hard-wired constants with database constants
17 parsed from WSA_InitiateArchive.sql; this may require adding more
18 constants to that SQL script.
19 """
20
21 from __future__ import division, print_function
22 from future_builtins import zip
23
24 from bisect import bisect
25 from collections import defaultdict, namedtuple
26 import math
27 import numpy
28 from operator import itemgetter
29 import os
30
31 import wsatools.Astrometry as astro
32 import wsatools.Banner as banner
33 import wsatools.DataFactory as df
34 import wsatools.DbConnect.CommonQueries as queries
35 from wsatools.DbConnect.CuSession import CuSession
36 import wsatools.DbConnect.DbConstants as dbc
37 from wsatools.DbConnect.DbSession import Join, SelectSQL
38 import wsatools.DbConnect.Schema as schema
39 import wsatools.ExternalProcess as extp
40 from wsatools.ExternalProductConverter import ExternalProduct
41 from wsatools.Logger import Logger
42 from wsatools.ObsCalendar import DateRange
43 import wsatools.Statistics as stats
44 from wsatools.SystemConstants import DepCodes, SystemConstants
45 import wsatools.Utilities as utils
46
47
49 """
50 Automatically determines the correct curation settings for a given
51 programme. Updates the curation tables in the database and creates the
52 necessary schema files.
53
54 """
55
56
57
58 _autoCommit = True
59 cuNum = 21
60
61
62
63
64
65 bandMergeCrit = None
66 createSchemaOnly = False
67 dateRange = CuSession.sysc.obsCal.dateRange()
68 maxEllipticity = 1.0
69 maxSeeing = 2.0
70 newPoints = False
71 numberStks = dbc.intDefault()
72 redoSchema = False
73 redoSetup = True
74 displayDeeps = False
75
76
77
110
111
112
114 """
115 Updates the schema files for the current non-survey programme, using the
116 latest version of sql/Templates/*_autoTemplateSchema.sql and the latest
117 curation settings in the database for this programme.
118
119 @param syncToDb: If True, update existing schema in database with new
120 schema, for all post-Detection tables.
121 @type syncToDb: bool
122
123 @todo: Combine surveyparser.py functionality into this function, which
124 will allow surveyparser.py to be just an interface to this class
125 and improve consistency between survey and non-survey schema.
126 """
127 try:
128 progIDs = list(self.programmeID if self.programmeID else
129 self.programme.getProgIDList(onlyNonSurvey=self.onlyNonSurveys,
130 onlySurvey=self.onlySurveys))
131 except TypeError:
132 progIDs = [self.programmeID]
133
134 if any(progID in self.sysc.manualProgs for progID in progIDs):
135 raise ProgrammeBuilder.CuError("Can only create schema for "
136 "automatically set up programmes")
137
138 for progID in sorted(progIDs):
139 self.programme.setCurRow(programmeID=progID)
140 Logger.addMessage("Creating schema for programme " +
141 self.programme.getAcronym().upper())
142
143 if syncToDb and self.programme.isNonSurvey():
144
145
146
147
148 progSchema = \
149 schema.parseTables(self.programme.getSchemaScript())
150
151 for table in reversed(progSchema):
152 if not table.name.endswith(("Raw", "Astrometry",
153 "Photometry", "Detection",
154 "TileSet", "TilePawPrints")):
155
156 Logger.addMessage("Dropping table %s" % table)
157 self.archive.dropTable(table, progSchema)
158
159 schemaPath = self.sysc.autoTemplateSchema
160 if self.programme.isSatelliteProg():
161 schemaPath = \
162 self.sysc.sqlTemplatePath("WSA_SatelliteTemplateSchema.sql")
163
164 schemaFiles = [(schemaPath, os.path.join(self.sysc.sqlScriptPath,
165 self.programme.getSchemaScript()))]
166
167 if self.programme.getAttr("neighboursSchema") \
168 not in (dbc.charDefault(), "WSA_NeighboursSchema.sql"):
169 schemaFiles.append(
170 (self.sysc.autoTemplateNeighboursSchema,
171 os.path.join(self.sysc.sqlScriptPath,
172 self.programme.getAttr("neighboursSchema"))))
173
174
175 isUpdatedForTable = self.createProgSchema(schemaFiles, progIDs)
176
177
178 if syncToDb:
179 progSchema = \
180 schema.parseTables(self.programme.getSchemaScript())
181
182 Logger.addMessage("Synchronising database to latest schema...")
183 for table in progSchema:
184 if self.archive.createTable(table, progSchema,
185 overWrite=isUpdatedForTable.get(table.name)):
186 Logger.addMessage("Created table %s" % table)
187
188
189 indexFiles = []
190 viewFiles = []
191 if self.onlyNonSurveys or not self.onlySurveys:
192 indexFiles.append((self.sysc.nsIndexScript,
193 self.programme.getProgIDList(onlyNonSurvey=True)))
194
195 if not self.sysc.isWSA() \
196 and (self.onlySurveys or not self.onlyNonSurveys):
197
198 indexFiles.append((self.sysc.indexScript,
199 self.programme.getProgIDList(onlySurvey=True)))
200
201 viewFiles.append((self.sysc.surveyViewScript,
202 self.programme.getProgIDList(onlySurvey=True)))
203
204 try:
205 for schemaFileName, progIDs in indexFiles:
206 self.createIndexSchema(schemaFileName, progIDs)
207
208 for schemaFileName, progIDs in viewFiles:
209 self.createIndexSchema(schemaFileName, progIDs, isView=True)
210
211 finally:
212
213 if not isinstance(self.programmeID, set):
214 self.programme.setCurRow(programmeID=self.programmeID)
215
216
217
241
242
243
245 """
246 Parses *autoTemplate.sql to produce the custom schema for the current
247 programme.
248
249 @param schemaFiles: List of schema template and programme files.
250 @type schemaFiles: list(tuple(str, str))
251 @param progIDs: List of programme IDs currently being processed.
252 @type progIDs: list(int)
253
254 @returns: Dictionary indicating the updated status of each table.
255 @rtype: dict(str: bool)
256
257 """
258
259
260
261
262
263
264 fullName = self.programme.getName()
265 if self.programme.isNonSurvey():
266 fullName = fullName[0].lower() + fullName[1:]
267
268 stdSubs = [("&template&", self.programme.getAcronym()),
269 ("&TEMPLATE&", self.programme.getAcronym().upper()),
270 ("&templatelong&", fullName),
271 ("@dataBase", self.archive.database if self.archive.database != 'VSAVVV' else 'VSA')]
272 bandUcdDict = {'z':'em.opt.I',
273 'y':'em.IR.NIR',
274 'j':'em.IR.J',
275 'h':'em.IR.H',
276 'ks':'em.IR.K'}
277 statusForTable = {}
278 synSetup = SynopticSetup(self.programme, self.dateRange)
279 for templatePath, schemaPath in schemaFiles:
280 status = Status(self, progIDs, synSetup)
281 if self.programme.getAttr("neighboursSchema") in schemaPath:
282 status.setNeighbours()
283 if not status.neighTables:
284 Logger.addMessage("No tables in RequiredNeighbours")
285 continue
286
287 if not any(indexScript in schemaPath for indexScript
288 in (self.sysc.indexScript, self.sysc.nsIndexScript,
289 self.sysc.surveyViewScript)):
290
291 Logger.addMessage(
292 "Creating script %s" % os.path.basename(schemaPath))
293
294 file(schemaPath, 'w').write('')
295
296
297 templateScript = file(templatePath).readlines()
298
299
300 lineNum = 0
301 dataLines = {'main':[]}
302 tables = ['main']
303 dictFiltData = None
304 dictColData = None
305 dictNeighData = None
306 dictBorData = None
307 tableOrder = [('main', True)]
308 while lineNum < len(templateScript):
309 line = templateScript[lineNum]
310 if line.startswith(("+", "=")):
311
312 oldDropStatus = status.dropTables
313 status.update(line)
314
315
316 if status.splitTables:
317 if any(table not in dataLines for table
318 in status.getAllTables(status.splitTables)):
319
320 for key, _cre in tableOrder:
321 statusForTable.update(
322 extractTables(dataLines[key], status))
323
324
325 file(schemaPath, 'a').writelines(dataLines[key])
326
327 dataLines = status.getAllTables(status.splitTables)
328 tableOrder = status.tableOrder
329
330 if not status.splitTables:
331 if 'main' not in dataLines:
332 for key, _cre in tableOrder:
333 statusForTable.update(
334 extractTables(dataLines[key], status))
335
336
337 file(schemaPath, 'a').writelines(dataLines[key])
338
339 dataLines = {'main':[]}
340 tableOrder = [('main', True)]
341
342 if status.dropTables != oldDropStatus:
343 dlDict = {}
344 curDropStatus = status.dropTables
345 status.dropTables = oldDropStatus
346 for key, _cre in tableOrder:
347 statusForTable.update(
348 extractTables(dataLines[key], status))
349
350 file(schemaPath, 'a').writelines(dataLines[key])
351 dlDict[key] = []
352 dataLines = dlDict
353 status.dropTables = curDropStatus
354
355 if status.useFilters and status.writeLine:
356 if not dictFiltData:
357 dictFiltData, dictOrder = \
358 status.initialise('filter', dataLines)
359
360 if dictFiltData and not status.useFilters:
361 for key in dataLines:
362 filtDataList = []
363 for filtName in dictOrder:
364 filtDataList.extend(dictFiltData[key][filtName])
365 dataLines[key].extend(filtDataList)
366 dictFiltData = None
367
368 if status.useColours and status.writeLine:
369 if not dictColData:
370 dictColData, dictOrder = \
371 status.initialise('colour', dataLines)
372
373 if dictColData and not status.useColours:
374 for key in dataLines:
375 colDataList = []
376 for colName in dictOrder:
377 colDataList.extend(dictColData[key][colName])
378
379 dataLines[key] += colDataList
380
381 dictColData = None
382
383 if status.useBorders and status.writeLine:
384 if not dictBorData:
385 dictBorData, dictOrder = \
386 status.initialise('border', dataLines)
387
388 if dictBorData and not status.useBorders:
389 for key in dataLines:
390 borDataList = []
391 for borName in dictOrder:
392 borDataList.extend(dictBorData[key][borName])
393
394 dataLines[key] += borDataList
395
396 dictBorData = None
397
398 if status.useNeighs and status.writeLine:
399
400 if not dictNeighData:
401 dictNeighData, dictOrder = status.initialise(
402 'neigh', dataLines)
403
404 if dictNeighData and not status.useNeighs:
405 for key in dataLines:
406 neighDataList = []
407 for neighName in dictOrder:
408 if neighName in dictNeighData[key]:
409 neighDataList.extend(
410 dictNeighData[key][neighName])
411
412 dataLines[key] += neighDataList
413
414 dictNeighData = None
415
416
417 lineNum += 1
418 continue
419
420 if not line.startswith("*"):
421 tables = ['main']
422 else:
423 tables = status.splitTables.get(line[2])
424 if tables is not None:
425 line = line[4:]
426 else:
427 Logger.addMessage("<Info> Table set %s does not have "
428 "option %s available for programme %s"
429 % (status.splitTables, line[2],
430 self.programme.getAcronym().upper()),
431 alwaysLog=False)
432
433 lineNum += 1
434 continue
435
436 if status.writeLine:
437
438
439 line = utils.multiSub(line, stdSubs)
440
441
442
443 if '&filterlist' in line:
444 listType = line.split('&filterlist:')[1].split('&')[0]
445 filterList = status.filterGroups[listType]
446 line = line.replace('&filterlist:%s&' % listType,
447 ', '.join(filterList).title())
448 if '$[' in line:
449 line = status.complexSub(line)
450
451
452 if status.useFilters:
453
454 for ii, shtName in enumerate(status.filters):
455 subs = [("&A&", shtName.upper()),
456 ("&a&", shtName.lower()),
457 ("&As&", shtName.title()),
458 ("&n&", 'n' if ii > 0 else ''),
459 ("&MagUCD&", (";%s" %
460 bandUcdDict[shtName.lower()]
461 if shtName.lower() in bandUcdDict else ""))]
462
463 fline = utils.multiSub(line, subs)
464 for table in tables:
465 dictFiltData[table][shtName].append(fline)
466
467 lineNum += 1
468 continue
469
470 elif status.useColours:
471
472 for shtName1, shtName2 in status.colours:
473 colUcd = ""
474 colUcd += (";%s" % bandUcdDict[shtName1.lower()]
475 if shtName1.lower() in bandUcdDict else "")
476 colUcd += (";%s" % bandUcdDict[shtName2.lower()]
477 if shtName2.lower() in bandUcdDict else "")
478 colName = '%sm%s' % (shtName1, shtName2)
479 subs = [("&A&", shtName1.upper()),
480 ("&a&", shtName1.lower()),
481 ("&As&", shtName1.title()),
482 ("&B&", shtName2.upper()),
483 ("&b&", shtName2.lower()),
484 ("&Bs&", shtName2.title()),
485 ("&ColorUCD&", colUcd)]
486
487 fline = utils.multiSub(line, subs)
488 for table in tables:
489 dictColData[table][colName].append(fline)
490
491 lineNum += 1
492 continue
493
494 elif status.useBorders:
495 for border in status.borders:
496 subs = [("&A&", border.upper()),
497 ("&a&", border.lower())]
498 fline = utils.multiSub(line, subs)
499 for table in tables:
500 dictBorData[table][border].append(fline)
501 lineNum += 1
502 continue
503
504 elif status.useNeighs:
505 for table in tables:
506 for ii, nTable in \
507 enumerate(status.neighs[table]['neightable']):
508
509 matchDist = round(3600
510 * status.neighs[table]['matchdist'][ii], 1)
511
512
513
514 subs = [("&neighTable&", nTable),
515 ("&MatchDist&", str(matchDist)),
516 ("&PrimeTable&",
517 status.neighs[table]['primetable'][ii]),
518 ("&PrimeID&",
519 str(status.neighs[table]['primeid'][ii]))
520 ]
521
522 fline = utils.multiSub(line, subs)
523 if 'externtable' in status.neighs[table]:
524 fline = fline.replace("&ExternTable&",
525 status.neighs[table]['externtable'][ii])
526
527 if 'externname' in status.neighs[table]:
528 fline = fline.replace("&Extern&",
529 status.neighs[table]['externname'][ii])
530
531 if 'externid' in status.neighs[table]:
532 fline = fline.replace("&ExternID&",
533 str(status.neighs[table]['externid'][ii]))
534
535 if 'externdb' in status.neighs[table]:
536 fline = fline.replace("&ExternDB&",
537 status.neighs[table]['externdb'][ii])
538
539 if 'extprogid' in status.neighs[table]:
540 extProg = self.programme.getAcronym(progID=
541 int(status.neighs[table]['extprogid'][ii]))
542
543 fline = fline.replace("&SecProg&",
544 extProg.upper())
545
546 if (not status.maxIndex or status.maxIndex > ii)\
547 and (not status.checkSpecNeigh or
548 table == 'XMatch' and
549 status.checkSpecNeigh.upper() in
550 status.neighs[table]['externname'][ii]
551 .upper()):
552
553 dictNeighData[table][nTable].append(fline)
554
555 lineNum += 1
556 continue
557
558 isPublicForTable = dict(status.tableOrder)
559 for table in tables:
560
561 fline = line.replace('&split&', table)
562 if "&fluxTable&" in fline:
563 fline = fline.replace('&fluxTable&',
564 self.programme.getDetectionTable() + '.'
565 if table in ("Photometry", "Astrometry") else '')
566 dataLines[table].append(fline.replace('&createTable&',
567 "CREATE TABLE" if isPublicForTable[table] else
568 "create table"))
569
570
571
572 lineNum += 1
573
574 for key, _cre in status.tableOrder:
575
576 statusForTable.update(extractTables(dataLines[key], status))
577 file(schemaPath, 'a').writelines(dataLines[key])
578
579 return statusForTable
580
581
582
596
597
598
600 """
601 Checks the available disks to determine the maximum releaseNum in use for
602 this programme's products on the main disks.
603
604 @return: Highest release number in use in the disk file system.
605 @rtype: int
606
607 """
608
609
610
611
612 dirDict = {'stack': self.sysc.stackDir, 'mosaic': self.sysc.mosaicDir}
613 if self.sysc.hasOffsetPos:
614 dirDict['tile'] = self.sysc.tileDir
615
616 progStr = "%05d" % self.programme.getAttr("programmeID")
617 fileNameParts = [progStr, self.sysc.deepSuffix, self.sysc.stackSuffix,
618 self.sysc.mefType]
619
620 notFileNameParts = [self.sysc.confSuffix, self.sysc.catType,
621 self.sysc.catSuffix, self.sysc.filtSuffix]
622
623 releaseNums = []
624 for prodType in self.sysc.productTypes:
625 for disk in self.sysc.availableRaidFileSystem():
626 productDir = os.path.join(disk, dirDict[prodType])
627 if os.path.exists(productDir):
628
629 subDirs = [subDir for subDir in os.listdir(productDir)
630 if subDir.split('_')[0].isdigit()
631 and subDir.startswith('20')]
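# Product sub-directories are assumed to be named '<yyyymmdd>..._v<releaseNum>',
# e.g. '20120301_v2' (illustrative); the release number is read from the part
# after '_v' below.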
632
633 for subDir in subDirs:
634 subDirPath = os.path.join(productDir, subDir)
635
636
637 for fileName in os.listdir(subDirPath):
638 filePathName = os.path.join(subDirPath, fileName)
639
640 if (all(part in filePathName
641 for part in fileNameParts)
642 and all(part not in filePathName
643 for part in notFileNameParts)):
644
645
646 if fileName.split('_')[1][:5] == progStr:
647 releaseNum = int(subDir.split("_v")[1])
648 releaseNums.append(releaseNum)
649
650 return max(releaseNums) if releaseNums else 0
651
652
653
655 """
656 Queries the database to find the next available release number for the
657 given programme.
658
659 @return: Next available release number.
660 @rtype: int
661
662 """
663 progID = self.programme.getAttr("programmeID")
664
665 if self.programme.getAttr("sourceProdType") == 'mosaic':
666
667 relNum = self.archive.queryAttrMax("releaseNum", "ExternalProduct",
668 where="productType='mosaic' AND programmeID=%s" % progID)
669
670 if not relNum:
671 raise ProgrammeBuilder.CuError(
672 "There are no existing mosaics ready for curation. "
673 "Please set up ExternalProduct and re-run.")
674
675 return relNum
676
677 diskRelNum = self.getMaxReleaseOnDisk()
678 dbRelNum = self.archive.queryAttrMax("releaseNum", "ProgrammeFrame",
679 where="productID>0 AND programmeID=%s" % progID) or 0
680
681 return max(diskRelNum, dbRelNum) + 1
682
683
684
701
702
703
705 """ Updates the Programme table entries for new programmes.
706 """
707 self.onlyNonSurveys = True
708 Logger.addMessage("Checking for new non-survey programmes...")
709
710 newProgs = self.archive.query("programmeID, dfsIdString", "Programme",
711 whereStr="programmeID >= %s AND detectionTable = %r" %
712 (dbc.nonSurveyProgrammeOffset(), dbc.charDefault()))
713
714 Logger.addMessage("Found %s new programmes." % len(newProgs))
715
716 for progID, dfsIdString in sorted(newProgs):
717
718 if self.sysc.isWSA():
719 acronym = dfsIdString.replace('/', '').lower()
720 else:
721 acronym = dfsIdString.translate(None, '.-()').lower()
722 if acronym.startswith('2'):
723 acronym = "d%s" % acronym
724 elif acronym.startswith('3'):
725 acronym = "s%s" % acronym
726 elif acronym[:3].isdigit():
727 acronym = "n%s" % acronym
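# Illustrative example: a WSA dfsIdString such as 'U/SERV/1' would become the
# acronym 'userv1'; for the other archives the characters '.-()' are stripped
# and a leading digit gains a 'd', 's' or 'n' prefix, presumably so derived
# table names do not start with a digit.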
728
729
730
731 Logger.addMessage("Updating Programme for " + acronym)
732 entries = [("detectionTable", repr(acronym + "Detection")),
733 ("catalogueSchema", "'NonSurvey/%sNS%s_%sSchema.sql'" %
734 (self.sysc.loadDatabase, acronym, acronym))]
735
736 rowIndex = [("programmeID", progID)]
737 self.archive.updateEntries("Programme", entries, rowIndex)
738 self.programmeID.add(progID)
739
740
741
742 self.archive.insertData("ProgrammeTable",
743 [progID, 1, acronym + "Detection", "objID"])
744
745 if self.programmeID:
746 self.programme.updateDetails()
747
748
749
751 """
752 Sets up the programme using the latest quality-controlled data.
753
754 @return: True if the schema now needs updating.
755 @rtype: bool
756
757 """
758 pAcronym = self.programme.getAcronym()
759 Logger.addMessage(
760 "Determining best setup for programme %s..." % pAcronym.upper())
761
762 filterTable = df.Table("Filter", self.archive)
763 newSetup = BestSetup(self.programme, self.redoSetup, self.dateRange,
764 self.displayDeeps)
765 synSetup = SynopticSetup(self.programme, self.dateRange,
766 newSetup.isSurveyDeep, self.bandMergeCrit)
767
768 if self.programme.isNonSurvey():
769 Logger.addMessage(
770 "RequiredFilters are:\n"
771 "| filter | isSynoptic |\n" + '\n'.join(
772 "| %6s | %3s |"
773 % (filterTable.getAttr("shortName", filterID=filterID),
774 ("Y" if isSynoptic else "N"))
775 for filterID, nPass, isSynoptic in newSetup.newFiltData))
776
777 Logger.addMessage("<Info> %s is a %s survey" % (pAcronym.upper(),
778 ("DEEP" if newSetup.isSurveyDeep else "SHALLOW")))
779
780
781 if self.programmeID in self.sysc.manualProgs:
782 Logger.addMessage(
783 "<Info> The data suggest the following filter setup:\n%s\n"
784 "and the following pointing setup:\n%s"
785 % (newSetup.newFiltData, newSetup.newPosData))
786
787 if not self.archive.isTrialRun:
788 raise ProgrammeBuilder.CuError(
789 "This is a UKIDSS programme, so requirements will not be "
790 "overwritten.")
791
792 return False
793
794
795 reqTables = []
796 for pType in newSetup.productData:
797 if len(newSetup.productData[pType]):
798 reqTables.append("Required%s" % pType.title())
799 Logger.addMessage("%s %ss will be inserted into Required%s"
800 % (len(newSetup.productData[pType]),
801 pType.lower(), pType.title()))
802
803 if newSetup.productLinks:
804 reqTables += ["ProductLinks"]
805
806 Logger.addMessage("%s links will be inserted into ProductLinks"
807 % len(newSetup.productLinks))
808
809 if self.programme.isNonSurvey():
810 reqTables.append("RequiredFilters")
811
812
813
814
815
816 if self.redoSetup:
817 Logger.addMessage(
818 "Wiping old programme requirements from database...")
819
820 for tableName in reqTables:
821 self.archive.delete(tableName,
822 whereStr="programmeID=%s" % self.programmeID)
823 else:
824 Logger.addMessage("Wiping ProductLinks from database...")
825 self.archive.delete("ProductLinks",
826 whereStr="programmeID=%s" % self.programmeID)
827
828 Logger.addMessage("Updating %s..." % ", ".join(reqTables))
829
830
831 for filterID, nPass, isSynoptic in newSetup.newFiltData:
832 if self.programme.isNonSurvey():
833 self.archive.insertData("RequiredFilters",
834 [self.programmeID, filterID, nPass, isSynoptic])
835 else:
836
837 self.archive.updateEntries("RequiredFilters",
838 entryList=[("isSynoptic", isSynoptic)],
839 rowIndexList=[("programmeID", self.programmeID),
840 ("filterID", filterID)])
841
842
843 for point in newSetup.productData["stack"]:
844 flName = filterTable.getAttr("shortName", filterID=point.filterID)
845 nameStr = "%s_%s_%s" % (pAcronym, point.fieldID, flName.title())
846 desc = ("Deep %s stack: %s, pointing %11.7f, %11.7f %s, nustep %d"
847 % (pAcronym, nameStr, point.ra, point.dec, flName, point.nuStep)
848 if self.sysc.hasMicroStep else
849 "Deep %s stack: %s, pointing %11.7f, %11.7f %s"
850 % (pAcronym, nameStr, point.ra, point.dec, flName))
851 entryList = [self.programmeID, point.productID, nameStr, desc,
852 point.filterID, point.ra, point.dec, self.numberStks]
853 if self.sysc.hasMicroStep:
854 entryList.append(point.nuStep)
855
856 if self.sysc.hasOffsetPos:
857 entryList.append(point.offsetPos)
858
859 entryList += [point.stackRadius, point.fieldID]
860 if self.sysc.hasVarOrient:
861
862 entryList.append(point.posAngle)
863
864 self.archive.insertData("RequiredStack", entryList)
865
866 for point in newSetup.productData["tile"]:
867 flName = filterTable.getAttr("shortName", filterID=point.filterID)
868 nameStr = "%s_%s_%s" % (pAcronym, point.fieldID, flName.title())
869 desc = "Deep %s tile: %s, pointing %11.7f, %11.7f %s, nustep %d" \
870 % (pAcronym, nameStr, point.ra, point.dec, flName, point.nuStep)
871
872 entryList = [self.programmeID, point.productID, nameStr, desc,
873 point.filterID, point.ra, point.dec, point.nuStep,
874 point.stackRadius, point.fieldID, point.posAngle]
875
876 self.archive.insertData("RequiredTile", entryList)
877
878 for point in newSetup.productData["mosaic"]:
879 flName = filterTable.getAttr("shortName", filterID=point.filterID)
880 nameStr = "%s_%s_%s" % (pAcronym, point.fieldID, flName.title())
881 desc = ("Deep %s mosaic: %s, pointing %11.7f, %11.7f %s, "
882 "pixel size %3.2f, size %3.2f x %3.2f sq. degrees" \
883 % (pAcronym, nameStr, point.ra, point.dec, flName,
884 point.pixelSize, point.raExtent, point.decExtent))
885
886 dualFilterID = 5
887
888
889
890 entryList = [self.programmeID, point.productID, nameStr, desc,
891 point.filterID, point.ra, point.dec, point.raExtent,
892 point.decExtent, point.nuStep, point.pixelSize, dualFilterID,
893 point.fieldID]
894 if self.sysc.hasVarOrient:
895 entryList.append(point.posAngle)
896
897 self.archive.insertData("RequiredMosaic", entryList)
898
899 for combiProdID, intProdID, combiProdType, intProdType \
900 in newSetup.productLinks:
901
902 entryList = [self.programmeID, combiProdID, intProdID,
903 combiProdType, intProdType]
904
905 self.archive.insertData("ProductLinks", entryList)
906
907 if self.newPoints:
908 return False
909
910
911 Logger.addMessage("Parsing registration files...")
912 registration = Registration(self.programme)
913
914
915 entryList = []
916 if self.programme.isNonSurvey():
917 entryList += [
918 ("description", "'%s'" % registration.description),
919 ("extractTool", "'CASU'"),
920 ("sourceTable", "'%sSource'" % pAcronym),
921 ("mergeLogTable", "'%sMergeLog'" % pAcronym),
922 ("neighboursSchema", "'NonSurvey/%sNS%s_%sNeighbourSchema.sql'"
923 % (self.sysc.loadDatabase, pAcronym, pAcronym)),
924 ("isLowGalacticLat", int(newSetup.isLowGalLat))]
925
926
927
928
929 self.bandMergeCrit = (self.bandMergeCrit
930 or self.programme.getAttr("bandMergingCriterion"))
931
932
933 if newSetup.isSurveyDeep:
934 synNeighTableDetails = []
935 for ngTable in synSetup.neighTables:
936 synID = synSetup.getSynopticTableID(ngTable)
937 synNeighTableDetails.append((ngTable, synID))
938
939 if synSetup.bestMatchTables:
940 entryList += [
941 ("synopticBestMatchTable", repr(synSetup.varBMTable)),
942 ("variabilityTable", "'%sVariability'" % pAcronym)]
943
944 if synSetup.bandMergingCrit:
945 entryList += [
946 ("synopticSourceTable", repr(
947 synSetup.getSynopticSourceTable())),
948 ("bandMergingCriterion", synSetup.bandMergingCrit)]
949
950 else:
951 entryList += [("synopticBestMatchTable", repr(dbc.charDefault())),
952 ("variabilityTable", repr(dbc.charDefault())),
953 ("synopticSourceTable", repr(dbc.charDefault()))]
954
955
956 Logger.addMessage("Updating Programme with " +
957 ("deep table information" if not self.programme.isNonSurvey() else
958 "table names and neighbour schema") +
959 " for programme %s" % pAcronym.upper())
960
961 self.archive.update("Programme", entryList,
962 where="programmeID=%s" % self.programmeID)
963
964 self.programme.updateDetails()
965
966
967 if self.programme.isNonSurvey() \
968 and not self.archive.queryEntriesExist("Survey",
969 where="surveyID=%s" % self.programmeID):
970
971 Logger.addMessage(pAcronym.upper() + " is a newly registered "
972 "programme. Adding entries into Survey, SurveyProgrammes and "
973 "ProgrammeTable...")
974
975
976 self.archive.insertData("Survey", rowData=[self.programmeID,
977 self.programme.getAttr("dfsIDString"), "%s: %s" %
978 (self.programme.getName().replace("programme", "project"),
979 registration.description), pAcronym.upper(), dbc.yes()])
980
981
982 self.archive.insertData("SurveyProgrammes",
983 [self.programmeID, self.programmeID, dbc.no()])
984
985
986 if self.sysc.isWSA():
987 self.archive.insertData("SurveyProgrammes",
988 [dbc.worldSurveyID(), self.programmeID, dbc.yes()])
989
990
991 self.archive.insertData("ProgrammeTable", [self.programmeID, 2,
992 self.programme.getSourceTable(), "sourceID"])
993
994
995 if synSetup.bandMergingCrit:
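# tableID 3 is used for the SynopticSource table (1 = detection table,
# 2 = source table, as inserted above).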
996 synID = 3
997 if not self.archive.queryEntriesExist("ProgrammeTable",
998 where="programmeID=%s AND tableID=%s" % (self.programmeID, synID)):
999
1000 Logger.addMessage(pAcronym.upper() + " is now a correlated "
1001 "survey. Adding SynopticSource table entry "
1002 "into ProgrammeTable...")
1003
1004 self.archive.insertData("ProgrammeTable", [
1005 self.programmeID, synID,
1006 synSetup.getSynopticSourceTable(), "synopticID"])
1007
1008 Logger.addMessage("Updating RequiredNeighbours...")
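# Default neighbour join radius: 10 arcsec, expressed in degrees.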
1009 joinCriterion = 10.0 / 3600.0
1010
1011 if self.programme.isNonSurvey():
1012
1013
1014
1015 twomassID = self.getExternalSurveyID('TWOMASS')
1016 ssaID = self.getExternalSurveyID('SSA')
1017 wiseAssID = self.getExternalSurveyID('WISE')
1018 neighbourEntries = [
1019 [self.programmeID, 2, dbc.intDefault(), dbc.intDefault(), joinCriterion,
1020 "%sSourceNeighbours" % pAcronym, dbc.intDefault()],
1021 [self.programmeID, 2, twomassID, 1, joinCriterion,
1022 "%sSourceXtwomass_psc" % pAcronym, dbc.intDefault()],
1023 [self.programmeID, 2, ssaID, 1, joinCriterion,
1024 "%sSourceXSSASource" % pAcronym, dbc.intDefault()],
1025 [self.programmeID, 2, wiseAssID, 2, joinCriterion,
1026 "%sSourceXwise_allskysc" % pAcronym, dbc.intDefault()],
1027 ]
1028 if self.sysc.isWSA():
1029
1030 latestSDSSsID = self.archive.queryAttrMax("surveyID", "ExternalSurvey",
1031 "surveyName like '%SDSS%-DR%'")
1032 surveyName, extTableName, tableID = self.archive.query(
1033 "surveyName,extTableName,extTableID",
1034 "ExternalSurveyTable as t, ExternalSurvey as s",
1035 "s.surveyID=t.surveyID and s.surveyID=%s and "
1036 "extTableName='PhotoObj'" % latestSDSSsID, firstOnly=True)
1037 neighTableName = self.getSDSSNeighTable(surveyName, extTableName)
1038 neighbourEntries += [[self.programmeID, 2, latestSDSSsID, tableID, joinCriterion,
1039 neighTableName, dbc.intDefault()]]
1040 for reqNeigh in neighbourEntries:
1041 if not self.archive.queryEntriesExist("RequiredNeighbours",
1042 "programmeID=%s and tableID=2 and surveyID=%s and extTableID=%s and extProgID=%s"
1043 % (self.programmeID, reqNeigh[2], reqNeigh[3], reqNeigh[6])):
1044
1045 self.archive.insertData("RequiredNeighbours",
1046 reqNeigh)
1047
1048
1049 self.archive.delete("RequiredNeighbours",
1050 whereStr="programmeID=extProgID AND programmeID=%s" % self.programmeID
1051 + " AND (neighbourTable LIKE '%XDetection'"
1052 " OR neighbourTable LIKE '%XSynopticSource')")
1053
1054
1055 if newSetup.isSurveyDeep:
1056 for synNeighTable, synID in synNeighTableDetails:
1057 self.archive.insertData("RequiredNeighbours",
1058 [self.programmeID, 2, dbc.intDefault(), synID,
1059 joinCriterion, synNeighTable, self.programmeID])
1060
1061 return True
1062
1063
1064 def getExternalSurveyID(self, databaseName):
1065 """
1066 """
1067 return self.archive.query("surveyID", "ExternalSurvey", "databaseName='%s'"
1068 % databaseName, firstOnly=True, default=dbc.intDefault())
1069
1070
1072 """ returns name of neighbour table between programme and SDSS survey
1073 """
1074 surveyNameParts = surveyName.split('-')
1075 survName = ''.join([part.title() for part in surveyNameParts
1076 if part.upper() != 'SDSS'])
1077 return "%sX%s%s" % (self.programme.getSourceTable(), survName, extTableName)
1078
1079
1081 """This translates schema template grammar into a current status
1082 """
1083 actionDict = {
1084 '++':'start',
1085 '+-':'startNot',
1086 '==':'finish',
1087 '=-':'finishNot'}
1088
1089 methodDict = {
1090 'p':'specificProgrammes',
1091 'r':'repeatTable',
1092 'x':'sextractorValues',
1093 'h':'highGalacticLat',
1094 'd':'synTables',
1095 'f':'cycleThroughFilters',
1096 'c':'cycleThroughColours',
1097 's':'sourceTables',
1098 't':'dropTables',
1099 'e':'externalTable',
1100 'n':'cycleThroughNeighs',
1101 'o':'onceOnly',
1102 'b':'cycleThroughBorders',
1103 }
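# Template grammar control lines combine one key from actionDict with one from
# methodDict, e.g. (illustrative) '++f:a' starts cycling through all filters,
# '==f' finishes that cycle, and '+-h' starts a highGalacticLat "startNot"
# section; see parseString() below.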
1104
1105 colourGroups = None
1106 filterGroups = None
1107 borderGroups = None
1108 progGroups = None
1109
1110 splitTables = None
1111
1112 useProgSP = True
1113 useProgSX = True
1114 useProgHL = True
1115 useProgSY = True
1116 useProgSR = True
1117 useProgFO = True
1118 checkSpecNeigh = None
1119 writeLine = True
1120 dropTables = False
1121
1122 filters = None
1123 colours = None
1124 borders = None
1125 useColours = False
1126 useFilters = False
1127 useBorders = False
1128 tableOrder = [('main', True)]
1129 neighView = None
1130 neighs = None
1131 useNeighs = False
1132 lastIndex = None
1133 maxIndex = None
1134 progIndex = None
1135
1136
1137 archive = None
1138
1139 programme = None
1140
1141 sysc = None
1142
1143 synSetup = None
1144
1145
1146
1147 def __init__(self, cu, progIDs, synSetup):
1148 """
1149 Initialise status from current CuSession task.
1150
1151 @param cu: Curation task.
1152 @type cu: CuSession
1153 @param progIDs: List of programme IDs being processed.
1154 @type progIDs: list(int)
@param synSetup: Synoptic setup for this programme.
@type synSetup: SynopticSetup
1155 
1156 """
1157 self.archive = cu.archive
1158 self.programme = cu.programme
1159 self.sysc = cu.sysc
1160 self.synSetup = synSetup
1161 programmeID = self.programme.getAttr("programmeID")
1162 self.progIndex = progIDs.index(programmeID)
1163 self.lastIndex = len(progIDs) - 1
1164 surveyIDs = self.programme.getProgIDList(onlySurvey=True)
1165 nonSurveyIDs = self.programme.getProgIDList(onlyNonSurvey=True)
1166 minSurveyProgID = min(self.sysc.scienceProgs.values())
1167 self.progGroups = {
1168 'early': [progID for progID in surveyIDs if progID < minSurveyProgID],
1169 'ps': [progID for progID in surveyIDs if progID >= minSurveyProgID],
1170 'ns': nonSurveyIDs,
1171 'allProgs': surveyIDs + nonSurveyIDs}
1172
1173
1174 allFilters = \
1175 list(df.PassbandList(self.programme).getPassbandsLowerCase())
1176
1177 bbFilters = df.PassbandList(self.programme).getBroadbandPassbands()
1178
1179 synFilters = list(df.PassbandList(self.programme, isSynoptic=True)
1180 .getPassbandsLowerCase())
1181
1182 bbSynFilters = df.PassbandList(self.programme, isSynoptic=True)\
1183 .getBroadbandPassbands()
1184
1185
1186 self.filterGroups = {'a': allFilters, 's': synFilters}
1187
1188 if self.sysc.hasOffsetPos:
1189 offSetMax = self.archive.queryAttrMax("noffsets",
1190 table=Join(["Multiframe", "ProgrammeFrame"], ["multiframeID"]),
1191 where="frameType LIKE '%tile%stack' AND " +
1192 DepCodes.selectNonDeprecated) or 6
1193
1194 self.filterGroups['o'] = \
1195 ['o%d' % off for off in range(1, offSetMax + 1)]
1196
1197 l1 = bbFilters[:-1]
1198 l2 = bbFilters[1:]
1199 for fName in allFilters:
1200 if fName not in bbFilters:
1201 bbF = queries.getBBFilter(self.archive, fName, programmeID)
1202 if bbF:
1203 l1.append(fName)
1204 l2.append(bbF.lower())
1205
1206
1207 self.colourGroups = {
1208 'a': list(zip(l1, l2)),
1209 'sb': list(zip(bbSynFilters[:-1], bbSynFilters[1:]))}
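# Illustrative example: broadband filters ['y', 'j', 'h', 'ks'] give the
# adjacent colour pairs ('y', 'j'), ('j', 'h'), ('h', 'ks'); any narrow-band
# filter is paired with the broad-band filter returned by queries.getBBFilter().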
1210
1211 posList = []
1212 for edge in ['detEdge', 'dithEdge']:
1213 for bound in ['Inner', 'Outer']:
1214 for pos in range(4):
1215 posList.append('%s%sPos%d' % (edge, bound, pos + 1))
1216
1217 self.borderGroups = {'io': posList}
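# This produces 16 border names, from 'detEdgeInnerPos1' through
# 'dithEdgeOuterPos4'.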
1218
1219
1220
1222 """
1223 """
1224
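# A substitution token is assumed to take the form
# '$[<repeatString>];<joinString>;<methodCode>:<args>$', e.g. (illustrative)
# '$[&a&AperMag3];, ;f:a$' expands the repeat string once per filter in
# group 'a', joined by ', '.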
1225 for _substitution in range(len(line.split('$[')) - 1):
1226 subString = '$[' + line.split('$[')[1].split('$')[0] + '$'
1227 repeatString = subString.split('[')[1].split(']')[0]
1228 joinStr = subString.split(';')[1]
1229 methodArg = subString.split(';')[2][0]
1230 addInfo = \
1231 subString.split(';')[2].split(':')[1].split('$')[0].split(',')
1232
1233 method = self.methodDict[methodArg]
1234 if method == 'cycleThroughFilters':
1235 repeats = []
1236 for ii, shtName in enumerate(self.filterGroups[addInfo[0]]):
1237 subs = [("&A&", shtName.upper()),
1238 ("&a&", shtName.lower()),
1239 ("&As&", shtName.title()),
1240 ("&n&", 'n' if ii > 0 else '')]
1241 repeats.append(utils.multiSub(repeatString, subs))
1242 line = utils.multiSub(line, [(subString, joinStr.join(repeats))])
1243 elif method == 'cycleThroughColours':
1244 repeats = []
1245 for shtName1, shtName2 in self.colourGroups[addInfo[0]]:
1246 subs = [("&A&", shtName1.upper()),
1247 ("&a&", shtName1.lower()),
1248 ("&As&", shtName1.title()),
1249 ("&B&", shtName2.upper()),
1250 ("&b&", shtName2.lower()),
1251 ("&Bs&", shtName2.title())]
1252 repeats.append(utils.multiSub(repeatString, subs))
1253 line = utils.multiSub(line, [(subString, joinStr.join(repeats))])
1254 elif method == 'cycleThroughBorders':
1255 repeats = []
1256 for border in self.borderGroups[addInfo[0]]:
1257 subs = [("&A&", border.upper()),
1258 ("&a&", border.lower())]
1259 repeats.append(utils.multiSub(repeatString, subs))
1260 line = utils.multiSub(line, [(subString, joinStr.join(repeats))])
1261 elif method == 'specificProgrammes':
1262 self.addInfo = addInfo
1263 if self.isProgMatch():
1264 line = utils.multiSub(line, [(subString, repeatString)])
1265 else:
1266 line = utils.multiSub(line, [(subString, joinStr)])
1267 else:
1268 raise ProgrammeBuilder.CuError("Schema inconsistency")
1269 return line
1270
1271
1272
1274 """
1275 """
1276
1277
1278
1279
1280
1281 viewStringN = ("neighTable,matchDist,primeTable,primeID")
1282 viewStringX = viewStringN + (",externName,externDb,externTable,externID")
1283 viewStringS = viewStringN + (",externTable,externID")
1284 viewStringP = viewStringS + (",extProgID,externName")
1285 selectStringN = ("r.neighbourTable,r.joinCriterion,pt.tableName,"
1286 "pt.sourceIDName")
1287 selectStringX = selectStringN + (",es.surveyName,es.databaseName,"
1288 "est.extTableName,est.sourceIDName")
1289 selectStringS = selectStringN + (",pt2.tableName,pt2.sourceIDName")
1290 selectStringP = selectStringS + (",p.programmeID,p.title")
1291
1292 fromStringN = ("RequiredNeighbours as r,ProgrammeTable as pt")
1293 fromStringX = fromStringN + (",ExternalSurvey as es,"
1294 "ExternalSurveyTable as est")
1295 fromStringS = fromStringN + (",ProgrammeTable as pt2")
1296 fromStringP = fromStringS + (",Programme as p")
1297
1298 whereStringN = ("r.programmeID=%d and r.programmeID=pt.programmeID and "
1299 "r.tableID=pt.tableID" % self.programme.getAttr("programmeID"))
1300 whereStringX = whereStringN + (" and r.surveyID=es.surveyID and "
1301 "r.surveyID=est.surveyID and "
1302 "r.extTableID=est.extTableID ")
1303 whereStringS = whereStringN + (" and r.programmeID=pt2.programmeID and "
1304 "r.extTableID=pt2.tableID")
1305 whereStringP = whereStringN + (" and r.extProgID=pt2.programmeID and "
1306 "r.extTableID=pt2.tableID and "
1307 "p.programmeID=r.extProgID")
1308
1309
1310 isNeigh = self.archive.queryEntriesExist(
1311 fromStringN, whereStringN +
1312 " and r.surveyID<0 and r.extProgID<0")
1313 isXMatch = self.archive.queryEntriesExist(
1314 fromStringX, whereStringX + " and r.surveyID>0")
1315
1316 isSynop = self.archive.queryEntriesExist(
1317 fromStringS, whereStringS + " and r.surveyID<0 and "
1318 "r.extProgID=r.programmeID") and self.programme.isDeep()
1319 isInter = self.archive.queryEntriesExist(
1320 fromStringS, whereStringS + " and r.surveyID<0 and "
1321 "r.extProgID=r.programmeID") and not self.programme.isDeep()
1322 isProg = self.archive.queryEntriesExist(
1323 fromStringP, whereStringP + " and r.surveyID<0 and "
1324 "r.extProgID!=r.programmeID")
1325 self.neighTables = {}
1326 if isNeigh:
1327 self.neighTables['Neigh'] = df.View(viewStringN.split(','),
1328 self.archive.query(selectStringN, fromStringN,
1329 whereStringN +
1330 " and r.surveyID<0 and r.extProgID<0"))
1331 if isXMatch:
1332 self.neighTables['XMatch'] = df.View(viewStringX.split(','),
1333 self.archive.query(selectStringX, fromStringX,
1334 whereStringX + " and r.surveyID>0"))
1335 if isSynop:
1336 self.neighTables['Synop'] = df.View(viewStringS.split(','),
1337 self.archive.query(selectStringS, fromStringS,
1338 whereStringS + " and r.surveyID<0 and "
1339 "r.extProgID=r.programmeID"))
1340 if isInter:
1341 self.neighTables['Inter'] = df.View(viewStringS.split(','),
1342 self.archive.query(selectStringS, fromStringS,
1343 whereStringS + " and r.surveyID<0 and "
1344 "r.extProgID=r.programmeID"))
1345 if isProg:
1346 self.neighTables['Prog'] = df.View(viewStringP.split(','),
1347 self.archive.query(selectStringP, fromStringP,
1348 whereStringP + " and r.surveyID<0 and "
1349 "r.extProgID!=r.programmeID"))
1350 self.neighTypes = self.neighTables.keys()
1351 if self.neighTables:
1352 self.neighTables['All'] = df.View(viewStringN.split(','),
1353 self.archive.query(selectStringN, fromStringN, whereStringN))
1354
1355
1356
1358 """
1359 """
1360 self.parseString(line)
1361
1362
1363
1364
1365
1366
1367 if (self.method == 'repeatTable' and
1368 self.action == 'start'):
1369 if self.addInfo[0] == 'd':
1370 self.tableOrder = [('', True), ('Raw', False), ('Astrometry', False),
1371 ('Photometry', False)]
1372 self.splitTables = {'d':['', 'Raw', 'Astrometry', 'Photometry'],
1373 'r':['', 'Raw'],
1374 'a':['', 'Astrometry'],
1375 'p':['', 'Photometry'],
1376 'D':[''],
1377 'R':['Raw'],
1378 'A':['Astrometry'],
1379 'P':['Photometry'],
1380 'S':['Raw', 'Astrometry', 'Photometry'], }
1381 elif self.addInfo[0] == 's':
1382 self.tableOrder = [('', True), ('Merge', False)]
1383 self.splitTables = {'m':['', 'Merge'],
1384 's':[''], }
1385 elif self.addInfo[0] == 'n':
1386 tableDict = {'Neigh':'n',
1387 'XMatch':'x',
1388 'Synop':'s',
1389 'Inter':'i',
1390 'Prog':'p'}
1391 self.tableOrder = []
1392 self.splitTables = {'a':self.neighTypes}
1393 for table in self.neighTypes:
1394 self.tableOrder.append((table, True))
1395 self.splitTables[tableDict[table]] = [table]
1396 elif (self.method == 'repeatTable' and
1397 self.action == 'finish'):
1398
1399 self.splitTables = None
1400 self.tableOrder = [('main', True)]
1401
1402
1403
1404 if self.method == 'specificProgrammes':
1405 if self.action == 'start':
1406 if ((not self.isProgMatch() and not self.progSubtract) or
1407 (self.isProgMatch() and self.progSubtract)):
1408 self.useProgSP = False
1409 elif self.action == 'startNot':
1410 if ((self.isProgMatch() and not self.progSubtract) or
1411 (not self.isProgMatch() and self.progSubtract)):
1412 self.useProgSP = False
1413 elif self.action == 'finish':
1414 if ((not self.isProgMatch() and not self.progSubtract) or
1415 (self.isProgMatch() and self.progSubtract)):
1416 self.useProgSP = True
1417 elif self.action == 'finishNot':
1418
1419 if ((self.isProgMatch() and not self.progSubtract) or
1420 (self.isProgMatch() and self.progSubtract)):
1421 self.useProgSP = True
1422
1423
1424
1425 if (self.method == 'sourceTables' and
1426 self.action == 'start'):
1427
1428 if self.programme.getSourceTable() == dbc.charDefault():
1429 self.useProgSR = False
1430 elif (self.method == 'sourceTables' and
1431 self.action == 'finish'):
1432
1433 self.useProgSR = True
1434
1435
1436
1437 if (self.method == 'sextractorValues' and
1438 self.action == 'start'):
1439
1440 if not self.programme.useSExtractor():
1441 self.useProgSX = False
1442 elif self.addInfo:
1443
1444 if not self.progSubtract:
1445 raise ProgrammeBuilder.CuError("Schema inconsistency")
1446
1447 if self.isProgMatch():
1448 self.useProgSX = False
1449 elif (self.method == 'sextractorValues' and
1450 self.action == 'startNot'):
1451
1452 if self.programme.useSExtractor():
1453 self.useProgSX = False
1454 elif self.addInfo:
1455
1456 if not self.progSubtract:
1457 raise ProgrammeBuilder.CuError("Schema inconsistency")
1458
1459 if self.isProgMatch():
1460 self.useProgSX = False
1461
1462 elif (self.method == 'sextractorValues' and
1463 (self.action == 'finish' or
1464 self.action == 'finishNot')):
1465
1466 self.useProgSX = True
1467
1468
1469
1470 if (self.method == 'highGalacticLat' and
1471 self.action == 'start'):
1472
1473
1474
1475
1476 if self.programme.isLowGalLat():
1477 self.useProgHL = False
1478 elif self.addInfo:
1479 if not self.progSubtract:
1480 raise ProgrammeBuilder.CuError("Schema inconsistency")
1481 if self.isProgMatch():
1482 self.useProgHL = False
1483
1484 elif (self.method == 'highGalacticLat' and
1485 self.action == 'startNot'):
1486
1487 if not self.programme.isLowGalLat():
1488 self.useProgHL = False
1489
1490 elif self.addInfo:
1491
1492 if not self.progSubtract:
1493 raise ProgrammeBuilder.CuError("Schema inconsistency")
1494 if self.isProgMatch():
1495 self.useProgHL = False
1496
1497 elif (self.method == 'highGalacticLat' and
1498 (self.action == 'finish' or
1499 self.action == 'finishNot')):
1500
1501 self.useProgHL = True
1502
1503
1504
1505 if (self.method == 'synTables' and
1506 self.action == 'start'):
1507
1508 if self.addInfo[0] == 'a':
1509 if not self.programme.isDeep():
1510 self.useProgSY = False
1511 elif self.addInfo[0] == 'c':
1512 if not self.synSetup.bandMergingCrit:
1513 self.useProgSY = False
1514 elif self.addInfo[0] == 'o':
1515 if not (self.synSetup.bandMergingCrit and self.synSetup.isCorSingleEpoch):
1516 self.useProgSY = False
1517 elif self.addInfo[0] == 'm':
1518 if not (self.synSetup.isCorrelBMTable()):
1519 self.useProgSY = False
1520 elif self.addInfo[0] == 'n':
1521 if not (self.synSetup.isNonCorrelBMTable()):
1522 self.useProgSY = False
1523 elif self.addInfo[0] == 'k':
1524 if not (self.synSetup.isVarTableCorrel()):
1525 self.useProgSY = False
1526 elif self.addInfo[0] == 'v':
1527 if not (self.programme.isDeep() and not
1528 self.synSetup.isVarTableCorrel()):
1529 self.useProgSY = False
1530 elif (self.method == 'synTables' and
1531 self.action == 'finish'):
1532
1533 self.useProgSY = True
1534
1535
1536
1537 if (self.method == 'onceOnly' and
1538 self.action == 'start'):
1539 if self.addInfo[0] == 'f':
1540 if not self.progIndex == 0:
1541 self.useProgFO = False
1542 elif self.addInfo[0] == 'l':
1543 if not self.progIndex == self.lastIndex:
1544 self.useProgFO = False
1545 elif (self.method == 'onceOnly' and
1546 self.action == 'finish'):
1547
1548 self.useProgFO = True
1549
1550
1551
1552 if (self.method == 'externalTable' and
1553 self.action == 'start'):
1554 if not self.addInfo:
1555 raise ProgrammeBuilder.CuError("Schema inconsistency")
1556 self.checkSpecNeigh = self.addInfo[0]
1557 if (self.method == 'externalTable' and
1558 self.action == 'finish'):
1559 self.checkSpecNeigh = None
1560
1561
1562
1563 self.writeLine = (self.useProgSY and self.useProgHL and
1564 self.useProgSX and self.useProgSP and
1565 self.useProgSR and self.useProgFO)
1566
1567
1568
1569 if (self.method == 'cycleThroughFilters' and
1570 self.action == 'start'):
1571 self.useFilters = True
1572 self.filters = self.filterGroups[self.addInfo[0]]
1573 elif (self.method == 'cycleThroughFilters' and
1574 self.action == 'finish'):
1575 self.useFilters = False
1576 self.filters = None
1577
1578
1579
1580 if (self.method == 'cycleThroughColours' and
1581 self.action == 'start'):
1582 self.useColours = True
1583 self.colours = self.colourGroups[self.addInfo[0]][:]
1584 if len(self.addInfo) > 1:
1585
1586 self.stripAddCols(','.join(self.addInfo[1:]))
1587
1588 elif (self.method == 'cycleThroughColours' and
1589 self.action == 'finish'):
1590 self.colours = None
1591 self.useColours = False
1592
1593
1594
1595
1596 if (self.method == 'cycleThroughNeighs' and
1597 self.action == 'start'):
1598 if self.addInfo:
1599 if self.addInfo[0] == 'o':
1600 self.maxIndex = 1
1601 self.useNeighs = True
1602 self.neighs = self.neighTables
1603 elif (self.method == 'cycleThroughNeighs' and
1604 self.action == 'finish'):
1605 self.maxIndex = None
1606 self.neighs = None
1607 self.useNeighs = False
1608
1609
1610
1611
1612 if (self.method == 'cycleThroughBorders' and
1613 self.action == 'start'):
1614 self.useBorders = True
1615 self.borders = self.borderGroups[self.addInfo[0]]
1616 elif (self.method == 'cycleThroughBorders' and
1617 self.action == 'finish'):
1618 self.useBorders = False
1619 self.borders = None
1620
1621
1622
1623
1624 if (self.method == 'dropTables' and
1625 self.action == 'start'):
1626 if self.isProgMatch():
1627 self.dropTables = True
1628 elif (self.method == 'dropTables' and
1629 self.action == 'finish'):
1630
1631 if self.isProgMatch():
1632 self.dropTables = False
1633
1634
1635
1646
1647
1648
1650 """
1651 """
1652 self.action = self.actionDict[line[0:2]]
1653 self.method = self.methodDict[line[2]]
1654 self.progSubtract = False
1655 self.addInfo = None
1656 if line[3] == ":":
1657 if line[4] == '-':
1658 self.progSubtract = True
1659 self.addInfo = line[5:].split()[0].split(',')
1660 else:
1661 self.addInfo = line[4:].split()[0].split(',')
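# Illustrative example: the control line '+-x:-101,102' parses as
# action='startNot', method='sextractorValues', progSubtract=True,
# addInfo=['101', '102'].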
1662
1663
1664
1670
1671
1672
1707
1708
1709
1711 """
1712 Parses a string with additional colour information and edits the
1713 colour group accordingly.
1714
1715 """
1716
1717 addColsInd = addColInfo.find('+')
1718 subColsInd = addColInfo.find('-')
1719 if addColsInd < subColsInd:
1720 parts = addColInfo.split('-')
1721 if addColsInd >= 0:
1722 newCols = parts[0]
1723 remCols = parts[1]
1724 else:
1725 newCols = None
1726 remCols = parts[1]
1727 else:
1728 parts = addColInfo.split('+')
1729 if subColsInd >= 0:
1730 remCols = parts[0]
1731 newCols = parts[1]
1732 else:
1733 remCols = None
1734 newCols = parts[1]
1735
1736 for ii, subString in enumerate([newCols, remCols]):
1737 if subString:
1738 colList = subString.split(',')
1739
1740
1741 colList = self.testColours(colList)
1742
1743
1744 if ii == 0:
1745 self.colours.extend(tuple(col.split('m')) for col in colList)
1746 else:
1747 oldCols = ['m'.join(cl) for cl in self.colours]
1748 for col in colList:
1749 if col in oldCols:
oldCols.remove(col)
1750 self.colours = [tuple(col.split('m')) for col in oldCols]
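# Illustrative examples: an addColInfo of '-jmh' removes the colour
# ('j', 'h') from self.colours, while '+zmy' adds ('z', 'y'), provided both
# filters are available for this programme (see testColours).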
1751
1752
1753
1755 """
1756 Tests whether filters that make up colours are available for this
1757 programme.
1758
1759 """
1760 finalColList = []
1761 for col in colList:
1762 filComb = col.split('m')
1763 if (filComb[0] in self.filterGroups['a'] and
1764 filComb[1] in self.filterGroups['a']):
1765 finalColList.append(col)
1766
1767 return finalColList
1768
1769
1770
1772 """ Programme requirements from registration file.
1773 """
1774 comments = ''
1775 description = dbc.charDefault()
1776 email = ''
1777 expectedFilters = None
1778 password = ''
1779 piName = ''
1780
1781
1782
1784 """
1785 Parses the registration files, if not already done, and sets the
1786 registered requirements for the given programme.
1787
1788 @param progTable: Programme table for current database with the
1789 current row set to that of the desired programme.
1790 @type progTable: df.ProgrammeTable
1791
1792 """
1793 self.expectedFilters = []
1794 if not progTable.isNonSurvey():
1795
1796 self.description = progTable.getAttr("description")
1797 return
1798
1799 programme = progTable.getAttr("dfsIdString")
1800 programmeName = progTable.getAcronym().upper()
1801 sysc = progTable._db.sysc
1802
1803 foundReg = False
1804 for fileName in os.listdir(sysc.nonSurveyRegPath()):
1805 if '.reg' in fileName:
1806 for line in utils.ParsedFile(sysc.nonSurveyRegPath(fileName)):
1807 if not foundReg \
1808 and line.startswith("programme=" + programme):
1809
1810 foundReg = True
1811
1812 elif not foundReg and line.startswith("programmeName="):
1813 if line.endswith(programmeName):
1814 foundReg = True
1815 else:
1816 break
1817
1818 elif foundReg and line.startswith("title="):
1819 self.description = line.replace("title=", "")
1820
1821 elif foundReg and line.startswith("firstName="):
1822 self.piName = line.replace("firstName=", "")
1823
1824 elif foundReg and line.startswith("lastName="):
1825 self.piName += line.replace("lastName=", " ")
1826
1827 elif foundReg and line.startswith("email="):
1828 self.email = line.replace("email=", "")
1829
1830 elif foundReg and line.startswith("password="):
1831 self.password = line.replace("password=", "")
1832
1833 elif foundReg and "num=" in line:
1834 self.expectedFilters.append(line.split("num="))
1835
1836 elif foundReg and line.startswith("comments="):
1837 self.comments = line.replace("comments=", "")
1838
1839 if foundReg:
1840 break
1841
1842
1843
1845 """ Current requirements in archive.
1846 """
1847 currentFiltData = None
1848 currentStackPosData = None
1849 currentTilePosData = None
1850 isSurveyDeep = False
1851 nCurrentStackProducts = 0
1852 nCurrentTileProducts = 0
1853 productData = None
1854
1855 stackAttrs = "productID, fieldID, filterID, nuStep, ra, dec, stackRadius"
1856
1857 tileAttrs = "productID, fieldID, filterID, nuStep, ra, dec, stackRadius, "\
1858 "posAngle"
1859 mosaicAttrs = "productID, fieldID, filterID, nuStep, pixelSize, ra, dec, " \
1860 "raExtent, decExtent, posAngle"
1861
1862
1863
1864 def __init__(self, archive, programmeID):
1865 """
1866 """
1867
1868 self.productData = {}
1869 if archive.sysc.isVSA():
1870 self.stackAttrs += ", offsetPos, posAngle"
1871 if archive.sysc.isOSA():
1872 self.stackAttrs = "productID, fieldID, filterID, ra, dec, " \
1873 "stackRadius, posAngle"
1874 currentReqStacks = archive.query(
1875 selectStr=self.stackAttrs,
1876 fromStr="RequiredStack",
1877 whereStr="programmeID=%d" % programmeID)
1878
1879 self.nCurrentStackProducts = len(currentReqStacks)
1880
1881 if archive.sysc.isVSA():
1882 currentReqTiles = archive.query(
1883 selectStr=self.tileAttrs,
1884 fromStr="RequiredTile",
1885 whereStr="programmeID=%d" % programmeID)
1886
1887 self.nCurrentTileProducts = len(currentReqTiles)
1888
1889 currentStkPointings = [(point.fieldID, math.radians(point.ra),
1890 math.radians(point.dec), point.stackRadius, point.offsetPos,
1891 point.posAngle) for point in currentReqStacks]
1892
1893
1894
1895 currentTilePointings = [(point.fieldID, math.radians(point.ra),
1896 math.radians(point.dec), point.stackRadius, 0, point.posAngle)
1897 for point in currentReqTiles]
1898
1899 self.currentTilePosData = sorted(set(currentTilePointings))
1900 self.productData["tile"] = currentReqTiles
1901 elif archive.sysc.isOSA():
1902 currentStkPointings = [(point.fieldID, math.radians(point.ra),
1903 math.radians(point.dec), point.stackRadius,
1904 point.posAngle) for point in currentReqStacks]
1905 else:
1906 currentStkPointings = [(point.fieldID, math.radians(point.ra),
1907 math.radians(point.dec), point.stackRadius)
1908 for point in currentReqStacks]
1909
1910 self.currentStackPosData = sorted(set(currentStkPointings))
1911 self.productData["stack"] = currentReqStacks
1912
1913
1914 currentRequiredFilters = archive.query(
1915 selectStr="filterID, nPass, isSynoptic",
1916 fromStr="RequiredFilters",
1917 whereStr="programmeID=%d" % programmeID)
1918
1919
1920 self.currentFiltData = []
1921 self.isSurveyDeep = False
1922 for filterID, nPass, isSynop in currentRequiredFilters:
1923 if isSynop == 1:
1924 self.isSurveyDeep = True
1925 nusList = ([point.nuStep for point in currentReqStacks
1926 if filterID == point.filterID]
1927 if archive.sysc.hasMicroStep else [])
1928
1929 if len(nusList) > 0:
1930 typNustep = int(stats.clippedMADHistogram(nusList,
1931 retValues="median")[0])
1932 else:
1933 typNustep = dbc.intDefault()
1934
1935 if archive.sysc.hasMicroStep:
1936 self.currentFiltData.append((filterID, typNustep, nPass,
1937 isSynop))
1938 else:
1939 self.currentFiltData.append((filterID, nPass, isSynop))
1940
1941
1943 """ Best new setup from stack data.
1944 """
1945 isSurveyDeep = False
1946 isTile = False
1947 newFiltData = None
1948 newPosData = None
1949 posAngleTol = None
1950 productData = None
1951 productLinks = None
1952 productType = "stack"
1953 progID = None
1954 sysc = SystemConstants()
1955 displayDeep = False
1956
1957
1958 def __init__(self, progTable, recreate, dateRange, displayDeeps):
1959 """
1960 """
1961 self.productLinks = []
1962
1963
1964
1965
1966
1967 archive = progTable._db
1968 self.productData = defaultdict(list)
1969 self.sysc = archive.sysc
1970 self.displayDeep = displayDeeps
1971 self.progID = progTable.getAttr("programmeID")
1972 self.posAngTol = progTable.getAttr("posAngleTolerance")
1973
1974 currentSetup = CurrentSetup(archive, self.progID)
1975 Stack = namedtuple("Stack", currentSetup.stackAttrs)
1976 Tile = namedtuple("Tile", currentSetup.tileAttrs)
1977 Mosaic = namedtuple("Mosaic", currentSetup.mosaicAttrs)
1978
1979 stkAttr = "ra dec"
1980 posAttr = "fieldID ra dec gSize"
1981 if self.sysc.hasOffsetPos:
1982 posAttr += " off"
1983 stkAttr += " off"
1984 if self.sysc.hasVarOrient:
1985 posAttr += " pa"
1986 stkAttr += " pa"
1987 self.Position = namedtuple("Position", posAttr)
1988 ActStack = namedtuple("ActStack", stkAttr)
1989
1990
1991 prevReleaseNum = archive.queryAttrMax("releaseNum",
1992 table="ProgrammeFrame",
1993 where="programmeID=%s" % self.progID) or 0
1994 externalProd = archive.query("*", "ExternalProduct",
1995 "programmeID=%s and releaseNum>%s and productType in "
1996 "('mosaic')" % (self.progID, prevReleaseNum), firstOnly=True,
1997 default=None)
1998 if externalProd:
1999 extpd = ExternalProduct(archive, externalProd, progTable, dateRange)
2000 self.productData[externalProd.productType] = extpd.getProductList(
2001 {'mosaic': Mosaic,
2002 'stack': Stack, 'tile': Tile}[externalProd.productType])
2003
2004 self.newPosData = extpd.getPosData()
2005 self.isLowGalLat = \
2006 self.checkGalacticStatus(finalStacks=[], progTable=progTable)
2007
2008 self.isSurveyDeep = extpd.prepareProvList()
2009 self.newFiltData = extpd.getFiltData()
2010 self.productLinks = extpd.getProductLinks()
2011 extpd.renameFiles()
2012
2013 return
2014
2015
2016 if progTable.isNonSurvey():
2017
2018
2019 if not self.sysc.hasOffsetPos:
2020 archive.update("Programme",
2021 entryList=[("sourceProdType", "'stack'"),
2022 ("epochFrameType", "'stack'")],
2023 where="programmeID=%s" % self.progID)
2024 if self.sysc.hasOffsetPos:
2025 prodType = ('tile' if archive.queryEntriesExist(
2026 Join(["ProgrammeFrame", "Multiframe"], ["multiframeID"]),
2027 where="programmeID=%s AND frameType like 'tile%%stack'"
2028 % self.progID) else 'stack')
2029 archive.update("Programme",
2030 entryList=[("sourceProdType", "'%s'" % prodType),
2031 ("epochFrameType", "'%s'" % prodType),
2032 ("posAngleTolerance",
2033 self.sysc.defaultPosAngleTolerance)],
2034 where="programmeID=%s" % self.progID)
2035
2036 progTable = df.ProgrammeTable(archive)
2037 progTable.setCurRow(programmeID=self.progID)
2038 self.posAngTol = progTable.getAttr("posAngleTolerance")
2039
2040
2041
2042
2043 dateRange = "utDate BETWEEN '%s' AND '%s'" % dateRange
2044
2045
2046
2047 frameTypeSel = queries.getFrameSelection('stack', noDeeps=True)
2048 nustepStr = ', nustep' if self.sysc.hasMicroStep else ''
2049 selectStr = ("m.multiframeID, (15.*rabase) as raBase, decBase, m.filterID%s, "
2050 % nustepStr
2051 + ("offsetX, offsetY, " if self.sysc.hasOffsetPos else "")
2052 + "AVG(totalExpTime) AS totalExpTime"
2053 + (", MIN(posAngle) AS posAngle" if self.sysc.hasVarOrient else
2054 ""))
2055
2056 fromStr = ("Multiframe as m,ProgrammeFrame as p,MultiframeDetector as d,"
2057 "CurrentAstrometry as c")
2058
2059 whereStr = ("m.multiframeID=p.multiframeID and m.multiframeID="
2060 "d.multiframeID and d.multiframeID=c.multiframeID and "
2061 "c.extNum=d.extNum AND %s"
2062 " AND programmeID=%s AND m.%s AND posAngle>=-180"
2063 " AND %s" % (frameTypeSel, self.progID,
2064 DepCodes.selectNonDeprecated, dateRange))
2065 groupBy = ("m.multiframeID, raBase, decBase, "
2066 "m.filterID%s" % nustepStr
2067 + (",offsetX, offsetY" if self.sysc.hasOffsetPos else "")
2068 + ",telescope, instrument")
2069 orderBy = "decBase, m.filterID%s" % nustepStr
2070
2071 stacks = archive.query(selectStr, fromStr, whereStr, groupBy, orderBy)
2072
2073 if self.sysc.hasOffsetPos and progTable.getAttr('sourceProdType') == 'tile':
2074 mfIDSel = \
2075 SelectSQL("m.multiframeID", fromStr, whereStr, groupBy=groupBy)
2076
2077 frameTypeSel = queries.getFrameSelection('tile', noDeeps=True)
2078
2079
2080 goodTileSel = SelectSQL(
2081 select="p.combiframeID",
2082 table="(%s) AS t, Provenance AS p" % SelectSQL(
2083 select="pv.combiframeID, nOffsets",
2084 table="Provenance AS pv, Multiframe AS m",
2085 where="m.multiframeID=pv.combiframeID AND %s AND %s"
2086 " AND pv.multiframeID IN (%s)" % (frameTypeSel,
2087 DepCodes.selectNonDeprecated, mfIDSel)),
2088 where="t.combiframeID=p.combiframeID AND p.multiframeID IN (%s)"
2089 % mfIDSel,
2090 groupBy="p.combiframeID, t.nOffSets"
2091 " HAVING COUNT(distinct p.multiframeID)=t.nOffsets")
2092
2093
2094 goodMfIDs = archive.query("multiframeID", "Provenance",
2095 whereStr="combiframeID IN (%s)" % goodTileSel)
2096
2097 stacks = [stack for stack in stacks
2098 if stack.multiframeID in goodMfIDs]
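# Illustrative sketch (comments only, not part of the module): the filter
# above keeps a pawprint only if its parent tile is "complete", i.e. the
# number of distinct input pawprints found for that tile in Provenance equals
# the tile's nOffsets value.  Roughly:
#
#     nOffsets = 6                                      # expected pawprints
#     foundPawprints = set([101, 102, 103, 104, 105])   # multiframeIDs found
#     isComplete = len(foundPawprints) == nOffsets      # False -> tile excluded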
2099
2100 if not stacks:
2101 raise ProgrammeBuilder.CuError(
2102 "Programme %s has no non-deprecated catalogue data assigned in "
2103 "%s.ProgrammeFrame" % (progTable.getAcronym(), archive.database))
2104 if self.sysc.hasOffsetPos and self.sysc.hasVarOrient:
2105 offsets = [astro.getOffSetPos(stack.offsetX, stack.offsetY)
2106 for stack in stacks]
2107
2108 possStacks = [ActStack(math.radians(stack.raBase),
2109 math.radians(stack.decBase), offset, stack.posAngle)
2110 for stack, offset in zip(stacks, offsets)
2111 if offset >= 0]
2112 elif self.sysc.hasOffsetPos:
2113 offsets = [astro.getOffSetPos(stack.offsetX, stack.offsetY)
2114 for stack in stacks]
2115
2116 possStacks = [ActStack(math.radians(stack.raBase),
2117 math.radians(stack.decBase), offset)
2118 for stack, offset in zip(stacks, offsets)
2119 if offset >= 0]
2120 elif self.sysc.hasVarOrient:
2121 possStacks = [ActStack(math.radians(stack.raBase),
2122 math.radians(stack.decBase), stack.posAngle)
2123 for stack in stacks]
2124 else:
2125 possStacks = [ActStack(math.radians(stack.raBase),
2126 math.radians(stack.decBase)) for stack in stacks]
2127
2128 finalStacks = self.groupbyPosition(possStacks,
2129 self.sysc.maxRaDecExtentStackDeg, currentSetup.currentStackPosData,
2130 recreate)
2131
2132 self.isLowGalLat = self.checkGalacticStatus(finalStacks, progTable)
2133 if self.sysc.hasOffsetPos and self.sysc.hasVarOrient:
2134 self.productType = "tile"
2135
2136 finalTiles = self.findTiles(finalStacks,
2137 self.sysc.maxRaDecExtentStackDeg, recreate,
2138 currentSetup.currentTilePosData)
2139
2140 finalStacks = [self.Position(pID, math.degrees(ra), math.degrees(dec), gSize,
2141 ofP, posAng)
2142 for pID, ra, dec, gSize, ofP, posAng in finalStacks]
2143
2144 finalTiles = [(pID, math.degrees(ra), math.degrees(dec), gSize,
2145 posAng)
2146 for pID, ra, dec, gSize, ofP, posAng in finalTiles]
2147
2148 elif self.sysc.hasOffsetPos:
2149 self.productType = "tile"
2150
2151 finalTiles = self.findTiles(finalStacks,
2152 self.sysc.maxRaDecExtentStackDeg, recreate,
2153 currentSetup.currentTilePosData)
2154
2155 finalStacks = [self.Position(pID, math.degrees(ra), math.degrees(dec), gSize,
2156 ofP)
2157 for pID, ra, dec, gSize, ofP in finalStacks]
2158
2159 finalTiles = [(pID, math.degrees(ra), math.degrees(dec), gSize)
2160 for pID, ra, dec, gSize, ofP in finalTiles]
2161 elif self.sysc.hasVarOrient:
2162
2163 finalStacks = [self.Position(pID, math.degrees(ra), math.degrees(dec), gSize,
2164 posAng)
2165 for pID, ra, dec, gSize, posAng in finalStacks]
2166
2167
2168 else:
2169 finalStacks = [self.Position(pID, math.degrees(ra), math.degrees(dec), gSize)
2170 for pID, ra, dec, gSize in finalStacks]
2171
2172 Logger.addMessage("Found all product positions")
2173
2174
2175 allFilters = sorted(set(stack.filterID for stack in stacks))
2176
2177
2178
2179 filtNuGroups = (
2180 sorted(set((stack.filterID, stack.nustep) for stack in stacks))
2181 if self.sysc.hasMicroStep else None)
2182
2183
2184
2185
2186 microstepDict = {}
2187 self.newFiltData = []
2188 nEpochDict = {}
2189 for filterID in allFilters:
2190 isDeepFilter = False
2191 for point in finalStacks:
2192 fieldID = point[0]
2193 pos = (math.radians(point.ra), math.radians(point.dec))
2194 nuValsTExp = []
2195 dataNGood = []
2196 if self.sysc.hasMicroStep:
2197 for fID, microStep in filtNuGroups:
2198 if fID == filterID:
2199 times = []
2200 for stack in stacks:
2201 if stack.filterID == filterID \
2202 and stack.nustep == microStep \
2203 and not (self.sysc.hasOffsetPos and point.off !=
2204 astro.getOffSetPos(stack.offsetX, stack.offsetY)):
2205
2206 stackCentre = (math.radians(stack.raBase),
2207 math.radians(stack.decBase))
2208
2209 if abs(pos[1] - stackCentre[1]) \
2210 <= math.radians(point.gSize):
2211 if astro.angularSep(stackCentre, pos) \
2212 < math.radians(point.gSize):
2213 times.append(stack.totalExpTime)
2214
2215 if times:
2216 tExpTime = sum(times)
2217 nuValsTExp.append((microStep, tExpTime))
2218 dataNGood.append(len(times))
2219 else:
2220 times = []
2221 for stack in stacks:
2222 if stack.filterID == filterID \
2223 and not (self.sysc.hasOffsetPos and point.off !=
2224 astro.getOffSetPos(stack.offsetX, stack.offsetY)):
2225
2226 stackCentre = (math.radians(stack.raBase),
2227 math.radians(stack.decBase))
2228
2229 if abs(pos[1] - stackCentre[1]) \
2230 <= math.radians(point.gSize):
2231 if astro.angularSep(stackCentre, pos) \
2232 < math.radians(point.gSize):
2233 times.append(stack.totalExpTime)
2234
2235 if times:
2236 tExpTime = sum(times)
2237 nuValsTExp.append((1, tExpTime))
2238 dataNGood.append(len(times))
2239
2240
2241
2242 if nuValsTExp:
2243
2244 tExpMax = max(tExp for nus, tExp in nuValsTExp)
2245 microstepDict[(filterID, fieldID)] = \
2246 [nus for nus, tExp in nuValsTExp if tExp == tExpMax][-1]
2247
2248
2249 index = [ii for ii, (nus, tExp) in enumerate(nuValsTExp)
2250 if tExp == tExpMax][-1]
2251
2252
2253 isDeepFilter = sum(dataNGood) > 1 or isDeepFilter
2254 if sum(dataNGood) > 1 and self.displayDeep:
2255 Logger.addMessage("Multiple epochs in pointing RA %s, "
2256 "DEC %s, filter %s" %
2257 (point.ra, point.dec, filterID))
2258
2259 nEpochDict[(filterID, fieldID)] = dataNGood[index]
2260
2261
2262 self.isSurveyDeep = self.isSurveyDeep or isDeepFilter
2263 self.newFiltData.append((filterID, 1, int(isDeepFilter)))
2264
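# Illustrative sketch (comments only): how the loop above selects the best
# microstep (nustep) pattern for one (filter, pointing) and decides whether
# the filter is deep.  The nustep with the largest summed exposure time wins
# (ties go to the last entry) and more than one contributing stack marks the
# filter as deep.
#
#     nuValsTExp = [(1, 40.0), (4, 120.0), (9, 120.0)]  # (nustep, total exp time)
#     dataNGood = [2, 3, 3]                             # stacks per nustep
#     tExpMax = max(tExp for _nus, tExp in nuValsTExp)  # 120.0
#     best = [nus for nus, tExp in nuValsTExp if tExp == tExpMax][-1]   # 9
#     isDeepFilter = sum(dataNGood) > 1                 # True -> deep product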
2265 Logger.addMessage("Checked filters and epochs")
2266
2267
2268
2269 self.newPosData = []
2270 tileInfo = []
2271 newProdID = (len(currentSetup.productData["stack"]) + 1
2272 if not recreate else 1)
2273
2274 for pos in finalStacks:
2275 for filtID, _nPass, _isSyn in self.newFiltData:
2276
2277
2278
2279
2280
2281
2282 micro = microstepDict.get((filtID, pos.fieldID))
2283 if micro:
2284 prodID = (self.getProdID(pos, filtID,
2285 currentSetup.productData["stack"], recreate)
2286 or newProdID)
2287
2288 isNew = prodID == newProdID
2289 if isNew:
2290 newProdID += 1
2291
2292
2293 stackProd = [prodID, pos.fieldID, filtID]
2294 if self.sysc.hasMicroStep:
2295 stackProd.extend([micro])
2296 stackProd.extend([pos.ra, pos.dec, pos.gSize])
2297 if self.sysc.hasOffsetPos:
2298 stackProd.extend([pos.off])
2299 if self.sysc.hasVarOrient:
2300 stackProd.extend([pos.pa])
2301
2302 if recreate or isNew:
2303 self.productData["stack"].append(Stack(*tuple(stackProd)))
2304
2305 if self.isTile and self.sysc.hasOffsetPos:
2306 if pos.fieldID in self.stackTileDict:
2307 tileID = self.stackTileDict[pos.fieldID]
2308 tileInfo.append((tileID, filtID, micro, pos.off,
2309 prodID))
2310 else:
2311 Logger.addMessage(
2312 "Field ID %s not in self.stackTileDict for "
2313 "pos %s, filter %s" % (pos.fieldID, pos, filtID))
2314
2315 self.newPosData.append((pos.fieldID, pos.ra, pos.dec, pos.gSize))
2316
2317 if not self.sysc.hasOffsetPos or not self.isTile:
2318 return
2319
2320
2321 newProdID = (len(currentSetup.productData["tile"]) + 1
2322 if not recreate else 1)
2323
2324 for pos in finalTiles:
2325
2326 tInfo = [(filtID, micro, offPos, stkProdID)
2327 for tID, filtID, micro, offPos, stkProdID in tileInfo
2328 if tID == pos[0]]
2329
2330 filterIDs = sorted(set(fID for fID, _ms, _oP, _sPID in tInfo))
2331 for filtID in filterIDs:
2332 isNew = False
2333
2334 offPosList = sorted(oP for fID, _ms, oP, _sPID in tInfo
2335 if fID == filtID)
2336 if offPosList != range(6) and offPosList != range(3):
2337 Logger.addMessage("<Info> Tile product at position %s %s, "
2338 "filterID %s does not have the complete set of input "
2339 "stacks. Will not create it." % (pos[1], pos[2], filtID),
2340 echoOff=self.progID == self.sysc.scienceProgs.get("VHS"))
2341 else:
2342 tileProdID = self.getProdID(pos, filtID,
2343 currentSetup.productData["tile"], recreate) or newProdID
2344
2345 if tileProdID == newProdID:
2346 isNew = True
2347 newProdID += 1
2348
2349 micro = tInfo[0][1]
2350
2351 self.productLinks.extend((tileProdID, sPID, "tile", "stack")
2352 for fID, _ms, _oP, sPID in tInfo if fID == filtID)
2353
2354 tile = Tile(tileProdID, pos[0], filtID, micro, pos[1],
2355 pos[2], pos[3], pos[4])
2356
2357 if recreate or isNew:
2358 self.productData["tile"].append(tile)
2359
2360
2361
2362 def getProdID(self, pos, filterID, currentProdData, recreate):
2363 """ @return: productID of an existing product matching this field and
2364 filter, or 0 if there is none (or if recreating). """
2365
2366 fieldID = pos[0]
2367 productID = 0
2368 if not recreate:
2369 productIDs = [prod.productID for prod in currentProdData
2370 if prod.fieldID == fieldID and prod.filterID == filterID]
2371
2372 if len(productIDs) == 1:
2373 productID = productIDs[0]
2374
2375 return productID
2376
2377
2378
2379 def checkGalacticStatus(self, finalStacks, progTable):
2380 """ @return: True, if the programme is at low Galactic latitude.
2381 @rtype: bool
2382 """
2383 if not progTable.isNonSurvey():
2384 return
2385
2386 galCentre = [math.radians(192.25), math.radians(27.4)]
2387 coords = numpy.array([(stack[1], stack[2]) for stack in finalStacks])
2388 bArray = numpy.arcsin(numpy.cos(coords[:, 1]) * numpy.cos(galCentre[1])
2389 * numpy.cos(coords[:, 0] - galCentre[0]) + numpy.sin(coords[:, 1])
2390 * numpy.sin(galCentre[1]))
2391
2392 medAbsGalLat = numpy.median(numpy.abs(bArray))
2393 isLGL = medAbsGalLat < math.radians(15)
2394 Logger.addMessage("Median absolute Galactic latitude is %4.1f deg: "
2395 "%sGALACTIC survey" % (math.degrees(medAbsGalLat),
2396 "a " if isLGL else "an EXTRA"))
2397 return isLGL
2398
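# Worked sketch (comments only): the Galactic latitude used above follows from
# the North Galactic Pole at RA 192.25 deg, Dec 27.4 deg (B1950), via
# sin(b) = cos(dec)cos(decNGP)cos(ra - raNGP) + sin(dec)sin(decNGP).
# For a single coordinate instead of a numpy array:
#
#     import math
#     raNGP, decNGP = math.radians(192.25), math.radians(27.4)
#     ra, dec = math.radians(266.4), math.radians(-29.0)   # near Galactic centre
#     b = math.asin(math.cos(dec) * math.cos(decNGP) * math.cos(ra - raNGP)
#                   + math.sin(dec) * math.sin(decNGP))
#     # math.degrees(b) is close to zero, so |b| < 15 deg => low Galactic latitude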
2399
2400
2401 def findTiles(self, finalStacks, maxSep, recreate, curTilePos):
2402 """ Determines tile centre positions from the final stack (pawprint)
2403 positions and groups them into tile products. """
2404 possTiles = []
2405 tilAttr = "ra dec"
2406 if self.sysc.hasOffsetPos:
2407 tilAttr += " off"
2408 if self.sysc.hasVarOrient:
2409 tilAttr += " pa"
2410
2411 tilAttr += " fieldID"
2412 ActTile = namedtuple("ActTile", tilAttr)
2413 for stack in finalStacks:
2414 tilePos = astro.findTileCentre((stack.ra, stack.dec),
2415 stack.off, stack.pa)
2416
2417 possTiles.append(ActTile(tilePos[0], tilePos[1], 0, stack.pa,
2418 stack.fieldID))
2419
2420 self.isTile = True
2421 possTiles = sorted(possTiles, key=itemgetter(1))
2422
2423
2424 finalTiles = self.groupbyPosition(possTiles, maxSep, curTilePos,
2425 recreate, onceOnly=True)
2426
2427 return self.matchStacksTiles(finalTiles, finalStacks)
2428
2429
2430
2431 def groupbyPosition(self, interMedStacks, maxSep, currentPositions,
2432 recreate=False, onceOnly=False):
2433 """ Groups stacks by position: iteratively matches them to the current
2434 product positions, creating new groups and merging close pairs as needed. """
2435 Logger.addMessage("Number of intermediate %ss is %s"
2436 % (self.productType, len(interMedStacks)),
2437 alwaysLog=False)
2438
2439 numberGroups = len(currentPositions)
2440 nUngrouped = len(interMedStacks)
2441 hasImproved = True
2442 nIterations = 0
2443
2444 oldPositions = currentPositions[:] if not recreate else []
2445 isComplete = False
2446 while (not isComplete and hasImproved and nIterations < 3):
2447
2448
2449 Logger.addMessage("Checking to see if %ss match current "
2450 "positions: iteration %d" %
2451 (self.productType, nIterations),
2452 alwaysLog=False)
2453
2454 possStacks = interMedStacks[:]
2455 Logger.addMessage("Grouping %d %ss" % (len(possStacks),
2456 self.productType),
2457 alwaysLog=False)
2458
2459
2460 if recreate:
2461 ungroupedStacks = possStacks
2462 groups = []
2463 fieldID = 0
2464 else:
2465 groups, ungroupedStacks = \
2466 self.compareStackPosit(currentPositions, possStacks)
2467 Logger.addMessage("Not recreating. No. of groups: %d, no. of "
2468 "ungrouped %ss: %d"
2469 % (len(groups), self.productType,
2470 len(ungroupedStacks)),
2471 alwaysLog=False)
2472 emptyGroups = [pID for pID, groupStacks in groups
2473 if not groupStacks]
2474 currentPositions = \
2475 self.removeOldGroups(currentPositions, emptyGroups)
2476 fieldID = (currentPositions[-1][0] + 1
2477 if currentPositions else 0)
2478 if ungroupedStacks:
2479 Logger.addMessage("There are %d %ss that cannot be placed in "
2480 "current groups. New deep %s positions will be created"
2481 % (len(ungroupedStacks), self.productType, self.productType),
2482 alwaysLog=False)
2483
2484
2485
2486 newGroups = self.findNewGroups(ungroupedStacks, maxSep, fieldID)
2487 groups.extend(newGroups)
2488 Logger.addMessage("Found %d new groups" % len(newGroups),
2489 alwaysLog=False)
2490
2491
2492 currentPositions = self.recalculateCentres(groups, maxSep)
2493 Logger.addMessage("Found new centres", alwaysLog=False)
2494
2495
2496 Logger.addMessage("Check exclusivity", alwaysLog=False)
2497
2498 closePairs = self.checkExclusivity(currentPositions)
2499 Logger.addMessage("Found %d close pairs" % len(closePairs),
2500 alwaysLog=False)
2501 while len(closePairs) > 0:
2502 fieldID = currentPositions[-1][0] + 1
2503 findNewPos = self.reJigClosePairs(closePairs, groups, fieldID,
2504 currentPositions)
2505
2506 currentPositions.extend(findNewPos)
2507 closePids = [pair[0] for pair in closePairs]
2508 posTupSize = len(currentPositions[0])
2509 closePids.extend(pair[posTupSize] for pair in closePairs)
2510 depPids = sorted(set(closePids))
2511 currentPositions = self.removeOldGroups(currentPositions, depPids)
2512 closePairs = self.checkExclusivity(currentPositions)
2513 Logger.addMessage("Iterating close pairs: Found %d close pairs"
2514 % len(closePairs), alwaysLog=False)
2515
2516
2517 groups, ungroupedStacks = self.compareStackPosit(currentPositions,
2518 possStacks)
2519
2520
2521 if (len(ungroupedStacks) < nUngrouped or (
2522 len(ungroupedStacks) == nUngrouped and
2523 len(groups) < numberGroups)):
2524 hasImproved = True
2525 oldPositions = currentPositions[:]
2526 else:
2527 hasImproved = False
2528 currentPositions = oldPositions[:]
2529
2530 if onceOnly:
2531 hasImproved = False
2532
2533 if not ungroupedStacks:
2534 isComplete = True
2535
2536 nUngrouped = len(ungroupedStacks)
2537 numberGroups = len(groups)
2538 nIterations += 1
2539 recreate = False
2540 Logger.addMessage("Number of groups is %d and number of ungrouped "
2541 "%ss is %d" % (len(groups), self.productType,
2542 len(ungroupedStacks)),
2543 alwaysLog=False)
2544
2545
2546 groups, ungroupedStacks = self.compareStackPosit(currentPositions,
2547 possStacks)
2548
2549 Logger.addMessage("Final number of groups is %d and number of "
2550 "ungrouped %ss is %d"
2551 % (len(groups), self.productType, len(ungroupedStacks)),
2552 alwaysLog=False)
2553
2554 return currentPositions
2555
2556
2557
2558 def getMergedGroup(self, pair, groups, fieldID=None):
2559 """ Merges two close groups into one (or resizes them if fieldID is None).
2560 """
2561
2562
2563
2564 gDict = dict(groups)
2565 fullGroup = gDict[pair.pID1] + gDict[pair.pID2]
2566 if fieldID is None:
2567 pos1 = [pair.pID1, pair.ra1, pair.dec1, pair.size1 * pair.frac]
2568 pos2 = [pair.pID2, pair.ra2, pair.dec2, pair.size2 * pair.frac]
2569 if self.sysc.hasOffsetPos:
2570 pos1.append(pair.off1)
2571 pos2.append(pair.off2)
2572 if self.sysc.hasVarOrient:
2573 pos1.append(pair.pa1)
2574 pos2.append(pair.pa2)
2575 newPositions = [self.Position(*tuple(pos1)), self.Position(*tuple(pos2))]
2576
2577 else:
2578 newPositions = \
2579 self.recalculateCentres([(fieldID, fullGroup)],
2580 max(pair.size1, pair.size2))
2581
2582 _newGroup, ungroupStacks = \
2583 self.compareStackPosit(newPositions, fullGroup)
2584
2585 return len(ungroupStacks), newPositions
2586
2587
2588
2589 def compareStackPosit(self, positions, possStacks):
2590 """ Matches each candidate stack to the existing group positions.
2591 @return: List of (fieldID, stacks) groups and the list of ungrouped stacks. """
2592
2593
2594 decList = [st[1] for st in possStacks]
2595 groups = []
2596 self.tileStackMatch = []
2597 stackGrp = []
2598 usedStacks = []
2599 for pos in positions:
2600 tileGrp = []
2601 if self.sysc.hasOffsetPos:
2602 grpOffID = pos.off
2603 if self.sysc.hasVarOrient:
2604 grpPosAng = pos.pa
2605 gSize = math.radians(pos.gSize)
2606 groupPos = (pos.ra, pos.dec)
2607 groupStacks = []
2608
2609
2610 minDec, maxDec = pos.dec - gSize, pos.dec + gSize
2611 begIdx = max((bisect(decList, minDec) - 1), 0)
2612 endIdx = min((bisect(decList, maxDec) + 1), len(decList))
2613 for index in range(begIdx, endIdx, 1):
2614 stInfo = possStacks[index]
2615 position = stInfo[:2]
2616 offsetMatch = (stInfo.off == grpOffID
2617 if self.sysc.hasOffsetPos else True)
2618 posAngMatch = (abs(astro.angDiff(grpPosAng, stInfo.pa)) < self.posAngTol
2619 if self.sysc.hasVarOrient else True)
2620 if abs(groupPos[1] - position[1]) >= gSize:
2621 continue
2622 if (astro.angularSep(groupPos, position) < gSize
2623 and index not in usedStacks):
2624 if (offsetMatch and posAngMatch):
2625 groupStacks.append(stInfo)
2626 usedStacks.append(index)
2627 if self.isTile:
2628 tileGrp.append(stInfo.fieldID)
2629 stackGrp.append((stInfo.fieldID, pos.fieldID))
2630 if groupStacks:
2631 groups.append((pos.fieldID, groupStacks))
2632 else:
2633 Logger.addMessage(
2634 "<WARNING> No intermediates found for groupID=%s" % pos.fieldID,
2635 echoOff=self.progID == self.sysc.scienceProgs.get("VHS"))
2636
2637 if self.isTile:
2638 self.tileStackMatch.append((pos.fieldID, tileGrp))
2639
2640 if self.isTile:
2641 self.stackTileDict = dict(stackGrp)
2642
2643 unusedStacks = [st for ii, st in enumerate(possStacks)
2644 if ii not in usedStacks]
2645
2646 return groups, unusedStacks
2647
2648
2649
2650 def reJigClosePairs(self, closePairs, groups, pID, positions):
2651 """ Resolves overlapping (close) group pairs, either by merging them or by
2652 shrinking their sizes, and returns the replacement positions. """
2653 maxPairFrac = 0.1
2654 newPosits = []
2655 if closePairs:
2656 if len(closePairs) < maxPairFrac * len(groups):
2657
2658 for pair in closePairs:
2659 nOutliersMerge, newPosMerge = self.getMergedGroup(pair, groups, pID)
2660 nOutliersResize, newPosResize = self.getMergedGroup(pair, groups)
2661 pID += 1
2662 newPosits += \
2663 (newPosMerge if nOutliersMerge <= nOutliersResize else
2664 newPosResize)
2665 else:
2666
2667 avFrac = numpy.median([pair[8] for pair in closePairs])
2668 for pos in positions:
2669 ra, dec, gSize = pos[1:4]
2670 if self.sysc.isVSA():
2671 newPosits.append((pID, ra, dec, 0.9 * avFrac * gSize,
2672 pos[4], pos[5]))
2673 else:
2674 newPosits.append((pID, ra, dec, 0.9 * avFrac * gSize))
2675
2676 return newPosits
2677
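# Sketch of the decision above (comments only, placeholder names): when close
# pairs are rare (fewer than 10% of groups) each pair is either merged or both
# members are shrunk, whichever strands fewer stacks; otherwise every group
# radius is simply scaled down by 0.9 times the median overlap fraction.
#
#     nOutliersMerge, posMerge = 2, mergedPositions    # merging strands 2 stacks
#     nOutliersResize, posResize = 0, shrunkPositions  # shrinking strands none
#     chosen = posMerge if nOutliersMerge <= nOutliersResize else posResize
#     # here shrinking wins, since it leaves no stacks ungrouped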
2678
2679
2680 def checkExclusivity(self, newPositions):
2681 """ Checks group positions to see if they are mutually exclusive (non-overlapping).
2682 """
2683
2684
2685
2686
2687 attrs = ['pID', 'ra', 'dec', 'size']
2688 if self.sysc.hasOffsetPos:
2689 attrs.append('off')
2690 if self.sysc.hasVarOrient:
2691 attrs.append('pa')
2692 pairAttr = ''
2693 for pairPos in ['1', '2']:
2694 pairAttr += ' '.join(['%s%s' % (attr, pairPos) for attr in attrs]) + ' '
2695
2696 pairAttr += "frac"
2697
2698
2699 ClosePair = namedtuple("ClosePair", pairAttr)
2700
2701 finPosDec = sorted(newPositions, key=itemgetter(2))
2702 closePairs = []
2703 for ii, pos in enumerate(finPosDec):
2704 _pID, ra, dec, gSize = pos[:4]
2705
2706 isLessMaxDist = True
2707 index = ii + 1
2708 isPosMatch = True
2709 if self.sysc.isVSA():
2710
2711 offID = pos[4]
2712 posAng = pos[5]
2713
2714 while isLessMaxDist and index < len(finPosDec):
2715 if self.sysc.isVSA() and (offID != finPosDec[index][4] or
2716 abs(astro.angDiff(posAng, finPosDec[index][5])) >
2717 2. * self.posAngTol):
2718 isPosMatch = False
2719
2720 totalSep = gSize + finPosDec[index][3]
2721 isLessMaxDist = finPosDec[index][2] - dec < math.radians(
2722 totalSep)
2723
2724 if isLessMaxDist and isPosMatch:
2725 dist = astro.angularSep((ra, dec),
2726 (finPosDec[index][1], finPosDec[index][2]))
2727
2728 if dist < math.radians(totalSep):
2729 dist /= math.radians(totalSep)
2730 closePairs.append(ClosePair(*pos + finPosDec[index] + (dist,)))
2731
2732 index += 1
2733
2734 return closePairs
2735
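# Worked sketch (comments only): two groups with sizes s1, s2 (degrees) at
# angular separation d (radians) form a close pair when d < radians(s1 + s2);
# the recorded frac is the separation as a fraction of the combined size.
#
#     import math
#     s1, s2 = 0.15, 0.20                    # group sizes in degrees
#     d = math.radians(0.25)                 # separation in radians
#     if d < math.radians(s1 + s2):          # 0.25 deg < 0.35 deg -> close pair
#         frac = d / math.radians(s1 + s2)   # ~0.71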
2736
2737
2738 def recalculateCentres(self, groups, maxDist):
2739 """ Recalculates group centres and sizes.
2740 """
2741
2742 minSize = self.sysc.minRaDecExtentStackDeg
2743 newPositions = []
2744 for pID, groupStacks in groups:
2745 if len(groupStacks) == 0:
2746 Logger.addMessage("<WARNING> Group has no inputs: %s" % pID)
2747 continue
2748 if len(groupStacks) > 1:
2749 groupCartCoords = astro.convSkyToCartesian(
2750 numpy.array([(pos[0], pos[1]) for pos in groupStacks]))
2751
2752 gCentre = astro.calcGroupCentre(groupCartCoords)
2753 raMed, decMed = gCentre[:2]
2754 else:
2755 raMed, decMed = groupStacks[0][:2]
2756
2757 if self.sysc.hasOffsetPos:
2758 offID = groupStacks[0].off
2759 if self.sysc.hasVarOrient:
2760 grpPA = astro.getMedAngle([pos.pa for pos in groupStacks],
2761 1.5 * self.posAngTol)
2762
2763 distCentre = []
2764
2765 for index in range(len(groupStacks)):
2766 distCentre.append(astro.angularSep((raMed, decMed), (
2767 groupStacks[index][0], groupStacks[index][1])))
2768
2769 typDist = min(max(minSize, 1.1 * math.degrees(max(distCentre))), maxDist)
2770
2771 pos = [pID, raMed, decMed, typDist]
2772
2773 if self.sysc.hasOffsetPos:
2774 pos.append(offID)
2775 if self.sysc.hasVarOrient:
2776 pos.append(grpPA)
2777
2778 newPositions.append(self.Position(*tuple(pos)))
2779
2780 return newPositions
2781
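# Worked sketch (comments only): the group size assigned above is 1.1 times
# the largest member distance from the recalculated centre (in degrees),
# clamped between sysc.minRaDecExtentStackDeg and the supplied maxDist.
# Example values below are assumptions:
#
#     import math
#     minSize, maxDist = 0.1, 0.75           # degrees
#     distCentre = [0.002, 0.004, 0.005]     # member separations in radians
#     typDist = min(max(minSize, 1.1 * math.degrees(max(distCentre))), maxDist)
#     # 1.1 * degrees(0.005) ~ 0.32 deg, inside [0.1, 0.75], so typDist ~ 0.32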
2782
2783
2784 def findNewGroups(self, possStacks, maxSep, fieldID):
2785 """ Takes the ungrouped stacks and gathers them into new groups.
2786 """
2787 groups = []
2788 for position in possStacks:
2789
2790 if groups:
2791 groups = self.findBestGroup(groups, position, maxSep)
2792 else:
2793 groups.append((fieldID, [position]))
2794
2795 return groups
2796
2797
2798
2799 def findBestGroup(self, groups, position, maxSep):
2800 """
2801 Criteria: all members of a group must lie within 2*maxSep (a diameter) of
2802 each other; best matches are counted when they lie within scale*maxSep.
2803
2804 """
2805
2806
2807 scale = 1.0
2808 maxWidth = 1.5
2809 nMtcGrp = []
2810 for pID, groupStacks in groups:
2811 nMatch = 0
2812 isCorPoint = True
2813
2814 for gPos in groupStacks:
2815 if abs(gPos[1] - position[1]) > maxWidth * math.radians(maxSep):
2816 nMatch = 0
2817 break
2818 rad = astro.angularSep(gPos[:2], position[:2])
2819 if rad > 2. * math.radians(maxSep):
2820 nMatch = 0
2821 break
2822 if (self.sysc.hasOffsetPos and gPos.off != position.off):
2823 isCorPoint = False
2824 if (self.sysc.hasVarOrient and abs(astro.angDiff(gPos.pa, position.pa)) >
2825 scale * self.posAngTol):
2826 isCorPoint = False
2827 if isCorPoint and rad < scale * math.radians(maxSep):
2828 nMatch += 1
2829
2830 nMtcGrp.append((pID, nMatch))
2831
2832 maxMatch = max(nMatch for pID, nMatch in nMtcGrp)
2833 if maxMatch > 0:
2834 match = False
2835 index = 0
2836 while not match:
2837 if nMtcGrp[index][1] == maxMatch:
2838 match = True
2839 fieldID = nMtcGrp[index][0]
2840 newGroups = []
2841 for pID, groupStacks in groups:
2842 if pID == fieldID:
2843 groupStacks.extend([position])
2844
2845 newGroups.append((pID, groupStacks))
2846
2847 groups = newGroups
2848
2849 index += 1
2850 else:
2851 newPid = max(pID for pID, groupStacks in groups) + 1
2852 groups.append((newPid, [position]))
2853
2854 return groups
2855
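# Sketch of the membership test above (comments only): a candidate stack may
# only join a group if it lies within 2*maxSep of every existing member (and,
# where applicable, shares the offset and a position angle within the
# tolerance); matches closer than scale*maxSep are counted and the group with
# the most matches wins, otherwise a new group is started.
#
#     # e.g. with maxSep = 0.3 deg and scale = 1.0: a stack 0.25 deg from every
#     # member counts as a match; one 0.5 deg away neither counts nor excludes;
#     # one more than 0.6 deg from any member rules that group out entirely.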
2856
2857
2858 def matchStacksTiles(self, finalTiles, finalStack):
2859 """ Checks that each tile has a complete and consistent set of input pawprints.
2860 """
2861
2862
2863 stacks = sorted(finalStack)
2864 matches = dict(self.tileStackMatch)
2865 badTiles = []
2866 tilefIDs = set()
2867 for tile in finalTiles:
2868 if tile[0] in matches:
2869 fIDs = matches[tile[0]]
2870 offsets = sorted(stacks[fID][4] for fID in fIDs)
2871 if offsets != range(6) and offsets != range(3):
2872 badTiles.append(tile[0])
2873 Logger.addMessage("<WARNING> Not enough pawprints to "
2874 "create a complete tile in tile %d: excluding tile "
2875 "pointing from list and resorting" % tile[0])
2876 else:
2877 tilefIDs.update(fIDs)
2878 else:
2879 badTiles.append(tile[0])
2880 Logger.addMessage("<WARNING> No pawprints found in tile "
2881 "field %d: excluding tile pointing from list and resorting"
2882 % tile[0])
2883
2884
2885 allPPts = set(stack[0] for stack in finalStack)
2886
2887
2888 if allPPts.difference(tilefIDs):
2889 Logger.addMessage("<WARNING> The following pawprint fields are not"
2890 " being tiled: %s"
2891 % ','.join(str(fID) for fID in allPPts.difference(tilefIDs)))
2892
2893
2894 finalTiles = sorted(tile for tile in finalTiles
2895 if tile[0] not in badTiles)
2896
2897 return finalTiles
2898
2899
2900
2901 def removeOldGroups(self, finalPositions, depPids):
2902 """ Removes deprecated fieldIDs (Pids) and then re-indexes the positions.
2903 """
2904 dodgyGrp = []
2905 for index, pos in enumerate(finalPositions):
2906 if pos[0] in depPids:
2907 dodgyGrp.append(index)
2908
2909 for ii, index in enumerate(dodgyGrp):
2910 finalPositions.pop(index - ii)
2911
2912 return [self.Position(*tuple([ii] + list(pos[1:])))
2913 for ii, pos in enumerate(finalPositions)]
2914
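# Worked sketch (comments only) of the re-indexing above: popping by
# (index - ii) compensates for the list shrinking as earlier items are
# removed, and the survivors are then renumbered from zero.
#
#     positions = ['A', 'B', 'C', 'D']
#     dodgyGrp = [1, 3]                  # indices of deprecated groups
#     for ii, index in enumerate(dodgyGrp):
#         positions.pop(index - ii)      # pops 'B', then 'D'
#     # positions == ['A', 'C'], re-labelled with fieldIDs 0 and 1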
2915
2916
2918 """
2919 This determines the setup of the multi-epoch tables based on the details in
2920 Programme.
2921
2922 """
2923 bandMergingCrit = None
2924 bestMatchTables = None
2925 corBMTable = None
2926 dateRangeOfTable = None
2927 isDeep = False
2928 mainDateRange = None
2929 programme = None
2930 synopticMergeLogTable = None
2931 synopticSourceTable = None
2932 sysc = None
2933 uncorBMTable = None
2934 varBMTable = None
2935 isCorSingleEpoch = False
2936 specialFilter = None
2937
2938
2939 def __init__(self, progTable, dateRange=None, isDeep=None,
2940 extBandMergeCrit=None):
3030
3031
3032
3034 """@return: Named tuple of the BestMatch table attributes
3035 @rtype: namedtuple()
3036 """
3037 isCorrel = 'SynopticSource' in bmTable
3038 if isCorrel:
3039 secObjID = 'synopticID'
3040 secName = self.programme.getSynopticSourceTable()
3041 else:
3042 secObjID = 'objID'
3043 secName = self.programme.getDetectionTable()
3044
3045 dateRange = self.getTimeRange(isCorrel)
3046
3047 BestMatch = namedtuple("BestMatch",
3048 "secObjID secName isCorrel dateRange tableName")
3049
3050 return BestMatch(secObjID, secName, isCorrel, dateRange, bmTable)
3051
3052
3053
3055 """@return: SynopticSourceTable name
3056 @rtype: str
3057 """
3058 return self.synopticSourceTable
3059
3060
3061
3063 """@return: Returns correlated neighbour table name if it exists
3064 @rtype: str
3065 """
3066 corTables = [table for table in self.neighTables
3067 if 'SynopticSource' in table]
3068
3069 if corTables:
3070 return corTables[0]
3071
3072
3073
3075 """@return: SynopticMergeLogTable name
3076 @rtype: str
3077 """
3078 return self.synopticMergeLogTable
3079
3080
3081
3082 def getSynopNeighTables(self):
3083 """@return: List of all the Synoptic neighbour tables produced
3084 @rtype: list(str)
3085 """
3086 return self.neighTables
3087
3088
3089
3091 """@return: Name of the Neighbour table used in the
3092 Variability table
3093 @rtype: str
3094 """
3095 return self.varBMTable.replace('BestMatch', '')
3096
3097
3098
3099 def getTimeRange(self, isCorrelated):
3100 """@return: The time range for a given BestMatch table
3101 @rtype: namedtuple(DateTime, DateTime)
3102 """
3103 for bestMatchTable in self.bestMatchTables:
3104 isCorrect = (('SynopticSource' in bestMatchTable and isCorrelated)
3105 or ('Detection' in bestMatchTable and not isCorrelated))
3106
3107 if isCorrect:
3108 return self.dateRangeOfTable[bestMatchTable]
3109
3110 return self.mainDateRange
3111
3112
3113
3115 """@return: 1 for the uncorrelated neighbour table, 3 for the correlated one.
3116 @rtype: int
3117 """
3118 if neighTable not in self.getSynopNeighTables():
3119 raise ProgrammeBuilder.CuError(
3120 "Table %s is not a neighbour table in this programme %s"
3121 % (neighTable, self.programme.getAcronym()))
3122
3123 return {self.uncorBMTable.replace("BestMatch", ''): 1,
3124 self.corBMTable.replace('BestMatch', ''): 3}[neighTable]
3125
3126
3127
3129 """@return: Is the variability table correlated
3130 @rtype: bool
3131 """
3132 return self.varBMTable == self.corBMTable
3133
3134
3135
3141
3142
3143
3145 """@return: Is there a SourceXDetectionBM table?
3146 @rtype: bool
3147 """
3148 return self.corBMTable in self.bestMatchTables
3149
3150
3151
3165
3166
3168 """ Processes each substring.
3169 """
3170 if subString.upper() == SystemConstants.bthSubStr:
3171
3172 self.neighTables = [self.uncorBMTable.replace('BestMatch', ''),
3173 self.corBMTable.replace('BestMatch', '')]
3174 self.bestMatchTables = [self.uncorBMTable, self.corBMTable]
3175
3176 elif subString.upper() == SystemConstants.uncSubStr:
3177 self.neighTables = [self.uncorBMTable.replace('BestMatch', '')]
3178 self.bestMatchTables = [self.uncorBMTable]
3179
3180 elif subString.upper() == SystemConstants.corSubStr:
3181 self.neighTables = [self.corBMTable.replace('BestMatch', '')]
3182 self.bestMatchTables = [self.corBMTable]
3183
3184 elif subString.upper().startswith(SystemConstants.varSubStr):
3185 varTabType = subString.split('-')[-1].upper()
3186 if varTabType == SystemConstants.uncSubStr:
3187 self.varBMTable = self.uncorBMTable
3188
3189 elif varTabType == SystemConstants.corSubStr:
3190 self.varBMTable = self.corBMTable
3191 elif (subString.upper().startswith(SystemConstants.corSubStr) and
3192 'ONEEPOCH' in subString):
3193 self.isCorSingleEpoch = True
3194
3195 self.bestMatchTables = [bmTable for bmTable in self.bestMatchTables
3196 if bmTable != self.corBMTable]
3197 elif (subString.upper().startswith('FIL')):
3198
3199 parts = subString.split(',')
3200 self.specialFilter = {'Name':parts[1]}
3201 orderList = []
3202 for ii, part in enumerate(parts):
3203 if ii > 1:
3204 if '-' in part:
3205 key, value = part.split('-')
3206 self.specialFilter[key] = value
3207 orderList.append(key)
3208 self.specialFilter['order'] = orderList
3209 elif ',' in subString:
3210
3211 prts = subString.split(',')
3212 if len(prts) == 3:
3213
3214 dRange = self.sysc.obsCal.dateRange(prts[1], prts[2])
3215 beginDT = max(dRange.begin, self.mainDateRange.begin)
3216 endDT = min(dRange.end, self.mainDateRange.end)
3217 tableName = {SystemConstants.uncSubStr: self.uncorBMTable,
3218 SystemConstants.corSubStr: self.corBMTable}[prts[0]]
3219
3220 self.dateRangeOfTable[tableName] = DateRange(beginDT, endDT)
3221
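# Worked sketch (comments only, with made-up key names): a filter sub-string
# of the form "FIL,<name>,<key>-<value>,..." is parsed above into
# self.specialFilter, preserving the key order:
#
#     parts = "FIL,Ks,minEpochs-5,maxSeeing-1.2".split(',')
#     specialFilter = {'Name': parts[1]}            # {'Name': 'Ks'}
#     orderList = []
#     for ii, part in enumerate(parts):
#         if ii > 1 and '-' in part:
#             key, value = part.split('-')
#             specialFilter[key] = value
#             orderList.append(key)
#     specialFilter['order'] = orderList
#     # -> {'Name': 'Ks', 'minEpochs': '5', 'maxSeeing': '1.2',
#     #     'order': ['minEpochs', 'maxSeeing']}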
3222
3223
3225 """
3226 Calculates a matching radius for the synoptic neighbour table, in the
3227 range 1 to 10 arcsec, based on the density of sources in the source
3228 table and updates RequiredNeighbours in the current database.
3229
3230 """
3231 Logger.addMessage("Calculating optimal synoptic neighbour radius...")
3232 db = self.programme._db
3233 passbands = df.PassbandList(self.programme).getMfdCols()
3234 areaInfo = [db.query(
3235 selectStr="frameSetID, axis1length*axis2length*xPixSize*yPixSize",
3236 fromStr=self.programme.getMergeLogTable()
3237 + " AS ML, MultiframeDetector AS M, CurrentAstrometry AS C",
3238 whereStr="ML.%s=M.multiframeID AND M.multiframeID=C.multiframeID"
3239 " AND ML.%s=M.extNum AND M.extNum=C.extNum AND M.extNum>0"
3240 " AND M.multiframeID>0 AND M.%s"
3241 % (pFrame + (DepCodes.selectNonDeprecated,)))
3242 for pFrame in passbands]
3243
3244 totalAreaAsecSq = 0
3245 for frameSetID in \
3246 db.query("frameSetID", self.programme.getMergeLogTable()):
3247
3248 areas = [area for frame in areaInfo for fSetID, area in frame
3249 if fSetID == frameSetID]
3250
3251 totalAreaAsecSq += max(numpy.median(areas), 0)
3252
3253 numSources = db.queryNumRows(self.programme.getSourceTable())
3254
3255 density = numSources / totalAreaAsecSq if totalAreaAsecSq > 0 else 0
3256 radius = 1 / math.sqrt(math.pi * density) if density > 0 else 0
3257 minMatchRadius = 1
3258 maxMatchRadius = 10
3259 joinCriterion = max(min(radius, maxMatchRadius), minMatchRadius) / 3600
3260 for neighbourTable in self.getSynopNeighTables():
3261 done = db.update("RequiredNeighbours",
3262 "joinCriterion=%s" % joinCriterion,
3263 where="neighbourTable=%r" % neighbourTable)
3264
3265 Logger.addMessage(
3266 "...updated RequiredNeighbours for " + neighbourTable
3267 if done else "...failed on " + neighbourTable)
3268
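# Worked sketch (comments only): the join criterion above is the radius of a
# circle containing one source on average, clamped to 1-10 arcsec and
# converted to degrees for RequiredNeighbours.joinCriterion:
#
#     import math
#     numSources = 1.0e6
#     totalAreaAsecSq = 2 * 3600.0 ** 2            # ~2 sq deg in sq arcsec
#     density = numSources / totalAreaAsecSq       # ~0.039 sources per sq arcsec
#     radius = 1 / math.sqrt(math.pi * density)    # ~2.9 arcsec
#     joinCriterion = max(min(radius, 10), 1) / 3600   # ~8.0e-4 deg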
3269
3270
3299
3300
3301
3303 """ @return: Dictionary of table names parsed from the 'CREATE TABLE' lines,
3304 each mapped to status.dropTables. """
3305 return dict((line.split()[2][:-1], status.dropTables)
3306 for line in tableLines if 'create table' in line.lower())
3307
3308
3309