Coverage for breaker/stats/survey_footprint.py : 54%

Hot-keys on this page
r m x p toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
#!/usr/local/bin/python # encoding: utf-8 *Generate stats for a gravitational wave survey campaign footprint* """ ################# GLOBAL IMPORTS #################### # SUPPRESS MATPLOTLIB WARNINGS
""" *Generate stats for a gravitational wave survey campaign footprint*
**Key Arguments:** - ``log`` -- logger - ``settings`` -- the settings dictionary - ``gwid`` -- the unique wave id - ``telescope`` -- select a single telescope. The default is to select all telescopes. Default *False* [False|ps1|atlas]
**Usage:**
To generate cumulative stats for the wave "G184098"
.. code-block:: python
from breaker.stats import survey_footprint stats = survey_footprint( log=log, settings=settings, gwid="G184098", telescope=False ) stats.get() """ # Initialisation
self, log, settings=False, gwid=False, telescope=False ):
# INITIAL ACTIONS - CONNECT TO THE DATABASE REQUIRED log=self.log, settings=self.settings )
""" *get the survey footprint stats and print to screen/file*
**Return:** - ``None`` """ self.log.info('starting the ``get`` method')
# GRAB METADATA FROM THE DATABASES this = plot_wave_observational_timelines( log=self.log, settings=self.settings) plotParameters, ps1Transients, ps1Pointings, altasPointings, atlasTransients = this.get_gw_parameters_from_settings( gwid=self.gwid)
if self.telescope == "atlas": pointings = altasPointings pointingSide = 5.46 if self.telescope == "ps1": pointings = ps1Pointings pointingSide = 0.4 telescope = self.telescope.upper()
# SORT ALL POINTINGS VIA MJD pointings = sorted(list(pointings), key=itemgetter('mjd'), reverse=False)
nside, hpixArea, aMap, healpixIds, wr, wd = self._create_healpixid_coordinate_grid()
print "EXPID, RA, DEC, MJD, EXPTIME, FILTER, LIM-MAG, EXP-AREA, EXP-LIKELIHOOD, CUM-AREA, CUM-LIKELIHOOD" % locals()
allHealpixIds = np.array([]) dictList = [] iindex = 0 count = len(pointings) for pti, pt in enumerate(pointings): pti = pti + 1
if pti > 1: # Cursor up one line and clear line sys.stdout.write("\x1b[1A\x1b[2K")
percent = (float(pti) / float(count)) * 100. print '%(pti)s/%(count)s (%(percent)1.1f%% done): summing total area and likelihood covered by %(telescope)s' % locals()
thisDict = collections.OrderedDict(sorted({}.items())) pra = pt["raDeg"] pdec = pt["decDeg"] pmjd = pt["mjd"] pexpid = pt["exp_id"] pexptime = pt["exp_time"] pfilter = pt["filter"] plim = pt["limiting_magnitude"]
# DETERMINE THE CORNERS FOR EACH ATLAS EXPOSURE AS MAPPED TO THE # SKY decCorners = (pdec - pointingSide / 2, pdec + pointingSide / 2) corners = [] for d in decCorners: if d > 90.: d = 180. - d elif d < -90.: d = -180 - d raCorners = (pra - (pointingSide / 2) / np.cos(d * self.DEG_TO_RAD_FACTOR), pra + (pointingSide / 2) / np.cos(d * self.DEG_TO_RAD_FACTOR)) for r in raCorners: if r > 360.: r = 720. - r elif r < 0.: r = 360. + r corners.append(hp.ang2vec(r, d, lonlat=True))
# FLIP CORNERS 3 & 4 SO HEALPY UNDERSTANDS POLYGON SHAPE corners = [corners[0], corners[1], corners[3], corners[2]]
# RETURN HEALPIXELS IN EXPOSURE AREA expPixels = hp.query_polygon(nside, np.array( corners))
expProb = [] expProb[:] = [aMap[i] for i in expPixels] expProb = sum(expProb) expArea = len(expPixels) * hpixArea if expProb / expArea < 2e-6: continue
pindex = "%(iindex)05d" % locals() iindex += 1
allHealpixIds = np.append(allHealpixIds, expPixels) allHealpixIds = np.unique(allHealpixIds) cumProb = [] cumProb[:] = [aMap[int(i)] for i in allHealpixIds] cumProb = sum(cumProb) cumArea = len(allHealpixIds) * hpixArea thisDict["INDEX"] = pindex thisDict["EXPID"] = pexpid thisDict["RA"] = "%(pra)5.5f" % locals() thisDict["DEC"] = "%(pdec)5.5f" % locals() thisDict["MJD"] = "%(pmjd)6.6f" % locals() thisDict["EXPTIME"] = "%(pexptime)02.1f" % locals() thisDict["FILTER"] = pfilter try: thisDict["LIM-MAG"] = "%(plim)5.2f" % locals() except: thisDict["LIM-MAG"] = "NaN" # thisDict["EXP-AREA"] = expArea # thisDict["EXP-LIKELIHOOD"] = expProb thisDict["CUM-AREA"] = "%(cumArea)05.2f" % locals() thisDict["CUM-LIKELIHOOD"] = "%(cumProb)05.2f" % locals() dictList.append(thisDict)
print "AREA: %(cumArea)0.2f. PROB: %(cumProb)0.5f" % locals()
printFile = self.settings["output directory"] + "/" + \ self.gwid + "/" + self.gwid + "-" + self.telescope + "-coverage-stats.csv"
# RECURSIVELY CREATE MISSING DIRECTORIES if not os.path.exists(self.settings["output directory"] + "/" + self.gwid): os.makedirs(self.settings["output directory"] + "/" + self.gwid)
dataSet = list_of_dictionaries( log=self.log, listOfDictionaries=dictList, ) csvData = dataSet.csv(filepath=printFile)
print "The coverage stats file was written to `%(printFile)s`" % locals()
self.log.info('completed the ``get`` method') return None
self): """*create healpixid coordinate grid* """
# GET THE PROBABILITY MAP FOR THE GIVEN GWID "gravitational waves"][self.gwid]["mapPath"] message = "the path to the map %s does not exist on this machine" % ( pathToProbMap,) self.log.critical(message) raise IOError(message)
# UNPACK THE PLOT PARAMETERS
# DETERMINE THE PIXEL GRID X,Y RANGES
# CREATE A NEW WCS OBJECT
# SET THE REQUIRED PIXEL SIZE
# SET THE REFERENCE PIXEL TO THE CENTRE PIXEL
# FOR AN ORTHOGONAL GRID THE CRPIX2 VALUE MUST BE ZERO AND CRPIX2 # MUST REFLECT THIS
# WORLD COORDINATES AT REFERENCE PIXEL # USE THE "GNOMONIC" PROJECTION ("COORDINATESYS---PROJECTION")
# CREATE THE FITS HEADER WITH WCS
# CREATE A PIXEL GRID - 2 ARRAYS OF X, Y
# CONVERT THE PIXELS TO WORLD COORDINATES
# MAKE SURE RA IS +VE
# READ HEALPIX MAPS FROM FITS FILE # THIS FILE IS A ONE COLUMN FITS BINARY, WITH EACH CELL CONTAINING AN # ARRAY OF PROBABILITIES (3,072 ROWS)
# import matplotlib.pyplot as plt # hp.mollview(aMap, title="mollview image RING", cmap="YlOrRd") # hp.graticule() # plt.show()
# HEALPY REQUIRES RA, DEC IN RADIANS AND AS TWO SEPERATE ARRAYS
# THETA: IS THE POLAR ANGLE, RANGING FROM 0 AT THE NORTH POLE TO PI AT THE SOUTH POLE. # PHI: THE AZIMUTHAL ANGLE ON THE SPHERE FROM 0 TO 2PI # CONVERT DEC TO THE REQUIRED HEALPIX FORMAT
# CONVERT WORLD TO HEALPIX INDICES (NON-UNIQUE IDS!) phi=wr * self.DEG_TO_RAD_FACTOR)
self, exposures, pointingSide ): """ *generate a the likeihood coverage of an exposure*
**Key Arguments:** - ``exposures`` -- a dictionary of exposures with the unique exposures IDs as keys and (ra, dec) as tuple value. - ``pointingSide`` -- the size of the FOV of the exposure/skycell
**Return:** - ``exposureIDs`` -- a list of the exposureIDs as they appear in the original input exposure dictionary - ``pointingSide`` -- a list of total likelihood coverage of the exposures
**Usage:**
See class docstring """
# DETERMINE THE CORNERS FOR EACH ATLAS EXPOSURE AS MAPPED TO THE # SKY pdec + pointingSide / 2) d = 180. - d d = -180 - d pra + (pointingSide / 2) / np.cos(d * self.DEG_TO_RAD_FACTOR)) r = 720. - r r = 360. + r
# FLIP CORNERS 3 & 4 SO HEALPY UNDERSTANDS POLYGON SHAPE corners[3], corners[2]]
# RETURN HEALPIXELS IN EXPOSURE AREA corners))
# use the tab-trigger below for new method # xt-class-method |