-
Notifications
You must be signed in to change notification settings - Fork 0
/
SnP_funcs.py
1364 lines (1089 loc) · 54.9 KB
/
SnP_funcs.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
"""
The functions that are used in all the modules of the SALT&PEPPER pipeline
Author: George Hume
2023
"""
### IMPORTS ###
import csv
import json
import time
import numpy as np
import datetime as dt
from skyfield import almanac
from skyfield.api import N, E, wgs84, load, utc, Star
from skyfield.positionlib import Apparent
from skyfield.framelib import galactic_frame
import subprocess
from astropy import units as u
from astropy.coordinates import SkyCoord
import warnings #stops warning re: deprecation
with warnings.catch_warnings():
warnings.simplefilter("ignore")
from astroquery.vizier import Vizier
from astroquery.ipac.ned import Ned
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import os
import glob
import requests
import ltrtml
################# FUNCTIONS FOR UPDATING TNS DATABASE ##########################
def loadDB(filename):
    """
    Load the local TNS database from its CSV file.

    Arguments:
    - filename: file name of the TNS database (usually 'tns_public_objects.csv')
    Outputs:
    - date: the date the TNS database was updated, a string '%Y-%m-%d %H:%M:%S'
    - headers: the column headers (second row of the file) as a list
    - database: numpy object array containing every remaining row of the file
    """
    # the file layout is: one date row, one header row, then the data rows
    with open(filename) as csvfile:
        reader = csv.reader(csvfile)
        date_row = next(reader)     # first row holds only the release date
        headers = next(reader)      # second row holds the column headers
        entries = [row for row in reader]  # everything else is data
    # object dtype keeps every field as its original string
    database = np.array(entries, dtype="object")
    return date_row[0], headers, database
################################################################################
def dload(file, creds):
    """
    Function to download CSV files from the TNS via a TNS bot with an API key.
    Arguments:
    - file: A string representing file name on the TNS database (e.g., 'tns_public_objects.csv')
    - creds: dict (parsed from the credentials JSON) containing the tns_id, name, and api_key of the TNS bot
    Outputs:
    - Saves the unzipped file to the ../xOUTPUTS directory
    """
    #pieces of the curl command: the TNS requires a 'tns_marker' user-agent that
    #identifies the bot, and the api_key passed as POST data; the zip is
    #redirected into ../xOUTPUTS
    cmd1 = '''curl -X POST -H 'user-agent: tns_marker{"tns_id":'''
    cmd2 = ''',"type": "bot", "name":"'''
    cmd3 = '''"}' -d 'api_key='''
    cmd4 = '''' https://www.wis-tns.org/system/files/tns_public_objects/'''
    cmd5 = '''.zip > ../xOUTPUTS/'''
    cmd6 = '''.zip'''
    cmd = f'{cmd1}{creds["tns_id"]}{cmd2}{creds["name"]}{cmd3}{creds["api_key"]}{cmd4}{file}{cmd5}{file}{cmd6}'
    #NOTE(review): shell=True with the api_key interpolated into the command
    #string exposes the key in the local process list; consider subprocess.run
    #with an argument list and an explicit stdout file in future
    #do the curl command to download the update file
    subprocess.call(cmd,shell=True)
    #unzip the csv and delete the zip file (both via the shell)
    uzip = f"unzip ../xOUTPUTS/{file}.zip -d ../xOUTPUTS/"
    rem = f"rm ../xOUTPUTS/{file}.zip"
    subprocess.call(uzip,shell=True)
    subprocess.call(rem,shell=True)
################################################################################
def UPdate(ufile,date,database):
    """
    Apply a TNS daily-update file to the local TNS database and save the result.

    Arguments:
    - ufile: name of the update file from the TNS, of the form 'tns_public_objects_YYYYMMDD.csv'.
    - date: today's date as a datetime object (written to the first row of the output CSV).
    - database: the values of the tns database (minus the date and headers) as a numpy object array.
    Outputs:
    - overwrites ../xOUTPUTS/tns_public_objects.csv with the newly updated database
    """
    #load in update entries (skip date and headers tho)
    UDname = f"../xOUTPUTS/{ufile}"
    dummy,headers,updates = loadDB(UDname)
    #loop from bottom to top so the newest entries end up at the top of the database
    for i in range(len(updates)):
        row = updates[-(i+1)] #extracts rows from bottom upwards (index -1 -> -len)
        ID = row[0] #the TNS object ID of this update
        #BUGFIX: match the ID against the ID column (column 0) only; the old
        #code searched the whole array, so any other field that happened to
        #equal the ID string could be mistaken for an existing entry
        IDrow = np.where(database[:, 0] == ID)[0]
        if IDrow.size == 0: #ID not in database -> brand new object
            database = np.vstack([row,database]) #insert the new entry at the top
        else: #ID already present -> overwrite the entry in place
            database[IDrow[0]] = row
    database = np.vstack([headers,database]) #add the headers back to the top
    #save out the database; newline='' stops csv.writer doubling line
    #endings on Windows
    filename = "../xOUTPUTS/tns_public_objects.csv"
    with open(filename, 'w', newline='') as file:
        csvwriter = csv.writer(file,delimiter=",") # create a csvwriter object
        csvwriter.writerow([date.strftime('%Y-%m-%d %H:%M:%S')]) #add date to first row
        csvwriter.writerows(database)
################################################################################
def delay():
    """
    Block until the next UTC midnight if it is less than 14 hours away.
    This lets the caller download the TNS updates as close as possible to
    when they are released; when midnight is further off, the daily file has
    presumably not been fetched yet, so return immediately instead.
    """
    now = dt.datetime.utcnow()
    #midnight at the start of the next UTC day
    next_mid = dt.datetime.combine(now + dt.timedelta(days=1), dt.datetime.min.time())
    hours_left = (next_mid - now).total_seconds() / 3600
    #more than 14 hours to go -> likely haven't downloaded today's TNS yet,
    #so don't delay - download ASAP
    if hours_left >= 14:
        return
    #otherwise poll every 10 minutes until midnight has passed
    while now < next_mid:
        now = dt.datetime.utcnow()
        time.sleep(600) #wait 10 mins for TNS to release updates
    return
################# FUNCTIONS FOR CALCULATING PRIORITY SCORES ####################
def TNSlice(database,date):
    """
    Slice the TNS database down to transients discovered in the last 3 months
    AND modified in the last 2 weeks.
    Arguments:
    - database: numpy object array containing the TNS database rows
      (discovery date in column 12, modification date in the last column)
    - date: release date of the TNS database, a string '%Y-%m-%d %H:%M:%S'
    Outputs:
    - sliceDB: numpy object array holding only the rows that pass both cuts
    """
    #release date of the CSV as a datetime object
    release = dt.datetime.strptime(date, '%Y-%m-%d %H:%M:%S')
    #cut-off dates for modification and discovery
    mod_cutoff = release - dt.timedelta(weeks=2)    #2 weeks ago
    disc_cutoff = release - dt.timedelta(weeks=12)  #3 months (12 weeks) ago
    kept = []
    for row in database:
        #last column is the modification time, column 12 the discovery time;
        #[:-4] strips the fractional seconds from the discovery timestamp
        modified = dt.datetime.strptime(row[-1], '%Y-%m-%d %H:%M:%S')
        discovered = dt.datetime.strptime(row[12][:-4], '%Y-%m-%d %H:%M:%S')
        #keep only recently-discovered AND recently-modified transients
        if discovered > disc_cutoff and modified > mod_cutoff:
            kept.append(row)
    sliceDB = np.array(kept, dtype=object)
    return sliceDB
################################################################################
def Visibility(ra, dec, lat, long, elv, ephm = 'de421.bsp'):
    """
    Calculates the observable dark-time and mean lunar separation of a list of
    targets for tonight, given their right ascensions and declinations and the
    latitude, longitude and elevation of the observing site.
    Arguments:
    - ra: array of right ascensions of the targets (decimal degrees; elements are converted with float())
    - dec: array of declinations of the targets (decimal degrees; elements are converted with float())
    - lat: the latitude of the location (in decimal degrees)
    - long: the eastwards longitude of the location (in decimal degrees)
    - elv: the elevation of the location (in metres)
    - ephm: the path to the ephemerides file for skyfield (default is 'de421.bsp')
    Outputs:
    - tObs: array of times (hours) each target is above 35 deg altitude during dark time
    - lSep: array of mean moon-target separations during dark time (decimal degrees; 0 when unobservable)
    - mill: the fraction of the moon that is illuminated at midnight (float)
    Side effects:
    - writes tonight's sunset/twilight/sunrise times to ../xOUTPUTS/solar_times.json
    """
    #tonight's window: today at midday to tomorrow at midday
    #NOTE(review): dt.datetime.now() is the naive LOCAL clock but is tagged
    #with tzinfo=utc below - assumes the host runs on UTC; confirm
    today = dt.datetime.combine(dt.datetime.now(), dt.datetime.min.time()) + dt.timedelta(days=0.5)
    today =today.replace(tzinfo=utc)
    tomorrow = today + dt.timedelta(days=1) #next day at midday
    tomorrow = tomorrow.replace(tzinfo=utc)
    ### Set-up sky-field observing ##
    location = wgs84.latlon(lat * N, long * E, elevation_m = elv) #location of observatory
    ts = load.timescale() #loads in timescale
    eph = load(ephm) #loads in ephemerides
    #sets up sun, earth (needed for calculating dark time and our location respectively) and moon (for illumination, etc.)
    earth, sun, moon = eph['earth'], eph['sun'], eph['moon']
    Epos = earth + location #observing position (i.e., the position of the follow-up telescope)
    #makes skyfield time objects from today and tomorrow
    t0 = ts.from_datetime(today)
    t1 = ts.from_datetime(tomorrow)
    ### Find the dark time start and end ###
    f = almanac.dark_twilight_day(eph, location)
    times, events = almanac.find_discrete(t0, t1, f)
    #indices pick out the twilight transitions within the midday-to-midday window
    sunset = times[0]
    darkstart = times[3]
    darkend = times[4]
    sunrise = times[7]
    #save the sunset/rise and twilight times as JSON to use in visplots and the follow-up script
    solar_times = {
        "nightstart_date": today.strftime('%Y-%m-%d'),
        "sunset": sunset.utc_datetime().strftime('%H:%M:%S'),
        "darkstart": darkstart.utc_datetime().strftime('%H:%M:%S'),
        "darkend": darkend.utc_datetime().strftime('%H:%M:%S'),
        "sunrise": sunrise.utc_datetime().strftime('%H:%M:%S'),
        "nightend_date": sunrise.utc_datetime().strftime('%Y-%m-%d')
    }
    with open(f'../xOUTPUTS/solar_times.json', 'w') as fp:
        json.dump(solar_times, fp,indent=4)
    #find the UTC time at the middle of the night (midpoint of dark time)
    middark = ts.from_datetime(darkstart.utc_datetime()+((darkend.utc_datetime() - darkstart.utc_datetime())/2))
    darktimes = [darkstart,middark,darkend] #times at the start, middle, and end of dark time
    ## calculate moon's alt, phase, and illumination ##
    midnight = t0 + dt.timedelta(hours=12) #t0 is midday, so +12h is midnight
    mastro = Epos.at(midnight).observe(moon)
    mapp = mastro.apparent()
    malt, maz, mdst = mapp.altaz()
    mphase = almanac.moon_phase(eph, t0)
    mill = almanac.fraction_illuminated(eph,"moon",midnight)
    #NOTE(review): malt and mphase are computed but not used in the return value
    malt, mphase = malt.degrees, mphase.degrees
    ## FUNCTIONS FOR CALCULATING OBSERVABLE TIME OF TARGET ##
    def transit_time(tar,t_start,t_end):
        """
        Finds the meridian transit time (in UTC) of a target between two times
        (which should be ~24hrs apart) and the altitude of that transit in degrees.
        Arguments:
        - tar: the target as a skyfield Star object.
        - t_start: start time as a skyfield time object.
        - t_end: end time (should be ~24hrs later) as a skyfield time object.
        Output:
        - t_time: time that the object transits the meridian as a skyfield time object.
        - t_alt: altitude in degrees at which the object transits (float).
        """
        #function that calculates transits
        f = almanac.meridian_transits(eph, tar, location)
        t, y = almanac.find_discrete(t_start, t_end, f)
        #t is times of transit,
        #y is array with 0 for antimeridian transit and 1 for meridian transit (which we are interested in)
        #so t_time is the element at the same index as 1 in y in the t array
        meridian_index = np.where(y==1)[0]
        t_time = t[meridian_index]
        #now need to find altitude of star at this time
        astro = Epos.at(t_time).observe(tar)
        app = astro.apparent()
        alt, az, distance = app.altaz()
        t_alt = alt.degrees
        return t_time[0], t_alt[0]
    def alt2HA(alt,lt,dec):
        '''
        Calculates the absolute value of the hour angle of a target at a specified
        altitude, given the latitude of the location and the declination of the target.
        Arguments:
        - alt: the altitude to find the HA at, in decimal degrees (float).
        - lt: the latitude of the observing location, in decimal degrees (float).
        - dec: the declination of the target, in decimal degrees (float)
        Output:
        - HA: absolute HA of the target at that altitude in decimal hours,
          or None if the target never reaches the altitude.
        '''
        #convert dec, lat and alt into radians
        altR = np.radians(alt)
        latR = np.radians(lt)
        decR = np.radians(dec)
        #spherical-astronomy identity: sin(alt) = sin(lat)sin(dec) + cos(lat)cos(dec)cos(HA)
        cosHAnum = np.sin(altR) - (np.sin(latR)*np.sin(decR)) #numerator of cos(HA)
        cosHAden = np.cos(latR)*np.cos(decR) #denominator of cos(HA)
        cosHA = cosHAnum/cosHAden
        if cosHA > 1: #i.e., target never reaches the requested altitude
            HA = None
        else:
            #find the hour angle using arccos
            HAdeg = np.degrees(np.arccos(cosHA)) #hour angle in degrees
            HA = HAdeg/15 #hour angle in hours (15 deg per hour)
        return HA
    def obs_time(dt_start,dt_end,rise_t,set_t):
        """
        Calculates how long a target is visible given the times dark time starts
        and ends, and the times the target rises above and sets below a certain
        altitude. All times must be in the same timezone, ideally UTC.
        Arguments:
        - dt_start: the time dark time starts as a skyfield time object.
        - dt_end: the time dark time ends as a skyfield time object.
        - rise_t: the time the target rises above the certain altitude.
        - set_t: the time the target sets below the certain altitude.
        Output:
        - t_obs: the time the target is observable in dark time, as a decimal hour (float).
        """
        #convert the times into datetime objects
        dt_start, dt_end = dt_start.utc_datetime(), dt_end.utc_datetime()
        rise_t, set_t = rise_t.utc_datetime(), set_t.utc_datetime()
        ## Intersect [rise_t, set_t] with [dt_start, dt_end] case by case ##
        #first check: does the target rise after dark time starts?
        if rise_t > dt_start:
            #next check: does it also rise after dark time ends?
            if rise_t > dt_end:
                #rises and sets after dark time, so can't observe
                t_obs = 0
            else:
                #the target rises in dark time
                #next check: does it set after dark time ends?
                if set_t > dt_end:
                    #rises in dark time and sets after it:
                    #observable time is end of dark time minus rise time
                    t_obs = (dt_end - rise_t).seconds/3600 #divide by 3600 to get hours
                else:
                    #rises and sets within dark time:
                    #observable time is the whole interval above the altitude
                    t_obs = (set_t - rise_t).seconds/3600
        else:
            #target rises before dark time
            #next check: does it set after dark time starts?
            if set_t > dt_start:
                #next check: does it set after dark time ends?
                if set_t > dt_end:
                    #rises before dark time and sets after it:
                    #observable time is the full length of dark time
                    t_obs = (dt_end - dt_start).seconds/3600
                else:
                    #rises before dark time and sets during it:
                    #observable time is set time minus start of dark time
                    t_obs = (set_t - dt_start).seconds/3600
            else:
                #rises and sets before dark time starts, so can't observe
                t_obs = 0
        return t_obs
    ## CALCULATING OUTPUTS ##
    #empty lists to fill, one entry per target
    tObs, lSep = [], []
    for k in range(ra.size):
        #RA and Dec of target in decimal degrees converted from string
        RA = float(ra[k])
        Dec = float(dec[k])
        #convert RA in decimal degrees to RA in hrs, mins, secs
        raH = RA/15 #RA decimal degrees to decimal hours
        ra_hours = int(raH) #hours of RA
        raM = (raH-ra_hours)*60 #decimal minutes of RA
        ra_mins = int(raM) #minutes of RA
        ra_secs = (raM-ra_mins)*60 #seconds of RA
        #convert DEC in decimal degrees to dec in degs, arcmins, arcsecs
        dec_degs = int(Dec) #degs of dec
        decM = (Dec-dec_degs)*60 #decimal arcminutes of dec
        dec_mins = int(decM) #arcminutes of dec
        dec_secs = (decM-dec_mins)*60 #arcseconds of dec
        #creates a Star object from RA and Dec which represents the transient
        target = Star(ra_hours=(ra_hours, ra_mins, ra_secs),dec_degrees=(dec_degs, dec_mins, dec_secs))
        #finding the UTC time the target transits the meridian
        trans_time, trans_alt = transit_time(target,t0,t1)
        # if the transit altitude is less than 35 degrees then can't observe it #
        if trans_alt < 35:
            t_obs = 0
            asep = 0 #set lunar separation to be zero also
        else: #otherwise can continue
            # find the HA of target at 35 degs
            HA = alt2HA(35,lat,Dec)
            if HA == None: #target never rises above 35 degrees
                t_obs = 0
                asep = 0 #set lunar separation to be zero also
            else:
                ## Find the times the target rises above 35 and sets below 35 using the HA and transit time ##
                rise35 = trans_time - dt.timedelta(hours=HA)
                set35 = trans_time + dt.timedelta(hours=HA)
                #NOTE(review): above35 is computed but never used
                above35 = ((set35.utc_datetime()-rise35.utc_datetime()).seconds)/3600 #time above 35 degs
                #find the observable time of the target during dark time
                t_obs = obs_time(darkstart,darkend,rise35,set35)
                if t_obs > 0: #if non-zero observable time then calculate the lunar separation
                    a_seps = []
                    for DT in darktimes:
                        # angular separation doesn't depend on location on earth, just time
                        e = earth.at(DT) #set earth as centre
                        m = e.observe(moon) #observe moon at time DT
                        T = e.observe(target) #observe target at time DT
                        a_sep = m.separation_from(T).degrees #angular separation in degrees
                        a_seps.append(a_sep) #append to list
                    asep = np.mean(a_seps) #mean angular separation during darktime
                else: #no observable time, so skip the angular separation
                    asep = 0
        # add to the lists
        tObs.append(t_obs)
        lSep.append(asep)
    #return as numpy arrays, along with the lunar illumination
    return np.array(tObs), np.array(lSep), mill
################################################################################
def xmatch_rm(tlist):
    '''Remove transients from a list if they are too close to (i.e. probably are)
    a catalogued galaxy, and attach a possible host-galaxy name to the survivors.
    Arguments:
    - tlist: numpy object array of targets with RA and DEC in column indices 3
      and 4 (name parts in columns 1 and 2, discovery magnitude in column 7)
    Outputs:
    - new_tlist: tlist with the too-close transients removed and the possible
      host name appended as a final column
    Notes:
    - queries VizieR (GLADE+) and Hyper LEDA over the network for every target.
    '''
    ### Internal Functions ###
    def host_name(xtable,host_idx):
        """
        Finds the best catalogue name for a host galaxy that cross-matched with
        a transient, working through the name columns in order of preference
        and falling back to the GLADE designation.
        Arguments:
        - xtable: table produced by the query to VizieR
        - host_idx: index of the host row within xtable
        Output:
        - name: name of the host galaxy as a string
        """
        #names of the column headers (skip the first, non-name column)
        namecols = xtable[0].colnames[1:]
        for header in namecols:
            entry = str(xtable[0][header][host_idx])
            if (entry == "-") or (entry == "--"):
                if header == "WISExSCOS":
                    #no preferred name found by this point - default to GLADE
                    name = f"GLADE {str(xtable[0]['GLADE_'][host_idx])}"
                else:
                    #this column is empty - go to next column
                    continue
            else:
                if header == "PGC":
                    name = f"{header} {entry}"
                elif header == "GWGC":
                    name = entry
                elif header == "HyperLEDA":
                    if entry.isdigit():
                        #a purely numeric name needs the LEDA prefix
                        name = f"LEDA {entry}"
                    else:
                        #letters in the name means no prefix needed
                        name = entry
                elif header == "_2MASS":
                    name = f"2MASX {entry}"
                else:
                    #default to GLADE name after 2MASS
                    name = f"GLADE {str(xtable[0]['GLADE_'][host_idx])}"
                break
        return name
    def d25_from_HL(host_name, sep):
        """
        Queries Hyper LEDA for a galaxy's apparent radius and checks whether the
        transient lies within it.
        Arguments:
        - host_name: the name of the galaxy
        - sep: the separation of the transient from the galaxy, in Astropy units of angle
        Outputs:
        - appR: apparent radius of the galaxy in arcsecs (None if no radius found)
        - hosted: True if the transient lies within the radius (None if no radius found)
        """
        #download the whole HTML page for the object from the Hyper LEDA website
        r = requests.get(f"https://leda.univ-lyon1.fr/ledacat.cgi?o={host_name}")
        HLtxt = r.text
        #slice out the logd25 value (log of apparent diameter, where d25 is in 0.1 arcmin)
        if HLtxt.find(">logd25<") != -1: #if a d25 value is present
            logd25 = float(HLtxt[HLtxt.find(">logd25<"):HLtxt.find("</td><td>log(0.1 arcmin)")].split()[1])
            appR = (10**(logd25-1) * u.arcmin)/2 #apparent radius (hence divide by 2)
            if sep > appR:
                hosted = False
            else:
                hosted = True
            appR = appR.to(u.arcsec).value
        else: #no d25 value available
            appR = None
            hosted = None
        return appR, hosted
    def gal_info(xmatch,idx):
        """
        Returns properties of a galaxy from an Astroquery VizieR table.
        Note: uses the transient SkyCoord `t` from the enclosing loop to
        compute the separation.
        Arguments:
        - xmatch: the Astroquery VizieR table produced via cross-match with a transient
        - idx: the index of the galaxy within the table
        Outputs (as a list):
        - name: name of the galaxy
        - Bmag: the B-band apparent magnitude of the galaxy
        - appR: apparent radius in arcsec from Hyper LEDA (None if not found)
        - separ: separation between galaxy and transient in arcsec
        - hosted: True if the transient lies within the radius (None if no radius)
        """
        #galaxy sky position
        gRA = xmatch[0]["RAJ2000"][idx]
        gDEC = xmatch[0]["DEJ2000"][idx]
        gal = SkyCoord(ra=gRA*u.deg,dec=gDEC*u.deg)
        separ = t.separation(gal).to(u.arcsec)
        Bmag = xmatch[0]["Bmag"][idx]
        name = host_name(xmatch,idx)
        #query Hyper LEDA with the name for the apparent radius
        appR, hosted = d25_from_HL(name,separ)
        return [name,Bmag,appR,separ.value,hosted]
    ## Transients ##
    #names of the transients (prefix + designation columns)
    tnames = tlist.T[1]+tlist.T[2]
    #magnitudes of transients
    tmags = tlist.T[7].astype(float)
    #extract ra and dec of transients
    RAs, DECs = tlist.T[3].astype(float), tlist.T[4].astype(float)
    #make a SkyCoord object of the transients in the list
    transients = SkyCoord(ra=RAs*u.deg,dec=DECs*u.deg)
    ## Cross Matching ##
    #each entry ends up as [tname, tmag, name, Bmag, appR, sep, hosted]
    Xmatches = []
    for idx, t in enumerate(transients):
        #cross-match with GLADE+ catalogue via VizieR
        xmatch = Vizier.query_region(t, radius = 1*u.arcmin, catalog = 'VII/291/gladep')
        if len(xmatch) == 0: #if there is no match within 1 arcmin
            Xmatches.append([tnames[idx],tmags[idx],None,None,None,None,None])
        else: #if there is a match within 1 arcmin
            if len(xmatch[0]) == 1: #if only one match
                #find info for that galaxy
                galaxy = gal_info(xmatch,0)
            else: #if more than one, loop through all that matched
                galaxies = []
                for g in range(len(xmatch[0])):
                    #find info of this particular galaxy
                    G = gal_info(xmatch,g)
                    galaxies.append(G)
                #convert galaxies list to array for masking
                g_array = np.array(galaxies,dtype=object)
                #mask to extract galaxies with hosted=True
                host_mask = g_array.T[-1] == True
                #apply mask to get galaxies that host the transient
                host_gals = g_array[host_mask]
                if host_gals.shape[0] != 0:
                    #if there are host galaxies pick the one with the lowest separation
                    g_IDX = host_gals.T[3].argmin() #index of lowest-sep galaxy
                    galaxy = list(host_gals[g_IDX])
                else:
                    #if no host galaxies, remove any known NOT to host the transient
                    ukwn_mask = g_array.T[-1] != False
                    #apply mask, leaving only galaxies we didn't get radii for
                    ukwn_gals = g_array[ukwn_mask]
                    if ukwn_gals.shape[0] != 0:
                        #pick the lowest-separation galaxy among the unknowns
                        g_IDX = ukwn_gals.T[3].argmin()
                        galaxy = list(ukwn_gals[g_IDX])
                    else:
                        #only galaxies known not to host remain - pick the lowest-sep one
                        g_IDX = g_array.T[3].argmin()
                        galaxy = list(g_array[g_IDX])
            #add the transient's name and magnitude in front of the galaxy's info
            galaxy.insert(0,tnames[idx])
            galaxy.insert(1,tmags[idx])
            #add list to cross-matches
            Xmatches.append(galaxy)
    Xmatches = np.array(Xmatches,dtype=object)
    ## thresholding ##
    #entry layout: [tname, tmag, name, Bmag, appR, sep, hosted]
    mask = []
    for entry in Xmatches:
        if entry[2] is None:
            #discard if there is no match
            mask.append(False)
        else:
            #compare transient and galaxy magnitudes (lower = brighter)
            if entry[1] < entry[3]:
                #keep if the transient is brighter than the galaxy
                mask.append(True)
            else:
                if entry[4] is None:
                    if entry[-2] <= 2:
                        #no radius recorded and within 2" of host - discard
                        mask.append(False)
                    else:
                        #no radius but separation beyond the seeing limit - keep
                        mask.append(True)
                elif entry[-1] == False:
                    #BUGFIX: was `entry[4] == False`, but entry[4] is the apparent
                    #radius (a float or None) - the hosted flag lives at entry[-1],
                    #as the host-name loop below also assumes. If the galaxy is
                    #known not to host the transient then discard.
                    mask.append(False)
                else:
                    #compare radius and separation
                    if entry[-2] <= (0.25*entry[4] + 2):
                        #discard if target within 25% of the radius from the
                        #galaxy centre, plus 2" to account for seeing
                        mask.append(False)
                    else:
                        #otherwise keep
                        mask.append(True)
    #apply mask to tlist to remove unwanted entries
    th_list = tlist[mask]
    #apply mask to the cross-matches to get the possible hosts
    new_Xmatches = Xmatches[mask]
    hosts = []
    for entry in new_Xmatches:
        if entry[-1] != False:
            hosts.append(entry[2])
        else:
            hosts.append(None)
    #NOTE(review): dtype="str" converts None into the literal string 'None'
    hosts = np.array(hosts,dtype="str")
    #return the new list with possible hosts appended as the final column
    return np.concatenate((th_list,np.resize(hosts,(hosts.size,1))),axis=1)
################################################################################
def thresholds(DB,mill):
    """
    Removes targets from a database if they fail any of the observability cuts:
    observable time, lunar separation, discovery magnitude, or galactic latitude.
    Arguments:
    - DB: numpy object array of targets with discovery magnitude, observable
      time, lunar separation, and galactic latitude in columns 7, 8, 9, 10.
    - mill: the illumination fraction of the moon as a float
    Output:
    - t_array: the same database with failing transients removed and possible
      host names appended (via xmatch_rm).
    """
    #minimum useful observable time (shortest exposure is ~15 min = 0.25 h)
    min_tobs = 0.25
    #lunar separation threshold scales with how bright the moon is
    if mill < 0.25: #dark sky
        min_lsep = 10
    elif 0.25 <= mill < 0.65: #grey sky
        min_lsep = 20
    else: #bright sky
        min_lsep = 40
    #discovery magnitude window
    mag_lo = 12   #lower (bright) threshold
    mag_hi = 18.5 #upper (faint) threshold
    #minimum |galactic latitude| - anything closer to the plane is disregarded
    min_glat = 10
    #build a keep-mask: True for every row that passes all four cuts
    keep = []
    for entry in DB:
        passes = (
            entry[8] > min_tobs                       #observable time cut
            and entry[9] >= min_lsep                  #lunar separation cut
            and mag_lo <= float(entry[7]) < mag_hi    #magnitude window cut
            and abs(float(entry[10])) > min_glat      #galactic latitude cut
        )
        keep.append(passes)
    th_list = DB[np.array(keep, dtype=bool)]
    ## Galaxy separations ##
    #execute galaxy separation thresholding (appends possible hosts)
    t_array = xmatch_rm(th_list)
    return t_array
################################################################################
def pscore(database,weights,moon_per):
    """
    Thresholds a database of targets and calculates a priority score for each
    survivor. Each target is ranked on observable time, lunar separation,
    discovery magnitude and discovery date; the (weighted) ranks are combined
    and normalised to run from 0 (highest priority) to 5 (lowest priority).
    Arguments:
    - database: numpy object array of targets (ID in column 0; discovery date,
      magnitude, observable time, lunar separation in columns 5, 7, 8, 9)
    - weights: list of 4 numbers weighting the rank contributions of observable
      time, lunar separation, magnitude, and discovery date respectively
    - moon_per: fractional illumination of the moon (sets the lunar separation threshold)
    Outputs:
    - t_targets: numpy object array of the surviving targets sorted by the
      priority score held in the final column (best, i.e. lowest, first)
    """
    #remove all entries that don't fit within the thresholds
    t_array = thresholds(database,moon_per)
    if t_array.size == 0:
        #every target was cut - nothing to score
        print("none")
        return t_array
    if t_array.shape[0] == 1:
        #a single survivor trivially gets the top (zero) score
        pscores = np.array([[0]])
    else:
        #save remaining IDs
        IDs = t_array.T[0]
        #convert the string columns into sortable quantities
        disc = np.array([dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") for date in t_array.T[5]])
        #discovery dates as datetime objects
        mag = np.array(t_array.T[7],dtype="float") #magnitudes as floats
        tobs = np.array(t_array.T[8],dtype="float") #observable time as decimal hours
        lsep = np.array(t_array.T[9],dtype="float") #lunar separation as decimal degrees
        varbs = [tobs,lsep,mag,disc] #the variables that feed the pscores
        #build an (ID, value) table for each variable, sorted by ascending value
        ivarbs = []
        for varb in varbs:
            I = np.concatenate((np.resize(IDs,(IDs.size,1)),np.resize(varb,(varb.size,1))),axis=1)
            I = I[I[:, -1].argsort()] #sort by the value column
            ivarbs.append(I)
        #calculate the pscores
        pscores = []
        sz = IDs.size
        for ID in IDs:
            #rank = (size - index) so larger values score lower, which means
            #higher priority. BUGFIX: search only the ID column (column 0) of
            #each table rather than the whole array, so a data value can never
            #be mistaken for an ID.
            scores = []
            scores.append(sz-np.where(ivarbs[0][:, 0]==ID)[0][0])
            scores.append(sz-np.where(ivarbs[1][:, 0]==ID)[0][0])
            scores.append(np.where(ivarbs[2][:, 0]==ID)[0][0]) #no subtraction: bright (low mag) is better
            scores.append(sz-np.where(ivarbs[3][:, 0]==ID)[0][0])
            #weighted sum so some variables contribute more to the final score
            score = sum(np.array(scores)*np.array(weights))
            pscores.append(score)
        pscores = np.array(pscores,dtype=int)
        #normalise so scores run from 0 (high priority) to 5 (low priority).
        #BUGFIX: guard the degenerate case where every weighted score ties,
        #which previously divided by zero and filled the column with NaNs.
        spread = np.max(pscores) - np.min(pscores)
        if spread == 0:
            pscores = np.zeros(pscores.size)
        else:
            pscores = (pscores - np.min(pscores)) / spread * 5
    #append the scores as a final column
    t_targets = np.concatenate((t_array,np.resize(pscores,(pscores.size,1))),axis=1)
    #sort by pscore ascending, i.e. highest priority first
    t_targets = t_targets[t_targets[:, -1].argsort()]
    return t_targets
################################################################################
def flatten(l):
    """Return a single flat list containing every item of the sub-lists in l, in order."""
    flat = []
    for sub in l:
        flat.extend(sub)
    return flat
################################################################################
def priority_list(database,date,Slow=True):
    """
    Slices the TNS database to extract only the targets discovered or modified in a certain time frame in the past. It then calculates the observable time and lunar separation of these targets which along with their discovery magnitude and date are used to calculate their priority scores.
    Arguments:
    - database: numpy array of the data from TNS database which holds one entry per line
    - date: the date extracted from the top of the TNS database CSV file (string with format YY-MM-DD HH:MM:SS)
    - Slow: boolean dictating if calculating priority scores for PEPPER Fast or PEPPER Slow surveys (default is True - i.e., PEPPER Slow. Set to False for PEPPER Fast)
    Outputs:
    - targets: numpy array consisting of the relevant targets and their priority scores
    - Rows are: ['objid','name_prefix','name','ra','declination','discoverydate','lastmodified',
    'discoverymag','observable_time','lunar_sep','priority_score']
    """
    if not isinstance(Slow, bool):
        # NOTE(review): exit() terminates the whole process; kept for backward
        # compatibility, but a raised TypeError would be kinder to callers
        print("Priority score list not created - variable Slow was not set to a boolean value.")
        exit()
    # slice the database accordingly #
    #extract modification date and discovery date columns (strings)
    t_mod = database.T[-1]
    t_disc = database.T[12]
    #set different times since modification/discovery for PEPPER Fast and Slow
    if not Slow: #PEPPER Fast: tight windows, sliced from the date TNS was updated
        rdate = dt.datetime.strptime(date, '%Y-%m-%d %H:%M:%S')
        moddiff = rdate - dt.timedelta(days=3) #3 days ago
        discdiff = rdate - dt.timedelta(weeks=1) #1 week ago
    else: #PEPPER Slow: wider windows, sliced from today at midnight
        rdate = dt.datetime.combine(dt.datetime.now(), dt.datetime.min.time())
        moddiff = rdate - dt.timedelta(weeks=2) #2 weeks ago
        discdiff = rdate - dt.timedelta(weeks=12) #3 months ago (aka 12 weeks)
    #keep only rows modified AND discovered recently enough
    good_tars = []
    for i in range(len(t_mod)):
        #convert timestamp strings to datetime objects
        tMOD = dt.datetime.strptime(t_mod[i], '%Y-%m-%d %H:%M:%S')
        tDISC = dt.datetime.strptime(t_disc[i], '%Y-%m-%d %H:%M:%S.%f')
        if tMOD > moddiff and tDISC > discdiff:
            good_tars.append(database[i])
    if not good_tars:
        #BUGFIX: the original indexed DB.T[0] on an empty array (IndexError) when
        #no targets survived the slice; return an empty array instead, matching
        #the empty-pDB contract callers already check with .size == 0
        return np.array(good_tars, dtype=object)
    DB = np.array(good_tars,dtype=object)
    # calculate priority scores from weightings #
    #columns of relevant info from the sliced database
    IDs = DB.T[0] #TNS IDs
    prefix, name = DB.T[1], DB.T[2] #TNS name and prefix
    ra, dec = DB.T[3], DB.T[4] #RA and dec of targets
    t_disc, t_mod = DB.T[12],DB.T[-1] #time of discovery and modification of targets
    mags = DB.T[13] #discovery magnitudes of targets
    it_names = DB.T[-3] #internal names of the targets
    #location of Liverpool Telescope (La Palma)
    lat = 28.6468866 #latitude in degs
    long = -17.7742491 #longitude in degs
    elv = 2326.0 #elevation in metres
    #calculate observable time and lunar separation of targets
    t_obs, l_sep, l_per = Visibility(ra, dec, lat, long, elv)
    ## calculate the galactic latitudes ##
    Glat = []
    for i in range(DB.shape[0]):
        #skyfield position object from ra and dec (RA converted degs -> hours)
        position = Apparent.from_radec(ra_hours=float(ra[i])/15, dec_degrees=float(dec[i]))
        #galactic lat and long from the position object
        Glt, Gln, dummy = position.frame_latlon(galactic_frame)
        Glat.append(Glt.degrees)
    Glat = np.array(Glat) #convert from list to array
    #new database with all relevant information
    newDB = np.array([IDs,prefix,name,ra,dec,t_disc,t_mod,mags,t_obs,l_sep,Glat,it_names]).T
    #different weightings for PEPPER Fast and Slow
    if not Slow:
        wghts = [2,3,8,10] #prioritise discovery date and magnitude; least obs_time
    else:
        wghts = [10,7,8,3] #prioritise observable time and magnitude
    #create database with pscores
    pDB = pscore(newDB,wghts,l_per)
    #empty pDB means every target was cut by the thresholds
    if pDB.size == 0:
        return pDB
    # urls to last column #
    int_names = pDB.T[-3] #locally saved internal names of targets
    #build a Fink portal url from the ZTF name (if one exists)
    urls = []
    for entry in int_names:
        if "ZTF" in entry:
            #BUGFIX: the original character-walk dropped the last character of a
            #ZTF name that ended the string; take everything from "ZTF" up to the
            #next comma (or end of string) instead
            ztf_name = entry[entry.index("ZTF"):].split(",")[0]
            url = "https://fink-portal.org/" + ztf_name
        else:
            url = "" #no ZTF internal name -> no url
        urls.append(url)
    urls = np.array(urls)
    # combine together and array #
    targets = np.delete(pDB.T,-3,0).T #remove internal name column
    targets = np.concatenate((targets,np.resize(urls,(urls.size,1))),axis=1) #add urls to database
    return targets
######################### FUNCTIONS FOR EMAILS ETC. ############################
def csv2list(fname):
"""
This function takes in anysimple CSV file where each row contains a single entry and converts it into a list.
Arguments:
- fname: the path to the CSV file
Output:
- lst: a list where each element is a row of the CSV file
"""
with open(fname) as file:
lst = []
csvreader = csv.reader(file)
for row in csvreader: