forked from desihub/desisim
-
Notifications
You must be signed in to change notification settings - Fork 0
/
quickquasars.py
1012 lines (841 loc) · 44.8 KB
/
quickquasars.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
from __future__ import absolute_import, division, print_function
import sys, os
import argparse
import time
import warnings
import numpy as np
from scipy.constants import speed_of_light
from scipy.stats import cauchy
from astropy.table import Table,Column
import astropy.io.fits as pyfits
import multiprocessing
import healpy
from desiutil.log import get_logger
from desispec.io.util import write_bintable
from desispec.io.fibermap import read_fibermap
from desisim.simexp import reference_conditions
from desisim.templates import SIMQSO, QSO
from desisim.scripts.quickspectra import sim_spectra
from desisim.lya_spectra import read_lya_skewers , apply_lya_transmission, apply_metals_transmission, lambda_RF_LYA
from desisim.dla import dla_spec,insert_dlas
from desisim.bal import BAL
from desisim.io import empty_metatable
from desisim.eboss import FootprintEBOSS, sdss_subsample, RedshiftDistributionEBOSS, sdss_subsample_redshift
from desispec.interpolation import resample_flux
from desimodel.io import load_pixweight
from desimodel import footprint
from speclite import filters
from desitarget.cuts import isQSO_colors
from desiutil.dust import SFDMap, ext_odonnell
# Speed of light in km/s. During documentation builds scipy may be mocked,
# in which case dividing the mock raises TypeError; fall back to the
# hard-coded CODATA value so imports still succeed.
try:
    c = speed_of_light/1000. #- km/s
except TypeError:
    c = 299792458.0/1000.0
def parse(options=None):
    """Parse command line options for quickquasars.

    Args:
        options: optional list of command line argument strings to parse
            instead of sys.argv (useful for testing and scripted calls).

    Returns:
        argparse.Namespace with the parsed options.
    """
    parser=argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description="Fast simulation of QSO Lya spectra into the final DESI format\
        (Spectra class) that can be directly used as an input to the redshift fitter\
        (redrock) or correlation function code (picca). The input file is a Lya\
        transmission skewer fits file which format is described in\
        https://desi.lbl.gov/trac/wiki/LymanAlphaWG/LyaSpecSim.")

    #- Required
    parser.add_argument('-i','--infile', type=str, nargs= "*", required=True, help="Input skewer healpix fits file(s)")
    parser.add_argument('-o','--outfile', type=str, required=False, help="Output spectra (only used if single input file)")
    parser.add_argument('--outdir', type=str, default=".", required=False, help="Output directory")

    #- Optional observing conditions to override program defaults
    parser.add_argument('--program', type=str, default="DARK", help="Program (DARK, GRAY or BRIGHT)")
    parser.add_argument('--seeing', type=float, default=None, help="Seeing FWHM [arcsec]")
    parser.add_argument('--airmass', type=float, default=None, help="Airmass")
    parser.add_argument('--exptime', type=float, default=None, help="Exposure time [sec]")
    parser.add_argument('--moonfrac', type=float, default=None, help="Moon illumination fraction; 1=full")
    parser.add_argument('--moonalt', type=float, default=None, help="Moon altitude [degrees]")
    parser.add_argument('--moonsep', type=float, default=None, help="Moon separation to tile [degrees]")
    parser.add_argument('--seed', type=int, default=None, required = False, help="Global random seed (will be used to generate a seed per each file")
    parser.add_argument('--skyerr', type=float, default=0.0, help="Fractional sky subtraction error")
    parser.add_argument('--downsampling', type=float, default=None,help="fractional random down-sampling (value between 0 and 1)")
    parser.add_argument('--zmin', type=float, default=0, help="Min redshift")
    parser.add_argument('--zmax', type=float, default=10, help="Max redshift")
    parser.add_argument('--wmin', type=float, default=3500, help="Min wavelength (obs. frame)")
    parser.add_argument('--wmax', type=float, default=10000, help="Max wavelength (obs. frame)")
    parser.add_argument('--dwave', type=float, default=0.2, help="Internal wavelength step (don't change this)")
    parser.add_argument('--dwave_desi', type=float, default=0.8, help="Output wavelength step for DESI mocks)")
    parser.add_argument('--zbest', action = "store_true", help="add a zbest file per spectrum either with the truth\
        redshift or adding some error (optionally use it with --sigma_kms_fog and/or --gamma_kms_zfit)")
    parser.add_argument('--sigma_kms_fog',type=float,default=150, help="Adds a gaussian error to the quasar \
        redshift that simulate the fingers of god effect")
    parser.add_argument('--gamma_kms_zfit',nargs='?',type=float,const=400,help="Adds a Lorentzian distributed shift\
        to the quasar redshift, to simulate the redshift fitting step. E.g. --gamma_kms_zfit 400 will use a gamma \
        parameter of 400 km/s . If a number is not specified, a value of 400 is used.")
    parser.add_argument('--shift_kms_los',type=float,default=0,help="Adds a shift to the quasar redshift written in\
        the zbest file (in km/s)")
    parser.add_argument('--target-selection', action = "store_true" ,help="apply QSO target selection cuts to the simulated quasars")
    parser.add_argument('--mags', action = "store_true", help="DEPRECATED; use --bbflux")
    parser.add_argument('--bbflux', action = "store_true", help="compute and write the QSO broad-band fluxes in the fibermap")
    parser.add_argument('--add-LYB', action='store_true', help = "Add LYB absorption from transmision file")
    parser.add_argument('--metals', type=str, default=None, required=False, help = "list of metals to get the\
        transmission from, if 'all' runs on all metals", nargs='*')
    #parser.add_argument('--metals-from-file', action = 'store_true', help = "add metals from HDU in file")
    parser.add_argument('--metals-from-file',type=str,const='all',help = "list of metals,'SI1260,SI1207' etc, to get from HDUs in file. \
        Use 'all' or no argument for mock version < 7.3 or final metal runs. ",nargs='?')
    parser.add_argument('--dla',type=str,required=False, help="Add DLA to simulated spectra either randonmly\
        (--dla random) or from transmision file (--dla file)")
    parser.add_argument('--balprob',type=float,required=False, help="To add BAL features with the specified probability\
        (e.g --balprob 0.5). Expect a number between 0 and 1 ")
    parser.add_argument('--no-simqso',action = "store_true", help="Does not use desisim.templates.SIMQSO\
        to generate templates, and uses desisim.templates.QSO instead.")
    parser.add_argument('--save-continuum',action = "store_true", help="Save true continum to file")
    parser.add_argument('--save-continuum-dwave',type=float, default=2, help="Delta wavelength to save true continum")
    parser.add_argument('--desi-footprint', action = "store_true" ,help="select QSOs in DESI footprint")
    parser.add_argument('--eboss',action = 'store_true', help='Setup footprint, number density, redshift distribution,\
        and exposure time to generate eBOSS-like mocks')
    parser.add_argument('--extinction',action='store_true',help='Adds Galactic extinction')
    parser.add_argument('--no-transmission',action = 'store_true', help='Do not multiply continuum\
        by transmission, use F=1 everywhere')
    parser.add_argument('--nproc', type=int, default=1,help="number of processors to run faster")
    parser.add_argument('--overwrite', action = "store_true" ,help="rerun if spectra exists (default is skip)")
    parser.add_argument('--nmax', type=int, default=None, help="Max number of QSO per input file, for debugging")
    parser.add_argument('--save-resolution',action='store_true', help="Save full resolution in spectra file. By default only one matrix is saved in the truth file.")
    parser.add_argument('--dn_dzdm', type=str, default=None, help="File containing the number of quasars by redshift and magnitude (dN/dzdM) bin to be sampled")
    parser.add_argument('--source-contr-smoothing', type=float, default=10., \
        help="When this argument > 0 A, source electrons' contribution to the noise is smoothed " \
        "by a Gaussian kernel using FFT. Pipeline does this by 10 A. " \
        "Larger smoothing might be needed for better decoupling. Does not apply to eBOSS mocks.")

    if options is None:
        args = parser.parse_args()
    else:
        args = parser.parse_args(options)

    return args
def mod_cauchy(loc, scale, size, cut):
    """Draw `size` samples from a Cauchy distribution truncated at |x| < cut.

    Samples are drawn in oversized batches (3x oversampling) and values
    outside the cut are rejected; drawing repeats until enough samples
    remain. Uses the global numpy random state (via scipy.stats.cauchy),
    which callers rely on for reproducibility after np.random.seed().

    Args:
        loc: center of the Cauchy distribution.
        scale: gamma (half-width at half-maximum) parameter.
        size: number of samples to return.
        cut: maximum absolute value retained.

    Returns:
        1D numpy array of exactly `size` samples, each with |sample| < cut.
    """
    # Accumulate iteratively instead of the original recursion: on a
    # shortfall the recursion discarded every already-accepted sample and
    # started over, wasting work and (however unlikely) risking deep
    # recursion for aggressive cuts.
    samples = np.empty(0)
    while samples.size < size:
        batch = cauchy.rvs(loc=loc, scale=scale, size=3 * size)
        samples = np.concatenate([samples, batch[np.abs(batch) < cut]])
    return samples[:size]
def get_spectra_filename(args, nside, pixel):
    """Return the output spectra filename for a HEALPix pixel.

    When args.outfile is set it is returned verbatim (single input file
    case); otherwise a path of the form
    <outdir>/<pixel//100>/<pixel>/spectra-<nside>-<pixel>.fits is built.
    """
    if args.outfile:
        return args.outfile
    relpath = "{}/{}/spectra-{}-{}.fits".format(pixel // 100, pixel, nside, pixel)
    return os.path.join(args.outdir, relpath)
def get_zbest_filename(args, pixdir, nside, pixel):
    """Return the zbest filename for a pixel, or None when --zbest is off."""
    if not args.zbest:
        return None
    return os.path.join(pixdir, "zbest-{}-{}.fits".format(nside, pixel))
def get_truth_filename(args, pixdir, nside, pixel):
    """Return the truth filename for a pixel (args unused; kept for API symmetry)."""
    basename = "truth-{}-{}.fits".format(nside, pixel)
    return os.path.join(pixdir, basename)
def is_south(dec):
    """Identify which QSOs are in the south vs the north, since these are on
    different photometric systems. See
    https://github.com/desihub/desitarget/issues/353 for details.

    Args:
        dec: declination in degrees, scalar or numpy array.

    Returns:
        Boolean (or boolean array), True where dec is on the southern side.
    """
    # Simple constant-declination boundary between the two imaging surveys.
    dec_cut = 32.125
    return dec <= dec_cut
def get_healpix_info(ifilename):
    """Read the header of the transmission file to find the healpix pixel, nside
    and if we are lucky the scheme. If it fails, try to guess it from the
    filename (for backward compatibility).

    Args:
        ifilename: full path to input transmission file

    Returns:
        healpix: HEALPix pixel corresponding to the file
        nside: HEALPix nside value
        hpxnest: Whether HEALPix scheme in the file was nested

    Raises:
        ValueError: if healpix/nside can be neither read from the header
            nor guessed from the filename.
    """
    log = get_logger()
    # routed through the logger instead of the leftover debug print()
    log.info('ifilename {}'.format(ifilename))

    healpix = -1
    nside = -1
    hpxnest = True

    # use a context manager so the FITS file is closed even if header
    # parsing raises (the original left it open on error)
    with pyfits.open(ifilename) as hdulist:
        if "METADATA" in hdulist :
            head = hdulist["METADATA"].header
            # several header keyword spellings exist across mock versions
            for k in ["HPXPIXEL","PIXNUM"] :
                if k in head :
                    healpix=int(head[k])
                    log.info("healpix={}={}".format(k,healpix))
                    break
            for k in ["HPXNSIDE","NSIDE"] :
                if k in head :
                    nside=int(head[k])
                    log.info("nside={}={}".format(k,nside))
                    break
            for k in ["HPXNEST","NESTED","SCHEME"] :
                if k in head :
                    if k == "SCHEME" :
                        hpxnest=(head[k]=="NEST")
                    else :
                        hpxnest=bool(head[k])
                    log.info("hpxnest from {} = {}".format(k,hpxnest))
                    break

    if healpix >= 0 and nside < 0 :
        log.error("Read healpix in header but not nside.")
        raise ValueError("Read healpix in header but not nside.")

    if healpix < 0 :
        # fall back to parsing "...-<nside>-<healpix>.fits" style names
        vals = os.path.basename(ifilename).split(".")[0].split("-")
        if len(vals)<3 :
            error_msg="Could not guess healpix info from {}".format(ifilename)
            log.error(error_msg)
            raise ValueError(error_msg)
        try :
            healpix=int(vals[-1])
            nside=int(vals[-2])
        except ValueError:
            error_msg="Could not guess healpix info from {}".format(ifilename)
            log.error(error_msg)
            raise ValueError(error_msg)
        log.warning("Guessed healpix and nside from filename, assuming the healpix scheme is 'NESTED'")

    return healpix, nside, hpxnest
def get_pixel_seed(pixel, nside, global_seed):
    """Derive a reproducible per-pixel random seed from a global seed.

    When global_seed is None a fresh random seed is returned. Otherwise the
    same (pixel, nside, global_seed) triple always yields the same seed:
    the global seed initializes numpy's RNG, a pool of candidate seeds is
    drawn, deduplicated, and indexed by the pixel number.
    """
    if global_seed is None:
        # no global seed requested: just draw a random one
        return np.random.randint(2**32, size=1)[0]
    npix = healpy.nside2npix(nside)
    np.random.seed(global_seed)
    # oversample (10x) then deduplicate so every pixel gets a distinct seed
    candidates = np.random.randint(2**32, size=10 * npix)
    return np.unique(candidates)[:npix][pixel]
def simulate_one_healpix(ifilename,args,model,obsconditions,decam_and_wise_filters,
bassmzls_and_wise_filters,footprint_healpix_weight,
footprint_healpix_nside,
bal=None,sfdmap=None,eboss=None) :
log = get_logger()
# open filename and extract basic HEALPix information
pixel, nside, hpxnest = get_healpix_info(ifilename)
# using global seed (could be None) get seed for this particular pixel
global_seed = args.seed
seed = get_pixel_seed(pixel, nside, global_seed)
# use this seed to generate future random numbers
np.random.seed(seed)
# get output file (we will write there spectra for this HEALPix pixel)
ofilename = get_spectra_filename(args,nside,pixel)
# get directory name (we will also write there zbest file)
pixdir = os.path.dirname(ofilename)
# get filename for truth file
truth_filename = get_truth_filename(args,pixdir,nside,pixel)
# get filename for zbest file
zbest_filename = get_zbest_filename(args,pixdir,nside,pixel)
if not args.overwrite :
# check whether output exists or not
if args.zbest :
if os.path.isfile(ofilename) and os.path.isfile(zbest_filename) :
log.info("skip existing {} and {}".format(ofilename,zbest_filename))
return
else : # only test spectra file
if os.path.isfile(ofilename) :
log.info("skip existing {}".format(ofilename))
return
# create sub-directories if required
if len(pixdir)>0 :
if not os.path.isdir(pixdir) :
log.info("Creating dir {}".format(pixdir))
os.makedirs(pixdir)
if not eboss is None:
dwave_out = None
else:
dwave_out = args.dwave_desi
log.info("Read skewers in {}, random seed = {}".format(ifilename,seed))
# Read transmission from files. It might include DLA information, and it
# might add metal transmission as well (from the HDU file).
log.info("Read transmission file {}".format(ifilename))
trans_wave, transmission, metadata, dla_info = read_lya_skewers(ifilename,read_dlas=(args.dla=='file'),add_metals=args.metals_from_file,add_lyb=args.add_LYB)
### Add Finger-of-God, before generate the continua
log.info("Add FOG to redshift with sigma {} to quasar redshift".format(args.sigma_kms_fog))
DZ_FOG = args.sigma_kms_fog/c*(1.+metadata['Z'])*np.random.normal(0,1,metadata['Z'].size)
metadata['Z'] += DZ_FOG
### Select quasar within a given redshift range
w = (metadata['Z']>=args.zmin) & (metadata['Z']<=args.zmax)
transmission = transmission[w]
metadata = metadata[:][w]
DZ_FOG = DZ_FOG[w]
# option to make for BOSS+eBOSS
if not eboss is None:
if args.downsampling or args.desi_footprint:
raise ValueError("eboss option can not be run with "
+"desi_footprint or downsampling")
# Get the redshift distribution from SDSS
selection = sdss_subsample_redshift(metadata["RA"],metadata["DEC"],metadata['Z'],eboss['redshift'])
log.info("Select QSOs in BOSS+eBOSS redshift distribution {} -> {}".format(metadata['Z'].size,selection.sum()))
if selection.sum()==0:
log.warning("No intersection with BOSS+eBOSS redshift distribution")
return
transmission = transmission[selection]
metadata = metadata[:][selection]
DZ_FOG = DZ_FOG[selection]
# figure out the density of all quasars
N_highz = metadata['Z'].size
# area of healpix pixel, in degrees
area_deg2 = healpy.pixelfunc.nside2pixarea(nside,degrees=True)
input_highz_dens_deg2 = N_highz/area_deg2
selection = sdss_subsample(metadata["RA"], metadata["DEC"],
input_highz_dens_deg2,eboss['footprint'])
log.info("Select QSOs in BOSS+eBOSS footprint {} -> {}".format(transmission.shape[0],selection.size))
if selection.size == 0 :
log.warning("No intersection with BOSS+eBOSS footprint")
return
transmission = transmission[selection]
metadata = metadata[:][selection]
DZ_FOG = DZ_FOG[selection]
if args.desi_footprint :
footprint_healpix = footprint.radec2pix(footprint_healpix_nside, metadata["RA"], metadata["DEC"])
selection = np.where(footprint_healpix_weight[footprint_healpix]>0.99)[0]
log.info("Select QSOs in DESI footprint {} -> {}".format(transmission.shape[0],selection.size))
if selection.size == 0 :
log.warning("No intersection with DESI footprint")
return
transmission = transmission[selection]
metadata = metadata[:][selection]
DZ_FOG = DZ_FOG[selection]
# Redshift distribution resample to match the one in file give by args.dn_dzdm
if eboss is None:
log.info("Resampling to redshift distribution from {}".format(args.dn_dzdm))
rnd_state = np.random.get_state()
hdul_dn_dzdm=pyfits.open(args.dn_dzdm)
zcenters=hdul_dn_dzdm['Z_CENTERS'].data
dz = 0.5*(zcenters[1]-zcenters[0]) # Get bin size of the distribution
dn_dzdm=hdul_dn_dzdm['dn_dzdm'].data
dn_dz=dn_dzdm.sum(axis=1) # Table has a redshift bin by row.
z=metadata['Z']
pixarea=healpy.pixelfunc.nside2pixarea(nside,degrees=True)
# Turn old distribution into new distribution
selection_z=[] #Initialize array to select qsos
for z_bin, dndz_bin in zip(zcenters,dn_dz):
nqso_bin=np.ceil(pixarea*dndz_bin).astype(int)
w_z = (z>=z_bin-dz)&(z<z_bin+dz)
nqso_orig = len(z[w_z])
if nqso_orig==0:
continue # If no QSOs in that bin, skip
idx = np.where(w_z)[0]
downsampling_bin = nqso_bin/nqso_orig
if downsampling_bin<1:
# Drop QSOs if the number of disponible QSOs exceeds the wanted distribution
w_idx = np.random.uniform(size=nqso_orig)<downsampling_bin
idx = idx[w_idx]
else:
log.warning("QSOs in redshift bin {} for pixel {} not enough for sampling distribution".format(z_bin, pixel))
selection_z+=list(idx)
np.random.set_state(rnd_state)
log.info("Resampling redshift distribution {}->{}".format(len(z),len(selection_z)))
transmission = transmission[selection_z]
metadata = metadata[:][selection_z]
DZ_FOG = DZ_FOG[selection_z]
nqso=transmission.shape[0]
if args.downsampling is not None :
if args.downsampling <= 0 or args.downsampling > 1 :
log.error("Down sampling fraction={} must be between 0 and 1".format(args.downsampling))
raise ValueError("Down sampling fraction={} must be between 0 and 1".format(args.downsampling))
indices = np.where(np.random.uniform(size=nqso)<args.downsampling)[0]
if indices.size == 0 :
log.warning("Down sampling from {} to 0 (by chance I presume)".format(nqso))
return
transmission = transmission[indices]
metadata = metadata[:][indices]
DZ_FOG = DZ_FOG[indices]
nqso = transmission.shape[0]
if args.nmax is not None :
if args.nmax < nqso :
log.info("Limit number of QSOs from {} to nmax={} (random subsample)".format(nqso,args.nmax))
# take a random subsample
indices = np.random.choice(np.arange(nqso),args.nmax,replace=False)
transmission = transmission[indices]
metadata = metadata[:][indices]
DZ_FOG = DZ_FOG[indices]
nqso = args.nmax
if eboss is None:
log.info("Reading R-band magnitude distribution from {}".format(args.dn_dzdm))
rmagcenters=hdul_dn_dzdm['RMAG_CENTERS'].data
drmag = 0.5*(rmagcenters[1]-rmagcenters[0])
rmin = np.min(rmagcenters-drmag)
rmax = np.max(rmagcenters+drmag)
z = metadata['Z']
mag_pdfs=dn_dzdm/dn_dz[:,None] #Getting a probability distribution for each redshift bin
mags = np.zeros(len(z))
log.info("Generating random magnitudes according to distribution".format(args.dn_dzdm))
rnd_state = np.random.get_state()
for i,z_bin in enumerate(zcenters):
w_z = (z>=z_bin-dz)&(z<=z_bin+dz)
if sum(w_z)==0: continue
mags_selected=np.array([])
while mags_selected.size!=mags[w_z].size:
n_todo=mags[w_z].size-mags_selected.size
mag_tmp = np.random.uniform(rmin,rmax,n_todo)
pdf = np.interp(mag_tmp,rmagcenters,mag_pdfs[i])
w_r = np.random.uniform(0,1,n_todo)<pdf/np.max(mag_pdfs[i])
mags_selected=np.concatenate((mags_selected,mag_tmp[w_r]))
mags[w_z] = np.array(mags_selected)
np.random.set_state(rnd_state)
assert not np.any(mags==0)
# In previous versions of the London mocks we needed to enforce F=1 for
# z > z_qso here, but this is not needed anymore. Moreover, now we also
# have metal absorption that implies F < 1 for z > z_qso
#for ii in range(len(metadata)):
# transmission[ii][trans_wave>lambda_RF_LYA*(metadata[ii]['Z']+1)]=1.0
# if requested, add DLA to the transmission skewers
if args.dla is not None :
# if adding random DLAs, we will need a new random generator
if args.dla=='random':
log.info('Adding DLAs randomly')
random_state_just_for_dlas = np.random.RandomState(seed)
elif args.dla=='file':
log.info('Adding DLAs from transmission file')
else:
log.error("Wrong option for args.dla: "+args.dla)
sys.exit(1)
# if adding DLAs, the information will be printed here
dla_filename=os.path.join(pixdir,"dla-{}-{}.fits".format(nside,pixel))
dla_NHI, dla_z, dla_qid,dla_id = [], [], [],[]
# identify minimum Lya redshift in transmission files
min_lya_z = np.min(trans_wave/lambda_RF_LYA - 1)
# loop over quasars in pixel
for ii in range(len(metadata)):
# quasars with z < min_z will not have any DLA in spectrum
if min_lya_z>metadata['Z'][ii]: continue
# quasar ID
idd=metadata['MOCKID'][ii]
dlas=[]
if args.dla=='file':
for dla in dla_info[dla_info['MOCKID']==idd]:
# Adding only DLAs with z < zqso
if dla['Z_DLA_RSD']>=metadata['Z'][ii]: continue
dlas.append(dict(z=dla['Z_DLA_RSD'],N=dla['N_HI_DLA'],dlaid=dla['DLAID']))
transmission_dla = dla_spec(trans_wave,dlas)
elif args.dla=='random':
dlas, transmission_dla = insert_dlas(trans_wave, metadata['Z'][ii], rstate=random_state_just_for_dlas)
for idla in dlas:
idla['dlaid']+=idd*1000 #Added to have unique DLA ids. Same format as DLAs from file.
# multiply transmissions and store information for the DLA file
if len(dlas)>0:
transmission[ii] = transmission_dla * transmission[ii]
dla_z += [idla['z'] for idla in dlas]
dla_NHI += [idla['N'] for idla in dlas]
dla_id += [idla['dlaid'] for idla in dlas]
dla_qid += [idd]*len(dlas)
log.info('Added {} DLAs'.format(len(dla_id)))
# write file with DLA information
if len(dla_id)>0:
dla_meta=Table()
dla_meta['NHI'] = dla_NHI
dla_meta['Z_DLA'] = dla_z #This is Z_DLA_RSD in transmision.
dla_meta['TARGETID']=dla_qid
dla_meta['DLAID'] = dla_id
hdu_dla = pyfits.convenience.table_to_hdu(dla_meta)
hdu_dla.name="DLA_META"
del(dla_meta)
log.info("DLA metadata to be saved in {}".format(truth_filename))
else:
hdu_dla=pyfits.PrimaryHDU()
hdu_dla.name="DLA_META"
# if requested, extend transmission skewers to cover full spectrum
if args.target_selection or args.bbflux :
wanted_min_wave = 3329. # needed to compute magnitudes for decam2014-r (one could have trimmed the transmission file ...)
wanted_max_wave = 55501. # needed to compute magnitudes for wise2010-W2
if trans_wave[0]>wanted_min_wave :
log.info("Increase wavelength range from {}:{} to {}:{} to compute magnitudes".format(int(trans_wave[0]),int(trans_wave[-1]),int(wanted_min_wave),int(trans_wave[-1])))
# pad with ones at short wavelength, we assume F = 1 for z <~ 1.7
# we don't need any wavelength resolution here
new_trans_wave = np.append([wanted_min_wave,trans_wave[0]-0.01],trans_wave)
new_transmission = np.ones((transmission.shape[0],new_trans_wave.size))
new_transmission[:,2:] = transmission
trans_wave = new_trans_wave
transmission = new_transmission
if trans_wave[-1]<wanted_max_wave :
log.info("Increase wavelength range from {}:{} to {}:{} to compute magnitudes".format(int(trans_wave[0]),int(trans_wave[-1]),int(trans_wave[0]),int(wanted_max_wave)))
# pad with ones at long wavelength because we assume F = 1
coarse_dwave = 2. # we don't care about resolution, we just need a decent QSO spectrum, there is no IGM transmission in this range
n = int((wanted_max_wave-trans_wave[-1])/coarse_dwave)+1
new_trans_wave = np.append(trans_wave,np.linspace(trans_wave[-1]+coarse_dwave,trans_wave[-1]+coarse_dwave*(n+1),n))
new_transmission = np.ones((transmission.shape[0],new_trans_wave.size))
new_transmission[:,:trans_wave.size] = transmission
trans_wave = new_trans_wave
transmission = new_transmission
# whether to use QSO or SIMQSO to generate quasar continua. Simulate
# spectra in the north vs south separately because they're on different
# photometric systems.
south = np.where( is_south(metadata['DEC']) )[0]
north = np.where( ~is_south(metadata['DEC']) )[0]
meta, qsometa = empty_metatable(nqso, objtype='QSO', simqso=not args.no_simqso)
if args.no_simqso:
log.info("Simulate {} QSOs with QSO templates".format(nqso))
tmp_qso_flux = np.zeros([nqso, len(model.eigenwave)], dtype='f4')
tmp_qso_wave = np.zeros_like(tmp_qso_flux)
else:
log.info("Simulate {} QSOs with SIMQSO templates".format(nqso))
tmp_qso_flux = np.zeros([nqso, len(model.basewave)], dtype='f4')
tmp_qso_wave = model.basewave
for these, issouth in zip( (north, south), (False, True) ):
# number of quasars in these
nt = len(these)
if nt<=0: continue
if not eboss is None:
# for eBOSS, generate only quasars with r<22
magrange = (17.0, 21.3)
_tmp_qso_flux, _tmp_qso_wave, _meta, _qsometa \
= model.make_templates(nmodel=nt,
redshift=metadata['Z'][these], magrange=magrange,
lyaforest=False, nocolorcuts=True,
noresample=True, seed=seed, south=issouth)
else:
_tmp_qso_flux, _tmp_qso_wave, _meta, _qsometa \
= model.make_templates(nmodel=nt,
redshift=metadata['Z'][these],mag=mags[these],
lyaforest=False, nocolorcuts=True,
noresample=True, seed=seed, south=issouth)
_meta['TARGETID'] = metadata['MOCKID'][these]
_qsometa['TARGETID'] = metadata['MOCKID'][these]
meta[these] = _meta
qsometa[these] = _qsometa
tmp_qso_flux[these, :] = _tmp_qso_flux
if args.no_simqso:
tmp_qso_wave[these, :] = _tmp_qso_wave
log.info("Resample to transmission wavelength grid")
qso_flux=np.zeros((tmp_qso_flux.shape[0],trans_wave.size))
if args.no_simqso:
for q in range(tmp_qso_flux.shape[0]) :
qso_flux[q]=np.interp(trans_wave,tmp_qso_wave[q],tmp_qso_flux[q])
else:
for q in range(tmp_qso_flux.shape[0]) :
qso_flux[q]=np.interp(trans_wave,tmp_qso_wave,tmp_qso_flux[q])
tmp_qso_flux = qso_flux
tmp_qso_wave = trans_wave
if args.save_continuum :
true_wave=np.linspace(args.wmin,args.wmax,int((args.wmax-args.wmin)/args.save_continuum_dwave)+1)
true_flux=np.zeros((tmp_qso_flux.shape[0],true_wave.size))
for q in range(tmp_qso_flux.shape[0]) :
true_flux[q]=resample_flux(true_wave,tmp_qso_wave,tmp_qso_flux[q])
continum_meta=Table()
continum_meta['TARGETID'] = qsometa['TARGETID']
continum_meta['TRUE_CONT'] = true_flux
hdu_trueCont = pyfits.convenience.table_to_hdu(continum_meta)
hdu_trueCont.name = "TRUE_CONT"
hdu_trueCont.header['wmin'] = args.wmin
hdu_trueCont.header['wmax'] = args.wmax
hdu_trueCont.header['dwave'] = args.save_continuum_dwave
del(continum_meta,true_wave,true_flux)
log.info("True continum to be saved in {}".format(truth_filename))
# if requested, add BAL features to the quasar continua
if args.balprob:
if args.balprob <= 1. and args.balprob > 0:
from desisim.io import find_basis_template
log.info("Adding BALs with probability {}".format(args.balprob))
# save current random state
rnd_state = np.random.get_state()
tmp_qso_flux,meta_bal = bal.insert_bals(tmp_qso_wave, tmp_qso_flux, metadata['Z'],
balprob= args.balprob, seed=seed, qsoid=metadata['MOCKID'])
# restore random state to get the same random numbers later
# as when we don't insert BALs
np.random.set_state(rnd_state)
w = np.in1d(qsometa['TARGETID'], meta_bal['TARGETID'])
qsometa['BAL_TEMPLATEID'][w] = meta_bal['BAL_TEMPLATEID']
hdu_bal=pyfits.convenience.table_to_hdu(meta_bal); hdu_bal.name="BAL_META"
#Trim to only show the version, assuming it is located in os.environ['DESI_BASIS_TEMPLATES']
hdu_bal.header["BALTEMPL"]=find_basis_template(objtype='BAL').split('basis_templates/')[1]
del meta_bal
else:
balstr=str(args.balprob)
log.error("BAL probability is not between 0 and 1 : "+balstr)
sys.exit(1)
# Multiply quasar continua by transmitted flux fraction
# (at this point transmission file might include Ly-beta, metals and DLAs)
log.info("Apply transmitted flux fraction")
if not args.no_transmission:
tmp_qso_flux = apply_lya_transmission(tmp_qso_wave,tmp_qso_flux,
trans_wave,transmission)
# if requested, compute metal transmission on the fly
# (if not included already from the transmission file)
if args.metals is not None:
if args.metals_from_file :
log.error('you cannot add metals twice')
raise ValueError('you cannot add metals twice')
if args.no_transmission:
log.error('you cannot add metals if asking for no-transmission')
raise ValueError('can not add metals if using no-transmission')
lstMetals = ''
for m in args.metals: lstMetals += m+', '
log.info("Apply metals: {}".format(lstMetals[:-2]))
tmp_qso_flux = apply_metals_transmission(tmp_qso_wave,tmp_qso_flux,
trans_wave,transmission,args.metals)
# Attenuate the spectra for extinction
if not sfdmap is None:
Rv=3.1 #set by default
indx=np.arange(metadata['RA'].size)
extinction =Rv*ext_odonnell(tmp_qso_wave)
EBV = sfdmap.ebv(metadata['RA'],metadata['DEC'], scaling=1.0)
tmp_qso_flux *=10**( -0.4 * EBV[indx, np.newaxis] * extinction)
log.info("Dust extinction added")
# if requested, compute magnitudes and apply target selection. Need to do
# this calculation separately for QSOs in the north vs south.
bbflux=None
if args.target_selection or args.bbflux :
bands=['FLUX_G','FLUX_R','FLUX_Z', 'FLUX_W1', 'FLUX_W2']
bbflux=dict()
bbflux['SOUTH'] = is_south(metadata['DEC'])
for band in bands:
bbflux[band] = np.zeros(nqso)
# need to recompute the magnitudes to account for lya transmission
log.info("Compute QSO magnitudes")
for these, filters in zip( (~bbflux['SOUTH'], bbflux['SOUTH']),
(bassmzls_and_wise_filters, decam_and_wise_filters) ):
if np.count_nonzero(these) > 0:
maggies = filters.get_ab_maggies(1e-17 * tmp_qso_flux[these, :], tmp_qso_wave)
for band, filt in zip( bands, maggies.colnames ):
bbflux[band][these] = np.ma.getdata(1e9 * maggies[filt]) # nanomaggies
if args.target_selection :
log.info("Apply target selection")
isqso = np.ones(nqso, dtype=bool)
for these, issouth in zip( (~bbflux['SOUTH'], bbflux['SOUTH']), (False, True) ):
if np.count_nonzero(these) > 0:
# optical cuts only if using QSO vs SIMQSO
isqso[these] &= isQSO_colors(gflux=bbflux['FLUX_G'][these],
rflux=bbflux['FLUX_R'][these],
zflux=bbflux['FLUX_Z'][these],
w1flux=bbflux['FLUX_W1'][these],
w2flux=bbflux['FLUX_W2'][these],
south=issouth, optical=args.no_simqso)
log.info("Target selection: {}/{} QSOs selected".format(np.sum(isqso),nqso))
selection=np.where(isqso)[0]
if selection.size==0 : return
tmp_qso_flux = tmp_qso_flux[selection]
metadata = metadata[:][selection]
meta = meta[:][selection]
qsometa = qsometa[:][selection]
DZ_FOG = DZ_FOG[selection]
for band in bands :
bbflux[band] = bbflux[band][selection]
bbflux['SOUTH']=bbflux['SOUTH'][selection]
nqso = selection.size
if not sfdmap is None and eboss is None:
flux_assigned = 10**((22.5-mags)/2.5)
scalefac=flux_assigned/bbflux['FLUX_R']
tmp_qso_flux=scalefac[:,None]*tmp_qso_flux
for these, filters in zip( (~bbflux['SOUTH'], bbflux['SOUTH']),
(bassmzls_and_wise_filters, decam_and_wise_filters) ):
if np.count_nonzero(these) > 0:
maggies = filters.get_ab_maggies(1e-17 * tmp_qso_flux[these, :], tmp_qso_wave)
for band, filt in zip( bands, maggies.colnames ):
bbflux[band][these] = np.ma.getdata(1e9 * maggies[filt])
log.info("Rescaling flux to match magnitudes")
log.info("Resample to a linear wavelength grid (needed by DESI sim.)")
# careful integration of bins, not just a simple interpolation
qso_wave=np.linspace(args.wmin,args.wmax,int((args.wmax-args.wmin)/args.dwave)+1)
qso_flux=np.zeros((tmp_qso_flux.shape[0],qso_wave.size))
for q in range(tmp_qso_flux.shape[0]) :
qso_flux[q]=resample_flux(qso_wave,tmp_qso_wave,tmp_qso_flux[q])
log.info("Simulate DESI observation and write output file")
if "MOCKID" in metadata.dtype.names :
#log.warning("Using MOCKID as TARGETID")
targetid=np.array(metadata["MOCKID"]).astype(int)
elif "ID" in metadata.dtype.names :
log.warning("Using ID as TARGETID")
targetid=np.array(metadata["ID"]).astype(int)
else :
log.warning("No TARGETID")
targetid=None
specmeta={"HPXNSIDE":nside,"HPXPIXEL":pixel, "HPXNEST":hpxnest}
if args.target_selection or args.bbflux :
fibermap_columns = dict(
FLUX_G = bbflux['FLUX_G'],
FLUX_R = bbflux['FLUX_R'],
FLUX_Z = bbflux['FLUX_Z'],
FLUX_W1 = bbflux['FLUX_W1'],
FLUX_W2 = bbflux['FLUX_W2'],
)
photsys = np.full(len(bbflux['FLUX_G']), 'N', dtype='S1')
photsys[bbflux['SOUTH']] = b'S'
fibermap_columns['PHOTSYS'] = photsys
else :
fibermap_columns=None
if args.eboss:
specsim_config_file = 'eboss'
else:
specsim_config_file = 'desi'
### use Poisson = False to get reproducible results.
### use args.save_resolution = False to not save the matrix resolution per quasar in spectra files.
resolution=sim_spectra(qso_wave,qso_flux, args.program, obsconditions=obsconditions,spectra_filename=ofilename,
sourcetype="qso", skyerr=args.skyerr,ra=metadata["RA"],dec=metadata["DEC"],targetid=targetid,
meta=specmeta,seed=seed,fibermap_columns=fibermap_columns,use_poisson=False,
specsim_config_file=specsim_config_file, dwave_out=dwave_out, save_resolution=args.save_resolution, source_contribution_smoothing=args.source_contr_smoothing)
### Keep input redshift
Z_spec = metadata['Z'].copy()
Z_input = metadata['Z'].copy()-DZ_FOG
### Add a shift to the redshift, simulating the systematic imprecision of redrock
DZ_sys_shift = args.shift_kms_los/c*(1.+Z_input)
log.info('Added a shift of {} km/s to the redshift'.format(args.shift_kms_los))
meta['REDSHIFT'] += DZ_sys_shift
metadata['Z'] += DZ_sys_shift
### Add a shift to the redshift, simulating the statistic imprecision of redrock
if args.gamma_kms_zfit:
log.info("Added zfit error with gamma {} to zbest".format(args.gamma_kms_zfit))
DZ_stat_shift = mod_cauchy(loc=0,scale=args.gamma_kms_zfit,size=nqso,cut=3000)/c*(1.+Z_input)
meta['REDSHIFT'] += DZ_stat_shift
metadata['Z'] += DZ_stat_shift
## Write the truth file, including metadata for DLAs and BALs
log.info('Writing a truth file {}'.format(truth_filename))
meta.rename_column('REDSHIFT','Z')
meta.add_column(Column(Z_spec,name='TRUEZ'))
meta.add_column(Column(Z_input,name='Z_INPUT'))
meta.add_column(Column(DZ_FOG,name='DZ_FOG'))
meta.add_column(Column(DZ_sys_shift,name='DZ_SYS'))
if args.gamma_kms_zfit:
meta.add_column(Column(DZ_stat_shift,name='DZ_STAT'))
if 'Z_noRSD' in metadata.dtype.names:
meta.add_column(Column(metadata['Z_noRSD'],name='Z_NORSD'))
else:
log.info('Z_noRSD field not present in transmission file. Z_NORSD not saved to truth file')
#Save global seed and pixel seed to primary header
hdr=pyfits.Header()
hdr['GSEED']=global_seed
hdr['PIXSEED']=seed
with warnings.catch_warnings():
warnings.filterwarnings("ignore", message=".*nanomaggies.*")
hdu = pyfits.convenience.table_to_hdu(meta)
hdu.header['EXTNAME'] = 'TRUTH'
hduqso=pyfits.convenience.table_to_hdu(qsometa)
hduqso.header['EXTNAME'] = 'TRUTH_QSO'
hdulist=pyfits.HDUList([pyfits.PrimaryHDU(header=hdr),hdu,hduqso])
if args.dla :
hdulist.append(hdu_dla)
if args.balprob :
hdulist.append(hdu_bal)
if args.save_continuum :
hdulist.append(hdu_trueCont)
# Save one resolution matrix per camera to the truth file instead of one per quasar to the spectra files.
if not args.save_resolution:
for band in resolution.keys():
hdu = pyfits.ImageHDU(name="{}_RESOLUTION".format(band.upper()))
hdu.data = resolution[band].astype("f4")
hdulist.append(hdu)
hdulist.writeto(truth_filename, overwrite=True)
hdulist.close()
if args.zbest :
log.info("Read fibermap")
fibermap = read_fibermap(ofilename)
log.info("Writing a zbest file {}".format(zbest_filename))
columns = [
('CHI2', 'f8'),
('COEFF', 'f8' , (4,)),
('Z', 'f8'),
('ZERR', 'f8'),
('ZWARN', 'i8'),
('SPECTYPE', (str,96)),
('SUBTYPE', (str,16)),
('TARGETID', 'i8'),
('DELTACHI2', 'f8'),
('BRICKNAME', (str,8))]
zbest = Table(np.zeros(nqso, dtype=columns))
zbest['CHI2'][:] = 0.
zbest['Z'][:] = metadata['Z']
zbest['ZERR'][:] = 0.
zbest['ZWARN'][:] = 0
zbest['SPECTYPE'][:] = 'QSO'
zbest['SUBTYPE'][:] = ''
zbest['TARGETID'][:] = metadata['MOCKID']
zbest['DELTACHI2'][:] = 25.
hzbest = pyfits.convenience.table_to_hdu(zbest); hzbest.name='ZBEST'
hfmap = pyfits.convenience.table_to_hdu(fibermap); hfmap.name='FIBERMAP'
hdulist =pyfits.HDUList([pyfits.PrimaryHDU(),hzbest,hfmap])
hdulist.writeto(zbest_filename, overwrite=True)
hdulist.close() # see if this helps with memory issue
def _func(arg) :
    """Adapter used with multiprocessing.Pool.

    Pool.map can only pass a single positional argument to the worker, so
    each task is packed as a dict; this expands it into the keyword
    arguments of simulate_one_healpix and returns its result.
    """
    keyword_args = dict(arg)
    return simulate_one_healpix(**keyword_args)
def main(args=None):
log = get_logger()
if isinstance(args, (list, tuple, type(None))):
args = parse(args)
if args.outfile is not None and len(args.infile)>1 :
log.error("Cannot specify single output file with multiple inputs, use --outdir option instead")
return 1
if not os.path.isdir(args.outdir) :
log.info("Creating dir {}".format(args.outdir))
os.makedirs(args.outdir)
if args.mags :
log.warning('--mags is deprecated; please use --bbflux instead')
args.bbflux = True
exptime = args.exptime
if exptime is None :
exptime = 1000. # sec
if args.eboss:
exptime = 1000. # sec (added here in case we change the default)
#- Generate obsconditions with args.program, then override as needed
obsconditions = reference_conditions[args.program.upper()]
if args.airmass is not None:
obsconditions['AIRMASS'] = args.airmass
if args.seeing is not None:
obsconditions['SEEING'] = args.seeing
if exptime is not None:
obsconditions['EXPTIME'] = exptime
if args.moonfrac is not None:
obsconditions['MOONFRAC'] = args.moonfrac
if args.moonalt is not None:
obsconditions['MOONALT'] = args.moonalt
if args.moonsep is not None:
obsconditions['MOONSEP'] = args.moonsep
if args.no_simqso:
log.info("Load QSO model")
model=QSO()
else:
log.info("Load SIMQSO model")
#lya_simqso_model.py is located in $DESISIM/py/desisim/scripts/.
#Uses a different emmision lines model than the default SIMQSO.
#We will update this soon to match with the one used in select_mock_targets.
model=SIMQSO(nproc=1,sqmodel='lya_simqso_model')
decam_and_wise_filters = None
bassmzls_and_wise_filters = None
if args.target_selection or args.bbflux :
log.info("Load DeCAM and WISE filters for target selection sim.")
# ToDo @moustakas -- load north/south filters
decam_and_wise_filters = filters.load_filters('decam2014-g', 'decam2014-r', 'decam2014-z',
'wise2010-W1', 'wise2010-W2')
bassmzls_and_wise_filters = filters.load_filters('BASS-g', 'BASS-r', 'MzLS-z',
'wise2010-W1', 'wise2010-W2')
footprint_healpix_weight = None
footprint_healpix_nside = None
if args.desi_footprint :
if not 'DESIMODEL' in os.environ :
log.error("To apply DESI footprint, I need the DESIMODEL variable to find the file $DESIMODEL/data/footprint/desi-healpix-weights.fits")
sys.exit(1)
footprint_filename=os.path.join(os.environ['DESIMODEL'],'data','footprint','desi-healpix-weights.fits')
if not os.path.isfile(footprint_filename):
log.error("Cannot find $DESIMODEL/data/footprint/desi-healpix-weights.fits")
sys.exit(1)
pixmap=pyfits.open(footprint_filename)[0].data
footprint_healpix_nside=256 # same resolution as original map so we don't loose anything
footprint_healpix_weight = load_pixweight(footprint_healpix_nside, pixmap=pixmap)
if args.gamma_kms_zfit and not args.zbest:
log.info("Setting --zbest to true as required by --gamma_kms_zfit")
args.zbest = True
if args.extinction:
sfdmap= SFDMap()
else:
sfdmap=None
if args.balprob:
bal=BAL()
else:
bal=None
if args.eboss:
eboss = { 'footprint':FootprintEBOSS(), 'redshift':RedshiftDistributionEBOSS() }
else:
eboss = None
if args.dn_dzdm is None:
args.dn_dzdm=os.path.join(os.environ['DESISIM'],'py/desisim/data/dn_dzdM_EDR.fits')
if args.nproc > 1:
func_args = [ {"ifilename":filename , \
"args":args, "model":model , \
"obsconditions":obsconditions , \
"decam_and_wise_filters": decam_and_wise_filters , \
"bassmzls_and_wise_filters": bassmzls_and_wise_filters , \