### User options
### Riley Peterson October 2017
### I need to credit this website for teaching me the inverse transform sampling
### http://www.nehalemlabs.net/prototype/blog/2013/12/16/how-to-do-inverse-transformation-sampling-in-scipy-and-numpy/
### Though ultimately I didn't use this, it was interesting and useful nevertheless
### numpy is imported up here so that the make_pairs options below (np.linspace) work
import numpy as np
### Logistics
base_folder = "/Users/rpeterson/FP/GALFIT_Simulations1/galfitsim1_astrodrizzled/" #directory where simulations will be made, created if it doesn't exist
login_cl_path = "/Users/rpeterson/" #directory containing login.cl, so that "from pyraf import iraf" works properly
psf_image = "/Users/rpeterson/Downloads/uds_2epoch_f160w_v0.3_psf.fits" #psf kernel that will be convolved with the galaxies
galfit_binary = "/net/frigg/Users/inger/bin/galfit" #path to GALFIT, if you can run GALFIT with "$galfit obj.in", just put galfit here
### Make input files for your galaxies
make_feedmes = 'yes' # if "yes", feedmes are made for the number of galaxies in number_of_sims. If running make_pairs I believe this needs to be "yes".
feedme_verbose = "no"
large_field_xdim,large_field_ydim = 1014,1014 # x,y ; the dimensions of your simulated image
mag_range = [16,28] # The next five parameters are ranges for the galaxy parameters; if make_interpolate_chain = "yes" and the appropriate variables are defined these are interpolated, otherwise values are drawn at random from within the ranges
re_range = [2,40]
n_range = [0.5,5.5]
q_range = [0.05,1.0]
pa_range = [-90,90]
number_of_sims = 1037 # Should be even int, number of simulated galaxies in the image, if make_pairs="yes" and the maximum pixel distance is large it might not be possible to place this number, the script will inform the user if this happens
cutout_size = 400 # Should be even int, length size of the cutouts created (recommend large ~400 so that none of the light is accidentally clipped if it is a large galaxy)
sky_range = [0,0] #Range of sky values, I generally leave this as [0,0], works like the ranges above but is for the sky
mag_zp = 25.9463 #Magnitude zeropoint
#plt_scale_ratio=2.1375 #I don't think this is necessary anymore. For drizzling, since my flts had plate scale (0.13) and final drizzle was (0.06) I was trying to simulate galaxies in the flt using data from the drz, therefore their radii had to be shrunk. I figured out how to use the blot task to create an flt from the drz, so this happens then.
universal_seed = 6061944 ### can be None or int, super helpful for reproducibility. Same seed produces same results
make_interpolate_chain = "yes" # if "yes", values are interpolated from SExtractor output tables. Currently, needs to be "yes" if making pairs.
if make_interpolate_chain=="yes":
#first parameter to be interpolated
chain_type1 = "cubic"
chain_plot1 = "yes"
chain_bins1 = 40
chain_data1 = "/Users/rpeterson/FP/GALFIT_Simulations1/SExtractor_missing_problem/actual_data_trial_field/UV105842F160W/UV105842F160W_phot/UV105842F160W_tab_all.fits" #Only works for fits files right now, might make for text later
chain_column1 = "MAG_AUTO"
chain_obj1 = "mag"
do_chain2 = "yes"
if do_chain2=="yes":
#second parameter which is interpolated
chain_type2 = "linear"
chain_plot2 = "yes"
chain_data2 = "/Users/rpeterson/FP/GALFIT_Simulations1/SExtractor_missing_problem/actual_data_trial_field/UV105842F160W/UV105842F160W_phot/UV105842F160W_tab_all.fits" #Only works for fits files right now, might make for text later
chain_column2 = "FLUX_RADIUS"
chain_obj2 = "re"
#if "re" is chain_obj2, will assume that "ELLIPTICITY" exists within chain_data2, and will interpolate the circularized effective radius
#chain_type3 = "linear"
#chain_plot3 = "yes"
#chain_data3 = "/Users/rpeterson/FP/UV-105842_id6p01/UV-105842_id6p01_phot/uv-105842_d6p-01-284.0-f160wtab_all.fits" #Only works for fits files right now, might make for text later
#chain_column3 = "FLUX_RADIUS"
#chain_obj3 = "re"
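### A minimal sketch (values made up, not part of the pipeline) of the radius convention used throughout:
### FLUX_RADIUS is circularized with the catalog axis ratio, and the semi-major effective radius that
### GALFIT needs is recovered with the axis ratio drawn for the simulated galaxy.
"""
flux_radius = 10.0                                  # FLUX_RADIUS from the catalog [pixels]
ellipticity = 0.5                                   # catalog ELLIPTICITY = 1 - b/a
r_circ = flux_radius * (1.0 - ellipticity) ** 0.5   # circularized effective radius
q_sim = 0.8                                         # axis ratio drawn for the simulated galaxy
a_eff = r_circ / q_sim ** 0.5                       # semi-major R_e written to the GALFIT feedme
"""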
### Makes pairs of galaxies. The first half are "parents" placed at random; the second half are "children" whose properties are determined by stepping through these intervals
### The last argument of each np.linspace (the number of steps) multiplied together should equal half the number of sims; a sanity-check sketch follows the block below
### For example pixel_distance_steps=np.linspace(5,55,10),luminosity_ratio_steps=np.linspace(0.5,2,4),re_ratio_steps=np.linspace(0.2,3,5),n_steps=np.linspace(1,4,2)
### My number of sims is 800, half of this is 400. 10*4*5*2=400
make_pairs="no"
if make_pairs=="yes":
pixel_distance_steps=np.linspace(5,55,10) #These can also be custom non linear arrays
luminosity_ratio_steps=np.linspace(0.5,2,4) #2 means child galaxy will be twice as bright
interp_re="yes" # if "yes" the re value will be interpolated from the make_interpolate_chain
if interp_re=="no":
re_ratio_steps=np.linspace(0.2,3,5) #ratio is circularized effective radius, so it factors in axis ratio, 2 means child will be twice as large (circularized re)
n_steps=np.linspace(1,4,2)
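### A sanity-check sketch for the pair grid (illustration only, placeholder names on the magnitude line):
### the product of the step counts must not exceed half the number of sims (the script exits otherwise),
### and the child magnitude follows from the luminosity ratio exactly as in the placement code further down.
"""
n_pair_values = (len(pixel_distance_steps) * len(luminosity_ratio_steps)
                 * (1 if interp_re == "yes" else len(re_ratio_steps)) * len(n_steps))
assert n_pair_values <= number_of_sims / 2
# mag_child = mag_parent - 2.5*np.log10(luminosity_ratio)  e.g. a ratio of 2 -> child ~0.75 mag brighter
"""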
### Makes the galaxies by sending their feedme/input files to the terminal
make_galaxies = 'no'
make_galaxies_verbose = "no"
### Convolves with the provided PSF using from astropy.convolution import convolve_fft
convolve = 'no'
convolution_verbose = "yes"
### Adds noise to individual cutouts
mknoise_cutouts = 'no' #Should probably be yes if adding to existing image
mknoise_cutouts_verbose = "yes"
if mknoise_cutouts=="yes":
background = 0.136392
gain = 6529.37744
rdnoise = 4
poisson = 'yes'
display_5 = 'no' ### if "yes", displays 5 galaxies at random and shows their feedme files
### Large field
create_large_field = 'yes' # if "yes", allows the large field to be created
display_large_field = "yes"
if create_large_field=="yes":
image_to_sim = "default" #should be full path to it, shouldn't be in base_folder. If not "default", the image provided will be used to determine possible coordinates (useful if image is rotated)
image_to_sim_ext=0 #default is 0, if image_to_sim has multiple extensions this is the extensions number you want to simulate
image_to_add_to="default" #should be full path to it. If not "default" galaxies will be added to this image
sky_value = 0 # Should probably be zero, if nonzero it sets all the points in your simulated image that aren't zero in the original to sky_value
add_mknoise_large1 = "no" # I doubt this will ever be used. Probably should be set to "no". Adds noise to a blank image.
if add_mknoise_large1=="yes":
background1 = 0.136392
gain1=6529.37744
rdnoise1=2
poisson1="yes"
add_models = "yes" #if "yes", models will be added to the image
add_models_verbose = "yes" #my preference is to leave this as "yes"
label = "yes" #if "yes" a region file will be created and if display_large_field="yes" the region file is displayed. Note their is a 3 pixel offset so that labels don't block the actual galaxy.
label_verbose = "no"
save = "no" #if "yes", saves final output
if add_models == "yes":
which_to_add="_convolved" #options are "", "_convolved", "_noised" or "_convolved_noised" #the type of model that will be added
how_many_to_add = 1037 # should be <= number_of_sims, number of models that will be added to the large field
not_near_edge="yes" # if "yes", galaxies are prevented from going near the edge of the image according to the pixel distance clearance
if not_near_edge=="yes":
clearance=50
### Adds noise after galaxies have been placed in the image, should probably be no if adding to existing images
add_mknoise_large2="yes" #If drizzling this should be "no", but the values below should be defined. if "yes", then drizzle should be set to "no", and noise is added using IRAF's mknoise.
background2 = 0.623
gain2=1632.34375/2.5 #mknoise gain
rdnoise2=20 #mknoise read noise
poisson2="yes" #mknoise add poisson noise
drizzle="yes" #if "yes", drizzling is performed
if drizzle=="yes":
path_to_flts="/Users/rpeterson/Downloads/id6p01???_flt.fits" #Path to your flt images wildcard (*) or question mark (?) should be placed where necessary. E.g. path_to_flts="/Users/rpeterson/Downloads/id6p01???_flt.fits", such that glob.glob(path_to_flts) will get all your flts
path_to_actual_driz="/Users/rpeterson/FP/do_everything/UV105842F160W/UV105842F160W_drz.fits"
display_blots="yes" #Will display blotted (distorted flts)
backgrounds=[0.6351,0.606,0.6373,0.606] #If "default" will use the background2 value above as the value for each flt, otherwise it is the list of specific values for each simulated flt e.g. backgrounds=[0.6351,0.606,0.6373,0.606]
        ###Originally I had the sky comparison be for user specified regions, but this became way too complicated; instead the entire image is compared (histogram ranges are determined by range_sky=(im1_med-3*im1_std,im1_med+3*im1_std), see the sky_compare definition for more details)
compare_noised_blots_sky="yes" #Will display the blotted noised images
        #Astrodrizzle Parameters: the default is to just make the final drizzled image without sky subtraction and to clean intermediate images; the user can edit this within the actual code to suit their needs (see the sketch at the end of this options section)
final_bits,final_pixfrac,final_fillval,final_scale,final_rot,resetbits=576,0.8,0,0.06,0,0
compare_final_drz="yes"
compare_sky="yes" #if "yes", compares sky of final_image for example drizzle="no" and add_mknoise_large2="yes" to image to sim. If image_to_sim is default makes a histogram of the simulate image
#If you really want to compare specific regions use the commented part below, otherwise will use the whole image and clip the sky according to the median
"""
if compare_sky=="yes":
real_sky_lowerx,real_sky_lowery, real_sky_upperx,real_sky_uppery=489,690,605,765 ### Eight coordinates here, if any==None, then both images are display, and user is prompted for values
sim_sky_lowerx,sim_sky_lowery, sim_sky_upperx,sim_sky_uppery=None,446,593,498
"""
############################################################ Cross with caution
### Imports
import random
import os
###Check for slash at end of base_folder
if base_folder[-1]!="/":
print("Adding a slash to the end of the base_folder")
base_folder=base_folder+"/"
if os.path.exists(base_folder)==False:
os.mkdir(base_folder)
import subprocess
import time
from astropy.io import fits
from astropy.convolution import convolve_fft
import sys
#from multiprocessing import Pool
import matplotlib.pyplot as plt
import numpy as np
import scipy.interpolate as interpolate
from drizzlepac import astrodrizzle, ablot #If you don't have these and don't want to drizzle you can comment them out
from stwcs import wcsutil
import glob
import pandas as pd #in case I want to incorporate the master_stats.py definitions into here
os.chdir(login_cl_path)
from pyraf import iraf
iraf.artdata(_doprint=0)
os.chdir(base_folder)
plt_fig=1
plt.close("all")
np.random.seed(universal_seed)
random.seed(universal_seed)
chain_seed1 = universal_seed
chain_seed2 = universal_seed
seed1=universal_seed
seed2=universal_seed
seed = universal_seed
if universal_seed is None:
    seed = seed1 = seed2 = "INDEF"
### Definitions
def run(source, reference, output=None, interp='poly5', sinscl=1.0, stepsize=10, clobber=False):
#This script given by Warren Hack (hack@stsci.edu), STScI Science Software Branch, 14 Sept 2015
#Modified by Ryan Cole, 17 Sept. 2015
#Modified by Riley Peterson, 5 Dec. 2017
""" Blot the source image (simple FITS format) to match the WCS of the reference image
Parameters
==========
source : string
Filename of input/source image to be blotted
reference : string
Filename of image which the input image will be blotted to match
output : string
Filename of output blotted image
clobber : boolean
Specify whether or not to overwrite (delete then replace) the output file if a file
with that same name already exists on disk
ASSUMPTIONS:
    1. All files (source, reference and blotted image) are in simple FITS format.
2. WCS information does not include NPOL or D2IM corrections
(simple FITS does not support such corrections)
3. Source and reference files have the same units, and exptime can be set to 1
to match count rate units.
"""
if output:
        # Ensure the output file does not already exist
        if os.path.exists(output) and not clobber:
            print("Quitting... Output file already exists. Please rename or delete, then restart.")
            return
        if os.path.exists(output) and clobber:
            print("Removing "+output+" to replace with new result...")
            os.remove(output)
# Load data and WCS from "input" source image
    #My simulated images don't have WCS coordinates, so this doesn't make sense; I edit it so that the source has the same WCS as the reference
src = fits.getdata(source)
src_wcs = wcsutil.HSTWCS(source)
# Load WCS from reference image
blot_wcs = wcsutil.HSTWCS(reference)
out_wcs = blot_wcs.copy()
print(out_wcs)
# Define exptime to scale source drizzled image to reference image units
exptime = 1
# Blot src to reference
outsci = ablot.do_blot(src, src_wcs, out_wcs, exptime, coeffs=True,
interp=interp, sinscl=sinscl, stepsize=stepsize) #,
# wcsmap=drizzlepac.wcs_functions.WCSMap)
if output:
hdr = fits.getheader(source)
#return hdr
# Update WCS information of source image with
# WCS values from reference/output WCS
wcs_hdr = blot_wcs.wcs2header(sip2hdr=True)
for kw in wcs_hdr:
hdr[kw]=wcs_hdr[kw]
#hdr.update(kw,wcs_hdr[kw])
hdr["VAFACTOR"]=blot_wcs.vafactor
hdr["ORIENTAT"]=blot_wcs.orientat
hdr["CTYPE1"]="RA---TAN-SIP"
hdr["CTYPE2"]="DEC--TAN-SIP"
#hdr.update('VAFACTOR',blot_wcs.vafactor)
#hdr.update('ORIENTAT',blot_wcs.orientat)
# Create output file here...
phdu = fits.PrimaryHDU(data=outsci,header=hdr)
# write out file now...
phdu.writeto(output)
return outsci
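### Example call to run() (a sketch only; both filenames are placeholders, and per the docstring all files
### should be in simple FITS format): blot a simulated drizzled image onto the frame of one flt exposure.
"""
blotted = run("full_field_simulated_drz.fits", "id6p01xxq_flt_sci.fits",
              output="id6p01xxq_flt_blot.fits", clobber=True)
"""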
###Analyzes sky of one or two images
def sky_compare(image1,x1,y1,x2,y2,ext1=0,image2=None,x1_2=None,y1_2=None,x2_2=None,y2_2=None,ext2=0,close="no",fig_num=100,clean2="no",clean1="no"):
plt.figure(fig_num)
if close=="yes":
plt.close("all")
im1_full_array=fits.open(image1,memmap=True)[ext1].data
name1=image1.split("/")[-1]
if clean2=="no" or image2 is None:
im1_crop_array=im1_full_array[y1:y2,x1:x2].flat
im1_mean,im1_std,im1_med=np.mean(im1_crop_array),np.std(im1_crop_array),np.median(im1_crop_array)
if clean1=="yes":
im1_crop_array=im1_crop_array[im1_med/3<im1_crop_array] #This is here to exclude bad pixels
im1_crop_array=im1_crop_array[im1_crop_array<im1_med*1.66]
im1_mean,im1_std,im1_med=np.mean(im1_crop_array),np.std(im1_crop_array),np.median(im1_crop_array)
range_sky=(im1_med-3*im1_std,im1_med+3*im1_std)
plt.hist(im1_crop_array,label=name1+" data "+str(len(im1_crop_array))+" mean:"+str(im1_mean)+" std:"+str(im1_std)+" median:"+str(im1_med),
alpha=0.5,bins=100,range=range_sky,normed=True)
print(name1+" mean,stddev,median: "+str(im1_mean)+" "+str(im1_std)+" "+str(im1_med))
if image2 is not None:
im2_full_array=fits.open(image2,memmap=True)[ext2].data
name2=image2.split("/")[-1]
im2_crop_array=im2_full_array[y1_2:y2_2,x1_2:x2_2].flat
im2_mean,im2_std,im2_med=np.mean(im2_crop_array),np.std(im2_crop_array),np.median(im2_crop_array)
if clean2=="yes":
im2_crop_array=im2_crop_array[im2_med/3<im2_crop_array] #This is here to exclude bad pixels
im2_crop_array=im2_crop_array[im2_crop_array<im2_med*1.66] #This is here to exclude bad pixels in the flts
im2_mean,im2_std,im2_med=np.mean(im2_crop_array),np.std(im2_crop_array),np.median(im2_crop_array)
range_sky=(im2_med-3*im2_std,im2_med+3*im2_std)
plt.hist(im2_crop_array,label=image2.split("/")[-1]+" data "+str(len(im2_crop_array))+" mean:"+str(im2_mean)+" std:"+str(im2_std)+" median:"+str(im2_med),
alpha=0.5,bins=100,range=range_sky,normed=True)
print(name2+" mean,stddev,median: "+str(im2_mean)+" "+str(im2_std)+" "+str(im2_med))
if clean2=="yes":
im1_crop_array=im1_full_array[y1:y2,x1:x2].flat
im1_crop_array=im1_crop_array[im2_med/3<im1_crop_array] #This is here to exclude bad pixels
im1_crop_array=im1_crop_array[im1_crop_array<im2_med*1.66] #This is here to exclude bad pixels in the flts
im1_mean,im1_std,im1_med=np.mean(im1_crop_array),np.std(im1_crop_array),np.median(im1_crop_array)
plt.hist(im1_crop_array,label=name1+" data "+str(len(im1_crop_array))+" mean:"+str(im1_mean)+" std:"+str(im1_std)+" median:"+str(im1_med),
alpha=0.5,bins=100,range=range_sky,normed=True)
print(name1+" mean,stddev,median: "+str(im1_mean)+" "+str(im1_std)+" "+str(im1_med))
plt.legend(loc="upper right")
plt.xlabel("sky")
plt.ylabel("number in bin (normalized)")
plt.show()
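### Example call to sky_compare (a sketch; filenames are placeholders, the pixel box reuses the example
### coordinates from the commented-out compare_sky block in the options above):
"""
sky_compare("UV105842F160W_drz.fits", 489, 690, 605, 765,
            image2="full_field_simulated.fits", x1_2=489, y1_2=690, x2_2=605, y2_2=765)
"""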
#Builds top of feedme file
def top(file1,output_name,cutout_size,mag_zp):
file1.write("#================================================================================"+'\n')
file1.write("# IMAGE PARAMETERS"+'\n')
file1.write("B) "+output_name+" # Output data image block"+'\n')
file1.write("H) 1 "+str(cutout_size)+" 1 "+str(cutout_size)+" # Image region to fit (xmin xmax ymin ymax)"+'\n')
file1.write("J) "+str(mag_zp)+" # Magnitude photometric zeropoint"+'\n')
file1.write("O) regular # Display type (regular, curses, both)"+'\n')
file1.write("P) 1 # Create ouput only? (1=yes; 0=optimize)"+'\n')
file1.write("S) 0 # Modify/create objects interactively? )"+'\n')
file1.write('\n')
#Builds component 1 randomly
def comp1(file1,cutout_size,mag_range,re_range,n_range,q_range,pa_range,plt_scale_ratio=1):
mag=str(random.uniform(mag_range[0],mag_range[1]))
re=str(random.uniform(re_range[0],re_range[1])/plt_scale_ratio)
n=str(random.uniform(n_range[0],n_range[1]))
q=str(random.uniform(q_range[0],q_range[1]))
pa=str(random.uniform(pa_range[0],pa_range[1]))
file1.write("#Simulated galaxy parameters"+'\n')
file1.write(" 0) sersic # Object type"+'\n')
file1.write(" 1) "+str(cutout_size/2)+" "+str(cutout_size/2)+" 1 1 # position x, y"+'\n')
file1.write(" 3) "+mag+" 1 # total magnitude"+'\n')
file1.write(" 4) "+re+" 1 # R_e [pixels]"+'\n')
file1.write(" 5) "+n+" 1 # exponent (de Vaucouleurs = 4)"+'\n')
file1.write(" 9) "+q+" 1 # axis ratio (b/a)"+'\n')
file1.write("10) "+pa+" 1 # position angle (PA) [deg: Up=0, Left=90]"+'\n')
file1.write('\n')
return mag,re,n,q,pa
#Builds sky according to sky range
def sky(file1,sky_range):
file1.write("0) sky # Object type"+'\n')
file1.write("1) "+str(random.uniform(sky_range[0],sky_range[1]))+" 0 # sky background"+'\n')
file1.write('\n')
#Used for the pairs approach to get the coordinates within a given distance of (x,y)
def get_circle_coords(distance,x,y):
xmin,xmax,ymin,ymax=x-distance,x+distance,y-distance,y+distance
vals=[]
for x1 in range(xmin,xmax+1):
for y1 in range(ymin,ymax+1):
distance_val=((x1-x)**2+(y1-y)**2)**0.5
if distance_val<=distance:
vals.append((x1,y1,distance_val))
return vals
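### get_circle_coords returns an (x, y, distance) tuple for every integer pixel within `distance` of (x, y);
### the pair-placement code further down keeps only the ring at (roughly) the requested separation, e.g.:
"""
ring = [c for c in get_circle_coords(10, 200, 300) if round(c[2]) == 10]  # pixels ~10 px from (200, 300)
"""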
#Builds the feedme file for the interpolated values, this could probably be made simpler
if make_interpolate_chain=="yes":
def comp1_interp_chain(file1,cutout_size,mag_range,re_range,n_range,q_range,pa_range,chain_obj1,chain_val1,
chain_obj2,chain_val2,plt_scale_ratio=1,manual_overwrite="no",mag_val=None,re_val=None,n_val=None,q_val=None):
file1.write("#Simulated galaxy parameters"+'\n')
file1.write(" 0) sersic # Object type"+'\n')
file1.write(" 1) "+str(cutout_size/2+1)+" "+str(cutout_size/2+1)+" 1 1 # position x, y"+'\n')
mag=str(random.uniform(mag_range[0],mag_range[1]))
re=str(random.uniform(re_range[0],re_range[1]))
n=str(random.uniform(n_range[0],n_range[1]))
ar=str(random.uniform(q_range[0],q_range[1]))
pa=str(random.uniform(pa_range[0],pa_range[1]))
### do chain1
if chain_obj1=="mag":
mag=str(chain_val1)
if chain_obj1=="re":
re=str(chain_val1)
if chain_obj1=="n":
n=str(chain_val1)
if chain_obj1=="ar":
ar=str(chain_val1)
if chain_obj1=="pa":
pa=str(chain_val1)
### do chain2
print("circularized effective radius",re)
if chain_obj2 is not None and chain_val2 is not None:
if chain_obj2=="mag":
mag=str(chain_val2)
if chain_obj2=="re":
re=str(chain_val2/float(ar)**0.5) ###Because the chain_valued re is the circularized effective radius and we need the effective semi-major axis as input for GALFIT
print("semi-major radius",re)
if chain_obj2=="n":
n=str(chain_val2)
if chain_obj2=="ar":
ar=str(chain_val2)
if chain_obj2=="pa":
pa=str(chain_val2)
"""
### do chain3
if chain_obj3!=None and chain_array3!=None:
if chain_obj3=="mag":
mag=str(np.random.choice(chain_array3))
if chain_obj3=="re":
re=str(np.random.choice(chain_array3))
if chain_obj3=="n":
n=str(np.random.choice(chain_array3))
if chain_obj3=="ar":
ar=str(np.random.choice(chain_array3))
if chain_obj3=="pa":
pa=str(np.random.choice(chain_array3))
"""
if manual_overwrite=="yes":
mag,re,n,ar=str(mag_val),str(re_val),str(n_val),str(q_val)
#mag,re,n,ar,pa="24.5993","4.9040","0.8374","1.0","-37.8909"
#re=str(float(re)/plt_scale_ratio)
file1.write(" 3) "+mag+" 1 # total magnitude"+'\n')
file1.write(" 4) "+re+" 1 # R_e [pixels]"+'\n')
file1.write(" 5) "+n+" 1 # exponent (de Vaucouleurs = 4)"+'\n')
file1.write(" 9) "+ar+" 1 # axis ratio (b/a)"+'\n')
file1.write("10) "+pa+" 1 # position angle (PA) [deg: Up=0, Left=90]"+'\n')
file1.write('\n')
return mag,re,n,ar,pa
#Perform interpolation for chain1
def make_chain_vals1(chain_data1,chain_bins1,chain_seed1,chain_type1,chain_obj1,chain_column1,plt_fig=plt_fig):
###Open chain_data1 and get a histogram of the values
chain_data1 = fits.open(chain_data1)[1].data[chain_column1]
    chain_hist1, chain_bins1 = np.histogram(np.sort(chain_data1), bins=chain_bins1, density=True)
    ### There are two ways to proceed; personally I think the second way is better
### Way #1 inverse transform sampling:
chain_values1 = np.zeros(chain_bins1.shape)
chain_values1[1:] = np.cumsum(chain_hist1*np.diff(chain_bins1))
"""
### There are two ways to proceed personally I think the second way is better
### Way #1 inverse transform sampling:
epsilon=0
for i in range(0,len(chain_values1)-1):
if chain_values1[i]>=chain_values1[i+1]:
chain_values1[i+1]=chain_values1[i+1]+1*10**-11+epsilon
epsilon=epsilon+1*10**-12
#print(chain_values1)
#print(chain_bins1)
chain_inv_cdf1 = interpolate.interp1d(chain_values1, chain_bins1, kind=chain_type1)
np.random.seed(chain_seed1)
chain_r1 = helper #Should ensure that we make a random choice every time but from the same distribution
mag_array = chain_inv_cdf1(chain_r1)
plt.hist(chain_data1, bins=chain_bins1, alpha=0.4, normed=True, color='b',label="original data",zorder=2)
plt.hist(chain_inv_cdf1(chain_r1), bins=chain_bins1, alpha=0.5, normed=True, color='g', label="interpolated data",zorder=1)
plt.legend(loc="upper right")
plt.xlabel(chain_obj1)
plt.ylabel("Probability")
plt.title("Chain1 Results")
"""
    ### Way #2: Interpolate the PDF of the data from its histogram, finely sample from this to get
    ### a list of data values and their interpolated weights, then randomly select from this distribution (with weighting)
chain_linspace1 = np.linspace(np.min(chain_data1),np.max(chain_data1),chain_values1.size-1)
chain_pre_interp1 = interpolate.interp1d(chain_linspace1, chain_hist1*np.diff(chain_bins1), kind=chain_type1, fill_value="extrapolate") #interpolates PDF
x_chain=np.linspace(np.min(chain_data1),np.max(chain_data1),100000) #Creates discrete range of possible data values
p = chain_pre_interp1(x_chain)/np.sum(chain_pre_interp1(x_chain))
p[p<0]=0
mag_array = np.random.choice(x_chain,size=number_of_sims,p=p/np.sum(p))
chain_sim_data = np.random.choice(x_chain,size=100000,p=p/np.sum(p))
plt.figure(plt_fig)
plt.hist(mag_array,bins=chain_bins1,alpha=0.5,normed=True,label="simulated data sample size "+str(number_of_sims),color="r",zorder=3)
plt.plot(chain_sim_data[chain_sim_data.argsort()],chain_pre_interp1(chain_sim_data)[chain_sim_data.argsort()]/np.diff(chain_bins1)[0], label="simulated data", color="g",zorder=1)
plt.hist(chain_data1, bins=chain_bins1, alpha=0.5, normed=True, color='b',label="original data",zorder=2)
#plt.plot(x,chain_pre_interp1(x))
plt.legend(loc="upper right")
plt.xlabel(chain_obj1)
plt.ylabel("Probability")
plt.title("Chain1 Results")
return mag_array
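### Way #1 (inverse transform sampling) as a minimal, self-contained sketch, since it is credited at the top
### of this file but only appears commented out above. np.interp is used instead of interp1d so that repeated
### CDF values from empty bins don't need the epsilon fix; the sample data here is made up.
"""
import numpy as np
data = np.random.normal(24.0, 1.5, 5000)           # hypothetical magnitudes
hist, edges = np.histogram(data, bins=40, density=True)
cdf = np.zeros(edges.shape)
cdf[1:] = np.cumsum(hist * np.diff(edges))         # empirical CDF sampled at the bin edges
u = np.random.uniform(cdf[0], cdf[-1], size=1000)  # uniform draws in [0, 1]
samples = np.interp(u, cdf, edges)                 # map the draws through the inverse CDF
"""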
def make_chain_func2(chain_data1,chain_column1,chain_array1,chain_obj1,chain_data2,chain_type2,chain_obj2,chain_column2,plt_fig=plt_fig):
###returns the function that interpolates between chain1 and chain2
chain_data2_name=chain_data2
chain_data2 = fits.open(chain_data2)[1].data[chain_column2]
chain_data2 = chain_data2.astype("f8")
if chain_obj2=="re":
chain_data_q2 = fits.open(chain_data2_name)[1].data["ELLIPTICITY"]
chain_data_q2 = (1-chain_data_q2.astype("f8"))**0.5 ###get sqrt of axis ratio
        chain_data2=np.multiply(chain_data_q2,chain_data2) ###multiply FLUX_RADIUS by sqrt(axis ratio) to get a circularized effective radius
chain_data1 = fits.open(chain_data1)[1].data[chain_column1]
chain_data1 = chain_data1.astype("f8")
chain_interp_func2 = interpolate.interp1d(chain_data1, chain_data2, kind=chain_type2, fill_value="extrapolate")
chain_x2 = np.linspace(np.min(chain_data1),np.max(chain_data1),100000)
plt.figure(plt_fig+1)
plt.plot(chain_x2, chain_interp_func2(chain_x2),color='g',alpha=0.5,label="interpolated function",zorder=1)
plt.scatter(chain_data1, chain_data2, label="original data",alpha=0.5,zorder=2, color='b')
plt.scatter(chain_array1, chain_interp_func2(chain_array1), label="simulated data sample size "+str(number_of_sims), alpha=0.5, zorder=3, color='r')
#df11=pd.DataFrame.from_dict(np.load("/Users/rpeterson/FP/GALFIT_Simulations1/SExtractor_missing_problem/inputs_dict.npy").item()).T.drop("seed")
#foo=df11[~df11.id.isin(b.values())] #Here b is SEx_to_sim as determined by master_stats.py
#missed_ids,missed_mags,missed_res=foo["id"],foo["mag"].astype(float),np.multiply(foo["re"].astype(float),foo["q"].astype(float)**0.5)
#print(len(missed_ids))
#plt.scatter(missed_mags,missed_res,label="SExtractor misses", alpha=0.5, zorder=4, color='k')
#[plt.annotate(obj[0],xy=(obj[1],obj[2])) for obj in zip(missed_ids,missed_mags,missed_res)]
#plt.scatter(chain_val1, chain_val2, label="simulated data points")
plt.legend(loc="upper right")
plt.xlabel(chain_obj1)
plt.ylabel(chain_obj2)
plt.title("Chain2 Results")
return chain_interp_func2
### Sanity checks for settings that don't make sense
stop_button="no"
try:
if how_many_to_add>number_of_sims:
print("Warning:how_many_to_add>number_of_sims. Please set them equal or to values that make sense.")
ask = raw_input("Continue? If 'y' then they will be set equal(y/n):")
if ask=="y":
how_many_to_add=number_of_sims
else:
stop_button="yes"
except:
pass
if stop_button=="yes":
sys.exit()
### Prep for make_pairs
if make_pairs=="yes":
pairs=[]
if interp_re=="yes":
re_ratio_steps=np.linspace(0,0,1)
for a in pixel_distance_steps:
for b in luminosity_ratio_steps:
for c in re_ratio_steps:
for d in n_steps:
pairs.append((a,b,c,d))
if len(pairs)>number_of_sims/2:
print("The length of pair values:"+str(len(pairs))+" is greater than half the number of sims. Either increase the number of sims or decrease the number of possible pair values.")
sys.exit()
### Making input feedme files
initial_time = time.time()
chain_val1_list=[]
chain_val2_list=[]
inputs_dict_file=base_folder+"inputs_dict.npy"
k=1
inputs_dict=dict()
inputs_dict.update({"seed":universal_seed})
if make_feedmes=='yes':
if make_interpolate_chain=="yes":
### Make chain values 1 and the interpolation function for chain values 2
chain_array1=make_chain_vals1(chain_data1,chain_bins1,chain_seed1,chain_type1,chain_obj1,chain_column1,plt_fig=plt_fig)
if do_chain2=="yes":
chain_function2= make_chain_func2(chain_data1,chain_column1,chain_array1,chain_obj1,chain_data2,chain_type2,chain_obj2,chain_column2,plt_fig=plt_fig)
for i in range(1,number_of_sims+1):
if feedme_verbose=="yes":
print("Creating feedme for galaxy number "+str(i)+" elapsed time is:"+str(round((time.time()-initial_time),4)))
output_name = "galout_"+str(i)+"_model.fits"
file1=open(base_folder+"obj_sim_"+str(i)+".in","w")
top(file1,output_name,cutout_size,mag_zp)
input_x,input_y=cutout_size/2,cutout_size/2
if make_interpolate_chain=="yes":
chain_val1=chain_array1[i-1]
if do_chain2=="yes":
chain_val2=chain_function2(chain_val1) ###Remember this is circularized effective radius
else:
chain_obj2=None
chain_val2=None
if make_pairs=="no" or i<=number_of_sims/2:
input_mag,input_re,input_n,input_ar,input_pa=comp1_interp_chain(file1,cutout_size,mag_range,re_range,n_range,q_range,pa_range,chain_obj1,chain_val1,
chain_obj2=chain_obj2,chain_val2=chain_val2,plt_scale_ratio=1)
inputs_dict[i]={"id":i,"x_cutout":input_x,"y_cutout":input_y,"mag":input_mag,"re":input_re,"n":input_n,"q":input_ar,"pa":input_pa}
if make_pairs=="yes" and i>number_of_sims/2:
mag_parent,re_parent,n_parent,q_parent=float(inputs_dict[k]["mag"]),float(inputs_dict[k]["re"]),float(inputs_dict[k]["n"]),float(inputs_dict[k]["q"])
###Want the radius to factor in q. Convert from a_eff to r_eff
circ_re_parent=re_parent*q_parent**0.5
###Get random q (axis ratio) value for child
q_child=random.uniform(q_range[0],q_range[1])
###Find r_eff for child based on the ratio in pairs
circ_re_child=pairs[k-1][2]*circ_re_parent
#circ_re_child=pairs[k-1][2]*circ_re_parent*(q_parent**0.5)/(q_child**0.5)
###Find child a_eff
re_child=circ_re_child/(q_child**0.5)
mag_child,n_child=-2.5*np.log10(pairs[k-1][1])+mag_parent,pairs[k-1][3]
#print(n_child)
if interp_re=="yes":
circ_re_child=chain_function2(mag_child)
re_child=circ_re_child/(q_child**0.5)
input_mag,input_re,input_n,input_ar,input_pa=comp1_interp_chain(file1,cutout_size,mag_range,re_range,n_range,q_range,pa_range,chain_obj1,chain_val1,
chain_obj2,chain_val2,plt_scale_ratio=1,manual_overwrite="yes",mag_val=mag_child,re_val=re_child,n_val=n_child,q_val=q_child)
inputs_dict[i]={"id":i,"x_cutout":input_x,"y_cutout":input_y,"mag":input_mag,"re":input_re,"n":input_n,"q":input_ar,"pa":input_pa}
k=k+1
else:
mag_rand,re_rand,n_rand,q_rand,pa_rand=comp1(file1,cutout_size,mag_range,re_range,n_range,q_range,pa_range,plt_scale_ratio=1)
inputs_dict[i]={"id":i,"x_cutout":input_x,"y_cutout":input_y,"mag":mag_rand,"re":re_rand,"n":n_rand,"q":q_rand,"pa":pa_rand}
sky(file1,sky_range)
file1.close()
np.save(inputs_dict_file,inputs_dict)
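### inputs_dict is saved as a .npy pickle; it is read back the same way further down in the add_models step:
"""
inputs_dict = np.load(base_folder+"inputs_dict.npy").item()  # dict keyed by galaxy id (plus a "seed" entry)
"""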
### Running GALFIT to actually create models (in parallel)
command = galfit_binary
#initial_time = time.time()
if make_galaxies=='yes':
for i in range(1,number_of_sims+1):
os.chdir(base_folder)
name = base_folder+"obj_sim_"+str(i)+".in"
subprocess.Popen([command,name])
if make_galaxies_verbose=="yes":
print("Making galaxy image for number "+str(i)+" elapsed time is:"+str(round((time.time()-initial_time),4)))
"""
def run_galfit(name):
subprocess.Popen([command,name])
print("Making galaxy image for number "+str(i)+" "+str(time.time()-initial_time))
name_list=[]
for i in range(1,number_of_sims+1):
name_list.append(base_folder+"obj_sim_"+str(i)+".in")
Pool().map(run_galfit,name_list)
Pool.close()
print("done")
"""
#initial_time = time.time()
if convolve=='yes':
#ADDRESS NEED TO NORMALIZE PSF BEFORE CONVOLVING
for i in range(1,number_of_sims+1):
try:
os.remove("galout_"+str(i)+"_model_convolved.fits")
except:
pass
input_name = "galout_"+str(i)+"_model.fits"
sim = fits.open(input_name)
psf = fits.open(psf_image)
new_image = convolve_fft(sim[0].data,psf[0].data,normalize_kernel=True) #You can also try boundary="wrap", this seems to be faster, but I'm not sure what the difference is
new_image = np.float32(new_image)
hdu = fits.PrimaryHDU(new_image)
hdu.writeto("galout_"+str(i)+"_model_convolved.fits")
if convolution_verbose=="yes":
print("Performing convolution for galaxy number "+str(i)+" elapsed time is:"+str(round((time.time()-initial_time),4)))
###Make noise for individual cutouts
if mknoise_cutouts=='yes':
for i in range(1,number_of_sims+1):
try:
os.remove("galout_"+str(i)+"_model_convolved_noised.fits")
except:
pass
try:
os.remove("galout_"+str(i)+"_model_noised.fits")
except:
pass
input_name = "galout_"+str(i)+"_model_convolved.fits"
iraf.cd(base_folder)
if convolve=="yes":
iraf.mknoise(input_name,output="galout_"+str(i)+"_model_convolved_noised.fits",background=background,gain=gain,rdnoise=rdnoise,poisson=poisson,seed=seed)
else:
input_name = "galout_"+str(i)+"_model.fits"
iraf.mknoise(input_name,output="galout_"+str(i)+"_model_noised.fits",background=background,gain=gain,rdnoise=rdnoise,poisson=poisson,seed=seed)
if mknoise_cutouts_verbose=="yes":
print("Adding noise for galaxy number "+str(i)+" elapsed time is:"+str(round((time.time()-initial_time),4)))
###Display 5 galaxies
if display_5=='yes':
listo = [random.randrange(1,number_of_sims) for i in range(1,6)]
for i in listo:
objects=[]
for obj in ["galout_"+str(i)+"_model.fits",psf_image,"galout_"+str(i)+"_model_convolved.fits","galout_"+str(i)+"_model_noised.fits","galout_"+str(i)+"_model_convolved_noised.fits"]:
if os.path.exists(obj):
objects.append(obj)
os.system("ds9 "+" -fits ".join(objects)+" &")
os.system("open "+"obj_sim_"+str(i)+".in &")
if create_large_field=='yes':
os.chdir(base_folder)
iraf.cd(base_folder)
iraf.protect("*saved.fits")
#Might be better just to overwrite instead of deleting
try:
iraf.delete("full*")
except:
pass
try:
iraf.delete("*.reg")
except:
pass
if image_to_sim=="default":
if drizzle=="yes":
print("If you are trying to create a simulated drizzle image_to_sim should be your real final drizzled image")
sys.exit()
n = np.ones([large_field_ydim,large_field_xdim])
hdu = fits.PrimaryHDU(n)
hdu.writeto('full_field_with_sky.fits')
else:
os.system("cp "+image_to_sim+" "+base_folder)
os.rename(image_to_sim.split("/")[-1],"full_field_with_sky.fits")
image_now = "full_field_with_sky.fits"
hdulist=fits.open(image_now, mode="update")
print("Making data points that were non zero in original image equal to sky_value in sim image")
image_data=hdulist[0].data
image_data[image_data!=0]=sky_value
hdulist.flush()
hdulist.close()
if add_mknoise_large1=="no" and add_models=="no":
final_large_image=image_now
    ###I don't expect mknoise1 to get any use
if add_mknoise_large1=="yes" and add_models=="no":
print("Running mknoise1 (before galaxies have been added)")
iraf.mknoise(image_now,output=image_now.replace(".fits","_noise.fits"), background=background1, gain=gain1, rdnoise=rdnoise1, poisson=poisson1, seed=seed1)
final_large_image=image_now.replace(".fits","_noise.fits")
if add_mknoise_large1=="yes" and add_models=="yes":
print("Running mknoise1 (before galaxies have been added)")
iraf.mknoise(image_now,output=image_now.replace(".fits","_noise.fits"), background=background1, gain=gain1, rdnoise=rdnoise1, poisson=poisson1, seed=seed1)
background_field = "full_field_with_sky.fits"
noised_background_field = "full_field_with_sky_noise.fits"
if os.path.exists(noised_background_field):
os.system("cp "+noised_background_field+" full_field_with_sky_noise_galaxies.fits")
image_to_add_gals = "full_field_with_sky_noise_galaxies.fits"
else:
os.system("cp "+background_field+" full_field_with_sky_galaxies.fits")
image_to_add_gals = "full_field_with_sky_galaxies.fits"
###Add models
if add_models == "yes":
if image_to_sim=="default":
image_data=np.ones([large_field_ydim,large_field_xdim])
else:
hdulist=fits.open(image_to_sim,mode="readonly")
image_data=hdulist[0].data
hdulist.close()
###Get list of possible coordinates
i,j=np.nonzero(image_data)
possible_coords = list(zip(i,j))
if not_near_edge=="yes":
###Revise possible coordinates if we don't want galaxies near the edge
non_edge_coords=[]
print("Ensuring galaxies aren't near the edge, this might take a minute")
for coord in possible_coords:
try:
if image_data[coord[0]+clearance,coord[1]+clearance]!=0 and image_data[coord[0]-clearance,coord[1]+clearance]!=0 and image_data[coord[0]+clearance,coord[1]-clearance]!=0 and image_data[coord[0]-clearance,coord[1]-clearance]!=0 and coord[0]-clearance>0 and coord[1]-clearance>0:
non_edge_coords.append(coord)
except:
pass
possible_coords=non_edge_coords
possible_coords_copy=possible_coords
inputs_dict=np.load(inputs_dict_file).item()
if image_to_add_to!="default":
os.system("cp "+image_to_add_to+" "+base_folder)
os.rename(image_to_add_to.split("/")[-1],"full_image_to_add_to.fits")
image_to_add_gals="full_image_to_add_to.fits"
random.seed(universal_seed)
n=1
i=1
skipped_ids=[]
master_break="no"
skip="no"
how_many_to_add_copy=how_many_to_add
parents=how_many_to_add/2
###Start going through the galaxies
while i<=how_many_to_add:
added_image=fits.open(image_to_add_gals,mode="update")
added_image_array=added_image[0].data
cutout=fits.open("galout_"+str(i)+"_model"+which_to_add+".fits") #Issue
cutout_array=cutout[0].data
###I can't remember why I did this if statement here
if len(possible_coords)>0:
idx = random.randint(0,len(possible_coords)-1)
y = possible_coords[idx][0] ### This is opposite of what you expect
x = possible_coords[idx][1]
else:
idx = random.randint(0,len(possible_coords_copy)-1)
y = possible_coords_copy[idx][0] ### This is opposite of what you expect
x = possible_coords_copy[idx][1]
inputs_dict[i].update({"x_global":x,"y_global":y})
if make_pairs=="yes" and i<=how_many_to_add/2:
                ###For the first half we need to make sure that nothing is placed within the maximum pair separation of an already placed galaxy
                ###This is kind of ugly, I will consider rewriting
selected_coords_so_far=[(int(inputs_dict[id]["x_global"]),int(inputs_dict[id]["y_global"])) for id in sorted(inputs_dict.keys()) if id!="seed" and id<i]
if len(selected_coords_so_far)>0:
start_time=time.time()
while True:
hit_here="no"
for coord in selected_coords_so_far:
distance_val=((coord[0]-x)**2+(coord[1]-y)**2)**0.5
try:
image_data[y,x]
except:
distance_val=0
if distance_val<=pairs[-1][0] or image_data[y,x]==0 or y<0 or x<0:
#print("within radius",distance_val,x,y,len(possible_coords))
idx = random.randint(0,len(possible_coords)-1)
y = possible_coords[idx][0] ### This is opposite of what you expect
x = possible_coords[idx][1]
hit_here="yes"
break
#print(min([((coord1[0]-x)**2+(coord1[1]-y)**2)**0.5 for coord1 in selected_coords_so_far]),x,y)
if hit_here=="no":
#print(min([((coord1[0]-x)**2+(coord1[1]-y)**2)**0.5 for coord1 in selected_coords_so_far]),x,y)
break
if (time.time()-start_time)>1:
print("\nWarning: It is getting difficult to place parent galaxies. Please considering reducing the maximum spacing between pairs. Figuring out the possible coordinates...")
impossible_coords1=[]
for coord1 in selected_coords_so_far:
box=get_circle_coords(int(pairs[-1][0]),int(coord1[0]),int(coord1[1]))
impossible_coords=[(q[1],q[0]) for q in box]
[impossible_coords1.append(r) for r in impossible_coords]
#impossible_coords=[get_circle_coords(int(pairs[-1][0]),int(coord1[0]),int(coord1[1]),possible_coords) for coord1 in selected_coords_so_far]
#impossible_coords=sum(impossible_coords, [])
print("Figured out the impossible coordinates")
#impossible_coords1=sorted([(coor[1],coor[0]) for coor in impossible_coords])
possible_coords=sorted(list(set(possible_coords)-set(impossible_coords1)))
print("Figured out the new possible coordinates, of which there are: "+str(len(possible_coords)))
start_time=time.time()
if len(possible_coords)==0:
print("There aren't any places where its possible to place a parent galaxy. You should reduce the maximum distance if you want to add more parents. Moving on to child galaxies")
print("\nCould only add: "+str(i-1))
parents=i-1
how_many_to_add=2*(i-1)
time.sleep(10)
break
#if hit_here=="yes" and (time.time()-start_time)>2:
# coords_to_exclude=[get_circle_coords(int(pairs[-1][0]),int(inputs_dict[id]["x_global"]),int(inputs_dict[id]["y_global"]),possible_coords) for id in sorted(inputs_dict.keys()) if id!="seed" and id<=i]
# coords_to_exclude=[(coords_to_exclude[l][1],coords_to_exclude[l][0]) for l in range(len(coords_to_exclude))]
# possible_coords=list(set(possible_coords)-set(coords_to_exclude))
# print(possible_coords)
if make_pairs=="yes" and i>how_many_to_add/2:
possible_child_coords=get_circle_coords(int(pairs[n-1][0]),int(inputs_dict[n]["x_global"]),int(inputs_dict[n]["y_global"]))
possible_child_coords=[l for l in possible_child_coords if round(l[2])==int(pairs[n-1][0])]
if not_near_edge=="yes":
                    ###This is disgusting; if it throws an error, put it in a try statement as I did above, since image_data might be indexed out of bounds
possible_child_coords=[l for l in possible_child_coords if round(l[2])==int(pairs[n-1][0]) and image_data[l[1]+clearance,l[0]+clearance]!=0 and image_data[l[1]-clearance,l[0]+clearance]!=0 and image_data[l[1]+clearance,l[0]-clearance]!=0 and image_data[l[1]-clearance,l[0]-clearance]!=0 and l[0]-clearance>0 and l[1]-clearance>0]
idx = random.randint(0,len(possible_child_coords)-1)
x = possible_child_coords[idx][0]
y = possible_child_coords[idx][1]
selected_coords_so_far=[(int(inputs_dict[id]["x_global"]),int(inputs_dict[id]["y_global"])) for id in sorted(inputs_dict.keys()) if id!="seed" and id<i and id!=i-how_many_to_add/2]
start_time=time.time()
while True:
skip="no"
hit_here="no"
for coord in selected_coords_so_far:
distance_val=((coord[0]-x)**2+(coord[1]-y)**2)**0.5
try:
image_data[y,x]
except:
distance_val=0
#print("hit child except")
if distance_val<=pairs[-1][0] or image_data[y,x]==0 or y<0 or x<0:
#print("within radius",distance_val,x,y)
idx = random.randint(0,len(possible_child_coords)-1)
x = possible_child_coords[idx][0]
y = possible_child_coords[idx][1]
hit_here="yes"
break
#print(min([((coord1[0]-x)**2+(coord1[1]-y)**2)**0.5 for coord1 in selected_coords_so_far]),x,y)
if (time.time()-start_time)>1:
start_time=time.time()
print("Warning: It is getting difficult to place child galaxies")
for indexo in range(len(possible_child_coords)):
hit_here="no"
x,y=possible_child_coords[indexo][0],possible_child_coords[indexo][1]
#print("Trying "+str(x)+" "+str(y))
for coord in selected_coords_so_far:
distance_val=((coord[0]-x)**2+(coord[1]-y)**2)**0.5
try:
image_data[y,x]
except:
distance_val=0
if distance_val<=pairs[-1][0] or image_data[y,x]==0 or y<0 or x<0:
#print("within radius",distance_val,x,y)
#print("Condition met for "+str(x)+" "+str(y))
idx = random.randint(0,len(possible_child_coords)-1)
x = possible_child_coords[idx][0]
y = possible_child_coords[idx][1]
hit_here="yes"
break ###ADDRESS
#print("Should be trying the next coordinates")
#time.sleep(0.5)
if hit_here=="yes":
skip="yes"
print("Skipping "+str(i)+" because there is no place for it to go")
#sys.exit()
break
if hit_here=="no":
break
n=n+1
#coords_to_exclude=[get_circle_coords(int(pairs[-1][0]),int(inputs_dict[id]["x_global"]),int(inputs_dict[id]["y_global"]),possible_coords) for id in sorted(inputs_dict.keys()) if id!="seed" and id<=i]
#coords_to_exclude=[(coords_to_exclude[l][1],coords_to_exclude[l][0]) for l in range(len(coords_to_exclude))]
#possible_coords=list(set(possible_coords)-set(coords_to_exclude))
if skip=="no":
inputs_dict[i].update({"x_global":x,"y_global":y})
if (cutout_size)%2!=0:
print("Cutout_size must be even")
break
xmin,xmax = x-cutout_size/2-1,x+cutout_size/2-1
ymin,ymax = y-cutout_size/2-1,y+cutout_size/2-1
xmini,ymini,xmaxi,ymaxi=0,0,cutout_size,cutout_size
xmin_org = xmin
ymin_org = ymin
#print(cutout_array.shape)
#print(x,y)
#print(xmin,xmax)
#print(ymin,ymax)
### Could have done a try except statement here but it would omit galaxies cut off at edge of image
if xmin<0:
print("Galaxy #"+str(i)+" was off edge"+" xmin was:"+str(xmin)+" new xmin:"+str(abs(xmin)))
xmini=abs(xmin)
#cutout_array=cutout_array[:,xmini:]
#print(cutout_array.shape)
xmin=0
if ymin<0:
print("Galaxy #"+str(i)+" was off edge"+" ymin was:"+str(ymin)+" new ymin:"+str(abs(ymin)))
ymini=abs(ymin)
#cutout_array=cutout_array[ymini:,:]
#print(cutout_array.shape)
ymin=0
if xmax>large_field_xdim:
print("Galaxy #"+str(i)+" was off edge"+" xmax was:"+str(xmax)+" new xmax:"+str(large_field_xdim-xmin_org))
xmaxi=(xmax-x+1)+(large_field_xdim-x+1)
#cutout_array=cutout_array[:,:xmaxi]
#print(cutout_array.shape)
xmax=large_field_xdim
if ymax>large_field_ydim:
print("Galaxy #"+str(i)+" was off edge"+" ymax was:"+str(ymax)+" new ymax:"+str(large_field_ydim-ymin_org))
ymaxi=(ymax-y+1)+(large_field_ydim-y+1)
#cutout_array=cutout_array[:ymaxi,:]
#print(cutout_array.shape)
ymax=large_field_ydim
added_image_array[ymin:ymax,xmin:xmax]=added_image_array[ymin:ymax,xmin:xmax]+cutout_array[ymini:ymaxi,xmini:xmaxi]
cutout.close()
if add_models_verbose=="yes":
print("\nFinished adding galaxy #"+str(i)+" at "+str(x)+" "+str(y)+" to "+image_to_add_gals)
if label=="yes": ### Not best way to do this but putting it outside loop, would need to make lists, etc.
if label_verbose=="yes":
print("Making label for "+str(i))
file1=open("region_file.reg","a+")
file1.write("text "+str(x+3)+" "+str(y+3)+" # text ={"+str(i)+"}"+'\n')
file1.close()
if skip=="yes":
skipped_ids.append(i)
i=i+1
if make_pairs=="yes":
for d in skipped_ids:
del inputs_dict[d]
        #if it broke at 688, but you wanted to add 800, it deletes 689 through 800 because they don't exist
for d in range(how_many_to_add+1,how_many_to_add_copy+1):
del inputs_dict[d]
np.save(inputs_dict_file,inputs_dict)
added_image_array[image_data==0]=0 #mask zero values so we don't get square cutouts on diagonal edges
added_image.close()
#hdulist.close()
if add_mknoise_large2=="no" and add_models=="yes":
final_large_image=image_to_add_gals
if add_mknoise_large2=="yes" and add_models=="yes":
print("Running mknoise2 (after galaxies have been added)")
"""