forked from cositools/cosi-data-challenge-1
-
Notifications
You must be signed in to change notification settings - Fork 0
/
COSIpy_dc1.py
1527 lines (1177 loc) · 62.4 KB
/
COSIpy_dc1.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
import ROOT as M
import numpy as np
import sys
import os
import matplotlib.pyplot as plt
import matplotlib as mpl
from tqdm.autonotebook import tqdm
from IPython.display import HTML
import warnings
warnings.filterwarnings('ignore')
# Load the MEGAlib shared library into ROOT so its classes
# (MFileEventsTra, MString, MPhysicalEvent, ...) become available via `M`
M.gSystem.Load("$(MEGAlib)/lib/libMEGAlib.so")
# Initialize MEGAlib's global state (required once before using its readers)
G = M.MGlobal()
G.Initialize()
class COSIpy:
    """
    Entry point for reading and inspecting a COSI data-challenge data set.

    Wraps a MEGAlib .tra(.gz) event file: read_COSI_DataSet() fills
    `self.dataset` (a `dataset` container) with per-event quantities, and the
    plot_* methods provide quick-look visualisations of the raw data.
    """

    def __init__(self, data_dir, filename):
        """
        :param: data_dir Path of the directory in which the data set is stored
        :param: filename .tra or .tra.gz file to read in
        """
        self.data_dir = data_dir
        self.filename = filename
        self.horizon = 60 # field of view (deg): radius around zenith regarded as observable
        # empty data container; filled by read_COSI_DataSet()
        self.dataset = dataset()

    def read_COSI_DataSet(self):
        """
        Reads in MEGAlib .tra (or .tra.gz) file (self.filename) in given directory (self.data_dir)
        Returns COSI data set as a dictionary of the form
        COSI_DataSet = {'Full filename':self.data_dir+'/'+self.filename,
                        'Energies':erg,
                        'TimeTags':tt,
                        'Xpointings':np.array([lonX,latX]).T,
                        'Ypointings':np.array([lonY,latY]).T,
                        'Zpointings':np.array([lonZ,latZ]).T,
                        'Phi':phi,
                        'Chi local':chi_loc,
                        'Psi local':psi_loc,
                        'Distance':dist,
                        'Chi galactic':chi_gal,
                        'Psi galactic':psi_gal}
        All angles are stored in radians.
        :param: data_dir Path of directory in which data set is stored
        :param: filename .tra or tra.gz file to read in
        """
        # read COSI data starts here
        # check if the file can be opened by MEGAlib's event reader
        Reader = M.MFileEventsTra()
        if Reader.Open(M.MString(self.data_dir+'/'+self.filename)) == False:
            print("Unable to open file " + self.data_dir+'/'+self.filename + ". Aborting!")
        else:
            # initialise empty lists to store to
            # total photon energy
            erg = []
            # Time tag in UNIX time
            tt = []
            # Event Type (0: CE; 4:PE; ...)
            et = []
            # latitude of X direction of spacecraft
            latX = []
            # longitude of X direction of spacecraft
            lonX = []
            # latitude of Z direction of spacecraft
            latZ = []
            # longitude of Z direction of spacecraft
            lonZ = []
            # Compton scattering angle
            phi = []
            # measured data space angle chi (azimuth direction; 0..360 deg)
            chi_loc = []
            # measured data space angle psi (polar direction; 0..180 deg)
            psi_loc = []
            # First lever arm distance in cm
            dist = []
            # measured gal angle chi (lon direction)
            chi_gal = []
            # measured gal angle psi (lat direction)
            psi_gal = []
            # browse through .tra file, select events, and sort into corresponding list
            while True:
                # the Reader class from MEGAlib knows where an event starts and ends and
                # returns the Event object which includes all information of an event
                Event = Reader.GetNextEvent()
                if not Event:
                    break
                # here only select Compton events (will add Photo events later as optional)
                # all calculations and definitions taken from:
                # /MEGAlib/src/response/src/MResponseImagingBinnedMode.cxx
                # Total Energy
                erg.append(Event.Ei())
                # Time tag in UNIX seconds
                tt.append(Event.GetTime().GetAsSeconds())
                # Event type (0 = Compton, 4 = Photo)
                et.append(Event.GetEventType())
                # x axis of space craft pointing at GAL latitude
                latX.append(Event.GetGalacticPointingXAxisLatitude())
                # x axis of space craft pointing at GAL longitude
                lonX.append(Event.GetGalacticPointingXAxisLongitude())
                # z axis of space craft pointing at GAL latitude
                latZ.append(Event.GetGalacticPointingZAxisLatitude())
                # z axis of space craft pointing at GAL longitude
                lonZ.append(Event.GetGalacticPointingZAxisLongitude())
                # note that the y axis can be calculated from the X and Z components
                # therefore it is not saved, and will be computed further down
                if Event.GetEventType() == M.MPhysicalEvent.c_Compton:
                    # Compton scattering angle
                    phi.append(Event.Phi())
                    # data space angle chi (azimuth)
                    chi_loc.append((-Event.Dg()).Phi())
                    # data space angle psi (polar)
                    psi_loc.append((-Event.Dg()).Theta())
                    # interaction length between first and second scatter in cm
                    dist.append(Event.FirstLeverArm())
                    # gal longitude angle corresponding to chi
                    chi_gal.append((Event.GetGalacticPointingRotationMatrix()*Event.Dg()).Phi())
                    # gal latitude angle corresponding to psi
                    psi_gal.append((Event.GetGalacticPointingRotationMatrix()*Event.Dg()).Theta())
            # because everything is built upon numpy arrays later, we will initialise them here
            erg = np.array(erg)
            tt = np.array(tt)
            et = np.array(et)
            latX = np.array(latX)
            lonX = np.array(lonX)
            # wrap longitudes from 0..2pi to -pi..pi (values are in radians)
            lonX[lonX > np.pi] -= 2*np.pi
            latZ = np.array(latZ)
            lonZ = np.array(lonZ)
            # wrap longitudes from 0..2pi to -pi..pi (values are in radians)
            lonZ[lonZ > np.pi] -= 2*np.pi
            phi = np.array(phi)
            chi_loc = np.array(chi_loc)
            # shift azimuth angles into the 0..2pi range (radians)
            chi_loc[chi_loc < 0] += 2*np.pi
            psi_loc = np.array(psi_loc)
            dist = np.array(dist)
            chi_gal = np.array(chi_gal)
            psi_gal = np.array(psi_gal)
            # construct Y direction from X and Z direction
            # (construct_scy is a helper defined elsewhere in this module)
            lonlatY = construct_scy(np.rad2deg(lonX),np.rad2deg(latX),
                                    np.rad2deg(lonZ),np.rad2deg(latZ))
            lonY = np.deg2rad(lonlatY[0])
            latY = np.deg2rad(lonlatY[1])
            # avoid negative zeros
            chi_loc[np.where(chi_loc == 0.0)] = np.abs(chi_loc[np.where(chi_loc == 0.0)])
            # make observation dictionary
            COSI_DataSet = {'Full filename':self.data_dir+'/'+self.filename,
                            'Energies':erg,
                            'TimeTags':tt,
                            'Xpointings':np.array([lonX,latX]).T,
                            'Ypointings':np.array([lonY,latY]).T,
                            'Zpointings':np.array([lonZ,latZ]).T,
                            'Phi':phi,
                            'Chi local':chi_loc,
                            'Psi local':psi_loc,
                            'Distance':dist,
                            'Chi galactic':chi_gal,
                            'Psi galactic':psi_gal}
            # attach the dictionary to the dataset container
            self.dataset.data = COSI_DataSet

    def plot_raw_spectrum(self,bins=100):
        """
        Plot spectrum of the raw counts in the given data set.
        Returns energy bin information as well as counts per bin.
        :param: bins number of bins to be used for plotting, or specific definition of energy bin edges
                     Default = 100 equally spaced bins
        """
        try:
            # histogramming the energies in the data set
            self.dataset.energy_spec_data, self.dataset.energy_bin_edges = np.histogram(self.dataset.data['Energies'],bins=bins)
            # bin minima and maxima
            self.dataset.energy_bin_min = self.dataset.energy_bin_edges[0:-1]
            self.dataset.energy_bin_max = self.dataset.energy_bin_edges[1:]
            # center of bins
            self.dataset.energy_bin_cen = 0.5*(self.dataset.energy_bin_max+self.dataset.energy_bin_min)
            # half-width of bins
            self.dataset.energy_bin_wid = 0.5*(self.dataset.energy_bin_max-self.dataset.energy_bin_min)
            # number of energy bins
            self.dataset.n_energy_bins = len(self.dataset.energy_bin_cen)
            # make a plot: histogram outline drawn segment by segment
            fig, ax = plt.subplots(1,1,figsize=(8,6))
            #ax.plot(self.dataset.energy_bin_cen,self.dataset.energy_spec_data,where='mid')
            for i in range(self.dataset.n_energy_bins):
                if i == 0 :
                    # rising edge of the first bin (from zero)
                    ax.plot([self.dataset.energy_bin_min[i],
                             self.dataset.energy_bin_min[i]],
                            [0,
                             self.dataset.energy_spec_data[i]/self.dataset.energy_bin_wid[i]/2],marker='',linestyle='-',color='black')
                elif i < self.dataset.n_energy_bins:
                    # step between neighbouring bins
                    ax.plot([self.dataset.energy_bin_min[i],
                             self.dataset.energy_bin_min[i]],
                            [self.dataset.energy_spec_data[i-1]/self.dataset.energy_bin_wid[i-1]/2,
                             self.dataset.energy_spec_data[i]/self.dataset.energy_bin_wid[i]/2],marker='',linestyle='-',color='black')
                # falling edge at the right bin boundary
                ax.plot([self.dataset.energy_bin_max[i],
                         self.dataset.energy_bin_max[i]],
                        [self.dataset.energy_spec_data[i]/self.dataset.energy_bin_wid[i]/2,
                         0],marker='',linestyle='-',color='black')
                # horizontal line at the bin content (counts per keV; bin_wid are half-widths, hence /2)
                ax.plot([self.dataset.energy_bin_min[i],
                         self.dataset.energy_bin_max[i]],
                        [self.dataset.energy_spec_data[i]/self.dataset.energy_bin_wid[i]/2,
                         self.dataset.energy_spec_data[i]/self.dataset.energy_bin_wid[i]/2],marker='',linestyle='-',color='black')
            ax.set_xlabel('Energy [keV]')
            ax.set_ylabel('Counts [cnts/keV]')
        except AttributeError:
            print('not working here ...')

    def plot_elevation(self,l_src,b_src,name_src):
        """
        Plot the elevation of a list of sources for the given data set.
        Returns the zenith pointings in longitude and latitude as well as the respective time tags
        :param: l_src list of longitude coordinates for sources to calculate elevation
        :param: b_src list of latitude coordinates for sources to calculate elevation
        :param: name_src name of respective sources
        """
        try:
            # extracting the pointing direction information (zenith direction for each time tag)
            self.dataset.l_pointing, self.dataset.b_pointing = np.rad2deg(self.dataset.data['Zpointings'][:,0]), np.rad2deg(self.dataset.data['Zpointings'][:,1])
            # extracting time tags for corresponding pointings
            self.dataset.t_pointing = self.dataset.data['TimeTags']
            # make a plot for the sources
            try:
                # set up figure and axis
                fig, ax = plt.subplots(1,1,figsize=(16,6))
                # loop over sources
                for l,b,name in zip(l_src,b_src,name_src):
                    # the elevation is just the edge of the field of view (= horizon) minus the angular distance to the source
                    # (angular_distance is a helper defined elsewhere in this module)
                    tmp_elevation = self.horizon-angular_distance(l,b,self.dataset.l_pointing,self.dataset.b_pointing)
                    # NOTE(review): all sources use color='k', so legend entries are not distinguishable by colour
                    ax.plot(self.dataset.t_pointing,tmp_elevation,label=name,marker='.',color='k',linestyle='')
                ax.set_xlabel('Unix time [s]')
                ax.set_ylabel('Elevation above COSI horizon [deg]')
                # indicating what the maximum elevation is (TS: the y-axis might be confusing)
                ax.axhline(self.horizon,linestyle='--',color='black')
                ax.text(np.median(minmax(self.dataset.t_pointing)),self.horizon,
                        'Zenith',horizontalalignment='center',verticalalignment='bottom')
                ax.set_ylim(0,)
                ax.legend()
            except TypeError:
                print('Input longitudes, latitudes, and names should be lists.')
        except AttributeError:
            print('not working here ...')

    def plot_lightcurve(self):
        """
        Plot the light curve (count rate vs. time) of the data set.
        If the time tags have not been binned yet, bins them into
        1-hour intervals via dataset.time_binning_tags() and retries.
        """
        try:
            try:
                # count rate per time bin: counts / bin duration
                self.dataset.light_curve = [len(self.dataset.data_time_tagged[i]['Indices'])/self.dataset.data_time_tagged[i]['DeltaTime'] for i in range(self.dataset.n_time_bins)]
                # time axis: cumulative bin edges, then centres and half-widths
                self.dataset.times_bins = [0] + [self.dataset.data_time_tagged[i]['DeltaTime'] for i in range(self.dataset.n_time_bins)]
                self.dataset.times_edges = np.cumsum(self.dataset.times_bins)
                self.dataset.times_min = self.dataset.times_edges[0:-1]
                self.dataset.times_max = self.dataset.times_edges[1:]
                self.dataset.times_cen = 0.5*(self.dataset.times_max+self.dataset.times_min)
                self.dataset.times_wid = 0.5*(self.dataset.times_max-self.dataset.times_min)
                fig, ax = plt.subplots(1,1,figsize=(8,6))
                ax.step(self.dataset.times_cen,self.dataset.light_curve,where='mid')
                ax.set_xlabel('Seconds since UNIX second '+str('%.2f' % self.dataset.data['TimeTags'][0])+' [s]')
                ax.set_ylabel('Count rate [cnts/s]')
            except:
                # NOTE(review): bare except also hides unrelated errors; the
                # intended case is a missing data_time_tagged attribute
                print('Time tags not binned, yet, binning for 1 hour intervals now ...')
                self.dataset.time_binning_tags()
                self.plot_lightcurve()
        except AttributeError:
            print('not working here ...')

    @property
    def available_attributes(self):
        """
        Print available attributes of current instance of COSIpy analysis object.
        """
        for key in self.__dict__.keys():
            print(key)

    @property
    def available_methods(self):
        """
        Print available methods (and attributes) of current instance.
        """
        # dir() minus dunders minus instance attributes leaves the callable interface
        print([thing for thing in dir(self) if not thing.startswith('__') and not np.any(np.isin(list(self.__dict__.keys()),thing))])

    def init_spi(self):
        # attach a SPI analysis object (SPI is defined elsewhere in this module)
        self.spi = SPI()
class dataset(COSIpy):
    """
    Container for one COSI data set.

    Holds the raw event dictionary produced by COSIpy.read_COSI_DataSet()
    (attribute `data`), provides time binning (time_binning_tags /
    time_binning_tags_2), Compton-data-space binning (init_binning /
    get_binned_data), and quick-look plots of the binned data.
    """

    def __init__(self,name='name'):
        """
        :param: name descriptive name of this data set (default 'name')
        """
        # NOTE: COSIpy.__init__ is intentionally NOT called: a dataset does not
        # need data_dir/filename; `data` is filled by COSIpy.read_COSI_DataSet()
        self.name = name
        # COSI_DataSet dictionary; None until a file has been read in
        self.data = None

    @property
    def plot_(self):
        # Guard attribute: there is no generic `plot_`; use the dedicated
        # plot_raw_spectrum() / plot_lightcurve() methods instead.
        raise AttributeError("'dataset' object has no attribute 'plot_'")

    @property
    def data_info(self):
        """
        Prints tabularised information of the individual COSI_DataSet dictionary entries.
        In particular mean, std, min, and max.
        TS: might be using pandas data frame in the future because it's doing that automatically.
        """
        # only if the data is already read in
        if isinstance(self.data,dict):
            # five right-aligned columns: key, mean, std, min, max
            row_format = '{:>15}' * 5
            # which file is used
            print('Full filename'+':',self.data['Full filename'])
            # table header row
            print(row_format.format('',*['mean','std','min','max']))
            # loop over remaining dictionary entries and calculate and print content
            for key in self.data.keys():
                # ignore strings (the filename entry)
                if isinstance(self.data[key],str):
                    pass
                else:
                    print(row_format.format(key+':',
                                            str('%.3f' % np.mean(self.data[key])),
                                            str('%.3f' % np.std(self.data[key])),
                                            str('%.3f' % np.min(self.data[key])),
                                            str('%.3f' % np.max(self.data[key]))))
        else:
            print('Data set not read in, use read_COSI_DataSet() first.')

    def time_binning_tags(self,time_bin_size=3600,time_binning=None):
        """
        Get COSI data reformatted to a data set per time bin.
        Output is a dictionary of the form:
        data = {'Full filename':COSI_Data['Full filename'],
                'Bin number':b,
                'Indices':tdx,
                'DeltaTime':time}
        :param: time_bin_size equi-sized time intervals to bin select photons in data set
                              Default: 3600 (in units of seconds)
        :param: time_binning  currently unused; see time_binning_tags_2() for custom bin edges
        """
        self.init_time_bin_size = time_bin_size
        # number of time bins covering the full observation span
        self.n_time_bins = int(np.ceil(np.diff(minmax(self.data['TimeTags']))/self.init_time_bin_size))
        # conversion factor seconds -> bin index
        s2b = 1./self.init_time_bin_size
        # duration of the (generally shorter) last bin
        self.last_bin_size = np.diff(minmax(self.data['TimeTags']))[0]-(self.n_time_bins-1)/s2b
        # fill data (one entry per time bin of size time_bin_size)
        self.data_time_tagged = []
        for b in range(self.n_time_bins):
            # photon indices whose time tag falls into bin b
            # (minmin() presumably shifts the tags to start at zero; helper defined elsewhere in this module)
            tdx = np.where( (minmin(self.data['TimeTags'])*s2b >= b) &
                            (minmin(self.data['TimeTags'])*s2b < b+1) )[0]
            tmp_data = {'Full filename':self.data['Full filename'],
                        'Bin number':b,
                        'Indices':tdx,
                        'DeltaTime':self.init_time_bin_size if b < self.n_time_bins-1 else self.last_bin_size}
            self.data_time_tagged.append(tmp_data)
        # build the TIME axis container from the per-bin durations
        self.times = TIME()
        self.times.times_bins = [0] + [self.data_time_tagged[i]['DeltaTime'] for i in range(self.n_time_bins)]
        self.times.n_time_bins = self.n_time_bins
        self.times.times_edges = np.cumsum(self.times.times_bins)
        self.times.times_min = self.times.times_edges[0:-1]
        self.times.times_max = self.times.times_edges[1:]
        self.times.times_cen = 0.5*(self.times.times_max+self.times.times_min)
        self.times.times_wid = 0.5*(self.times.times_max-self.times.times_min)
        # photons per time bin; only non-empty bins enter the binned data later
        self.times.n_ph_t = np.array([len(self.data_time_tagged[i]['Indices']) for i in range(self.times.n_time_bins)])
        self.times.n_ph_dx = np.where(self.times.n_ph_t != 0)[0]
        self.times.n_ph = len(self.times.n_ph_dx)
        # total exposure of non-empty bins (times_wid are half-widths, hence 2*)
        self.times.total_time = 2*np.sum(self.times.times_wid[self.times.n_ph_dx])

    ##########################################
    ## JB May 4, 2022: extend time_binning_tags function to take
    # arbitrary time bins rather than one time bin size
    def time_binning_tags_2(self,time_bin_size,time_binning=None):
        """
        Get COSI data reformatted to a data set per (custom) time bin.
        Output is a dictionary of the form:
        data = {'Full filename':COSI_Data['Full filename'],
                'Bin number':b,
                'Indices':tdx,
                'DeltaTime':time}
        :param: time_bin_size np.array of custom time bin edges
        :param: time_binning  currently unused
        """
        # custom bin edges; n edges define n-1 bins
        self.init_time_bin_size = time_bin_size
        self.n_time_bins = len(self.init_time_bin_size)-1
        # per-bin conversion seconds -> bin index
        s2b = 1./np.diff(self.init_time_bin_size)
        # duration of the last bin
        self.last_bin_size = np.diff(self.init_time_bin_size)[-1]
        # fill data (one entry per custom time bin)
        self.data_time_tagged = []
        for b in range(self.n_time_bins):
            # NOTE(review): this reuses the equal-bin selection with a per-bin
            # scale factor; only exact for equal-sized bins — verify against
            # the caller's bin edges
            tdx = np.where( (minmin(self.data['TimeTags'])*s2b[b] >= b) &
                            (minmin(self.data['TimeTags'])*s2b[b] < b+1) )[0]
            # NOTE(review): b < n_time_bins is always true inside this loop,
            # so the else branch (last_bin_size) is never taken — harmless here
            tmp_data = {'Full filename':self.data['Full filename'],
                        'Bin number':b,
                        'Indices':tdx,
                        'DeltaTime':np.diff(self.init_time_bin_size)[b] if b < self.n_time_bins else self.last_bin_size}
            self.data_time_tagged.append(tmp_data)
        self.times = TIME()
        self.times.times_bins = [self.data_time_tagged[i]['DeltaTime'] for i in range(self.n_time_bins)]
        self.times.n_time_bins = self.n_time_bins
        # the given edges ARE the time axis here
        self.times.times_edges = time_bin_size
        self.times.times_min = self.times.times_edges[0:-1]
        self.times.times_max = self.times.times_edges[1:]
        self.times.times_cen = 0.5*(self.times.times_max+self.times.times_min)
        self.times.times_wid = 0.5*(self.times.times_max-self.times.times_min)
        # need to take into account empty time bins
        self.times.n_ph_t = np.array([len(self.data_time_tagged[i]['Indices']) for i in range(self.times.n_time_bins)])
        self.times.n_ph_dx = np.where(self.times.n_ph_t != 0)[0]
        self.times.n_ph = len(self.times.n_ph_dx)
        # total time over ALL bins (times_wid are half-widths, hence 2*)
        self.times.total_time = 2*np.sum(self.times.times_wid)

    def init_binning(self,energy_bin_edges=np.linspace(100,1000,10),pixel_size=5.):
        """
        Prepare data set to input energy, time, and angle information in a binned array.
        :param: energy_bin_edges edges of the energy bins in keV
                                 (default: 10 edges / 9 bins between 100 and 1000 keV)
        :param: pixel_size       angular pixel size in deg defining the FISBEL
                                 and Compton-scatter-angle binning (default 5 deg)
        """
        # energy axis
        self.energies = ENERGY(bin_edges=energy_bin_edges)
        # Comptel data space pixel definitions
        self.pixel_size = pixel_size # (in deg)
        # pixel area in rad^2
        self.pixel_area = np.deg2rad(self.pixel_size)**2
        # number of fisbel bins covering the full sky (4 pi / pixel area)
        self.n_fisbel_bins = int(np.floor(4*np.pi/self.pixel_area))
        # number of phi (Compton Scatter Angle) bins over 0..180 deg
        self.n_phi_bins = int(180./self.pixel_size)
        # psi, chi sky binning
        self.fisbels = FISBEL(n_bins=self.n_fisbel_bins)
        # phi (Compton scattering angle) axis
        self.phis = CSA(n_bins=self.n_phi_bins)

    def get_binned_data(self):
        """
        Bin data according to definitions in init_binning().
        Fills self.binned_data with shape
        (non-empty time bins, energy bins, phi bins, FISBEL bins).
        """
        try:
            # init data array; first axis counts only NON-EMPTY time bins (n_ph)
            self.binned_data = np.zeros((self.times.n_ph,
                                         self.energies.n_energy_bins,
                                         self.phis.n_phi_bins,
                                         self.fisbels.n_fisbel_bins))
            ph_dx = 0
            for t in range(self.times.n_time_bins):
                if self.times.n_ph_t[t] != 0:
                    # use indexed photons for specific time interval
                    idx_tmp = self.data_time_tagged[t]['Indices']
                    # temporary CDS angles indexed for this time interval
                    phi_tmp = self.data['Phi'][idx_tmp]
                    psi_tmp = self.data['Psi local'][idx_tmp]
                    chi_tmp = self.data['Chi local'][idx_tmp]
                    # and energies for the indexed time interval
                    erg_tmp = self.data['Energies'][idx_tmp]
                    # because FISBEL is not monotonic in both dimensions, loop over FISBEL bins
                    for f in range(self.n_fisbel_bins):
                        # select 2D pixel range where photons fall in
                        fisbel_idx_tmp = np.where((chi_tmp >= self.fisbels.lon_min[f]) &
                                                  (chi_tmp < self.fisbels.lon_max[f]) &
                                                  (psi_tmp >= self.fisbels.lat_min[f]) &
                                                  (psi_tmp < self.fisbels.lat_max[f]))[0]
                        # multi-D histogramming of the events in energy and phi.
                        # `bins` must be a plain list here: the energy and phi edge
                        # arrays have different lengths, and wrapping such a ragged
                        # pair in np.array() raises a ValueError on NumPy >= 1.24
                        tmp_hist = np.histogramdd(np.array([erg_tmp[fisbel_idx_tmp],
                                                            phi_tmp[fisbel_idx_tmp]]).T,
                                                  bins=[self.energies.energy_bin_edges,
                                                        self.phis.phi_edges])
                        # fill into binned_data array
                        self.binned_data[ph_dx,:,:,f] = tmp_hist[0]
                    ph_dx += 1
        except AttributeError:
            print('Something is not defined; run init_binning() first')

    def bin_for_angles(self,binned_array=None):
        """
        Extract phi, psi, and chi marginal distributions per time and energy bin.
        :param: binned_array optional 4D array shaped like self.binned_data.
                             If None (default), self.binned_data is marginalised
                             into self.phi_binned / self.psi_binned / self.chi_binned;
                             otherwise the marginals of binned_array are returned
                             as (phi_binned, psi_binned, chi_binned).
        """
        # temporary angle bin definitions
        # chi (FISBEL longitude centres)
        ll = self.fisbels.lon_cen
        # psi (FISBEL latitude centres)
        bb = self.fisbels.lat_cen
        # phi (Compton scattering angle centres)
        pp = self.phis.phi_cen
        n_pp = len(pp)
        # group FISBEL bins by unique psi value
        uniq_bb = np.unique(bb)
        n_bb = len(uniq_bb)
        bb_idx = []
        for i in range(n_bb):
            bb_idx.append(np.where(bb == uniq_bb[i])[0])
        # group FISBEL bins by unique chi value
        uniq_ll = np.unique(ll)
        n_ll = len(uniq_ll)
        ll_idx = []
        for i in range(n_ll):
            ll_idx.append(np.where(ll == uniq_ll[i])[0])
        if binned_array is None:
            # marginalise the instance's own binned data in place
            self.phi_binned = np.zeros((self.times.n_time_bins,
                                        self.energies.n_energy_bins,
                                        n_pp))
            for t in range(self.times.n_time_bins):
                for e in range(self.energies.n_energy_bins):
                    for i in range(n_pp):
                        self.phi_binned[t,e,i] = np.sum(self.binned_data[t,e,i,:])
            self.psi_binned = np.zeros((self.times.n_time_bins,
                                        self.energies.n_energy_bins,
                                        n_bb))
            for t in range(self.times.n_time_bins):
                for e in range(self.energies.n_energy_bins):
                    for i in range(n_bb):
                        self.psi_binned[t,e,i] = np.sum(self.binned_data[t,e,:,bb_idx[i]])
            self.chi_binned = np.zeros((self.times.n_time_bins,
                                        self.energies.n_energy_bins,
                                        n_ll))
            for t in range(self.times.n_time_bins):
                for e in range(self.energies.n_energy_bins):
                    for i in range(n_ll):
                        self.chi_binned[t,e,i] = np.sum(self.binned_data[t,e,:,ll_idx[i]])
        else:
            # marginalise the provided array and return the results
            phi_binned = np.zeros((self.times.n_time_bins,
                                   self.energies.n_energy_bins,
                                   n_pp))
            for t in range(self.times.n_time_bins):
                for e in range(self.energies.n_energy_bins):
                    for i in range(n_pp):
                        phi_binned[t,e,i] = np.sum(binned_array[t,e,i,:])
            psi_binned = np.zeros((self.times.n_time_bins,
                                   self.energies.n_energy_bins,
                                   n_bb))
            for t in range(self.times.n_time_bins):
                for e in range(self.energies.n_energy_bins):
                    for i in range(n_bb):
                        psi_binned[t,e,i] = np.sum(binned_array[t,e,:,bb_idx[i]])
            chi_binned = np.zeros((self.times.n_time_bins,
                                   self.energies.n_energy_bins,
                                   n_ll))
            for t in range(self.times.n_time_bins):
                for e in range(self.energies.n_energy_bins):
                    for i in range(n_ll):
                        chi_binned[t,e,i] = np.sum(binned_array[t,e,:,ll_idx[i]])
            return phi_binned, psi_binned, chi_binned

    def plot_raw_spectrum(self,mode='total'):
        """
        Plot spectrum of the raw counts in the given (binned) data set.
        :param: mode 'total': sum over all time bins (default)
                     'all':   one spectrum per time bin
                     int:     spectrum of that specific time bin
        """
        try:
            if (self.binned_data.shape[0] != self.times.n_ph):
                print('The binning was updated since the last call! Run time_binning_tags() for your current binning.')
            else:
                if mode == 'total':
                    # sum over time, phi, FISBEL; keep 2D shape (1, n_energy)
                    self.energies.energy_spec_data = np.sum(self.binned_data,axis=(0,2,3)).reshape(1,self.energies.n_energy_bins)
                    self.energies.energy_spec_data /= self.times.total_time
                elif mode == 'all':
                    # one spectrum per (non-empty) time bin
                    self.energies.energy_spec_data = np.sum(self.binned_data,axis=(2,3))
                    self.energies.energy_spec_data /= (self.times.times_wid[:,None]*2)
                else:
                    print('Plotting spectrum for time bin number '+str(mode))
                    self.energies.energy_spec_data = np.sum(self.binned_data,axis=(2,3))[mode,:].reshape(1,self.energies.n_energy_bins)
                    self.energies.energy_spec_data /= (self.times.times_wid[mode]*2)
                # convert to counts per keV (energy_bin_wid are half-widths)
                self.energies.energy_spec_data /= self.energies.energy_bin_wid
                self.energies.energy_spec_data /= 2 # 2 for half bin widths
                # make a plot: histogram outline drawn segment by segment
                fig, ax = plt.subplots(1,1,figsize=(8,6))
                for t in range(self.energies.energy_spec_data.shape[0]):
                    for i in range(self.energies.n_energy_bins):
                        if i == 0 :
                            # rising edge of the first bin (from zero)
                            ax.plot([self.energies.energy_bin_min[i],
                                     self.energies.energy_bin_min[i]],
                                    [0,
                                     self.energies.energy_spec_data[t,i]],marker='',linestyle='-',color='black')
                        elif i < self.energies.n_energy_bins:
                            # step between neighbouring bins
                            ax.plot([self.energies.energy_bin_min[i],
                                     self.energies.energy_bin_min[i]],
                                    [self.energies.energy_spec_data[t,i-1],
                                     self.energies.energy_spec_data[t,i]],marker='',linestyle='-',color='black')
                        # falling edge at the right bin boundary
                        ax.plot([self.energies.energy_bin_max[i],
                                 self.energies.energy_bin_max[i]],
                                [self.energies.energy_spec_data[t,i],
                                 0],marker='',linestyle='-',color='black')
                        # horizontal line at the bin content
                        ax.plot([self.energies.energy_bin_min[i],
                                 self.energies.energy_bin_max[i]],
                                [self.energies.energy_spec_data[t,i],
                                 self.energies.energy_spec_data[t,i]],marker='',linestyle='-',color='black')
                ax.set_xlabel('Energy [keV]')
                ax.set_ylabel('Counts [cnts/s/keV]')
                if (mode == 'total') | (mode == 'all'):
                    text_time = self.times.total_time
                else:
                    text_time = self.times.times_wid[mode]*2
                ax.text(0.7,0.8,
                        'Time bin: '+str(mode)+
                        '\n ('+str('%.1f' % text_time)+' s)',
                        transform=ax.transAxes,fontsize=16)
        except AttributeError:
            print('Data not yet binned? Use get_binned_data() first.')

    def plot_lightcurve(self,mode='total'):
        """
        Plotting the light curve of a binned data set, either for the total spectrum, all energy bins, or individually
        :option: mode 'total': plotting sum over angles, and energies (default)
                      'all': plotting light curve of all energy bins on top of each other
                      int: number of energy bin for which the light curve should be plotted
        """
        try:
            if (self.binned_data.shape[0] != self.times.n_ph):
                print('The binning was updated since the last call! Run time_binning_tags() for your current binning.')
            else:
                # sum over angles to get only time and energy array
                self.lc_all = np.sum(self.binned_data,axis=(2,3))
                # NOTE(review): init_time_bin_size is a scalar after
                # time_binning_tags() but an edge array after
                # time_binning_tags_2() — confirm intended use here
                self.lc_all = self.lc_all/self.init_time_bin_size # normalize by bin time
                if mode == 'total':
                    self.light_curve = np.sum(self.lc_all,axis=1)
                elif mode == 'all':
                    self.light_curve = np.copy(self.lc_all)
                else:
                    self.light_curve = self.lc_all[:,mode]
                fig, ax = plt.subplots(1,1,figsize=(8,6))
                ax.step(self.times.times_cen[self.times.n_ph_dx],self.light_curve,where='mid',color="black")
                ax.set_xlabel('Seconds since UNIX second '+str('%.2f' % self.data['TimeTags'][0])+' [s]')
                ax.set_ylabel('Count rate [cnts/s]')
                if (mode == 'total') | (mode == 'all'):
                    text_energy = minmax(self.energies.energy_bin_edges)
                else:
                    text_energy = np.array([self.energies.energy_bin_min[mode],
                                            self.energies.energy_bin_max[mode]])
                ax.text(0.6,0.8,
                        'Energy bin: '+str(mode)+
                        '\n ('+str('%.1f-%.1f keV' % (text_energy[0],text_energy[1]))+')',
                        transform=ax.transAxes,fontsize=16)
        except AttributeError:
            print('Data not yet binned? Use get_binned_data() first.')
class TIME(dataset):
    """
    Lightweight container for the time-axis binning of a data set.
    Attributes (times_bins, times_edges, times_cen, n_ph_t, ...) are attached
    externally by dataset.time_binning_tags() / time_binning_tags_2().
    """
    def __init__(self):
        # intentionally empty: do NOT run dataset.__init__ (no name/data needed)
        pass
class ENERGY(dataset):
    """Energy-axis container: bin edges plus derived per-bin quantities."""

    def __init__(self,bin_edges=np.linspace(100,1000,10)):
        """
        :param: bin_edges monotonically increasing energy bin edges in keV
                          (default: 10 edges, i.e. 9 bins, between 100 and 1000 keV)
        """
        self.energy_bin_edges = bin_edges
        # lower and upper edge of every bin
        lower = self.energy_bin_edges[:-1]
        upper = self.energy_bin_edges[1:]
        self.energy_bin_min = lower
        self.energy_bin_max = upper
        # bin centres and half-widths
        self.energy_bin_cen = 0.5*(upper+lower)
        self.energy_bin_wid = 0.5*(upper-lower)
        # total number of energy bins
        self.n_energy_bins = len(self.energy_bin_cen)
class CSA(dataset):
    """Compton scattering angle (phi) axis: equally spaced bins over 0..pi."""

    def __init__(self,n_bins=36):
        """
        :param: n_bins number of phi bins between 0 and pi (default 36, i.e. 5 deg)
        """
        self.n_phi_bins = n_bins
        # n_bins+1 equally spaced edges spanning 0..pi (radians)
        self.phi_edges = np.linspace(0,np.pi,n_bins+1)
        lower = self.phi_edges[:-1]
        upper = self.phi_edges[1:]
        self.phi_min = lower
        self.phi_max = upper
        # bin centres and half-widths
        self.phi_cen = 0.5*(upper+lower)
        self.phi_wid = 0.5*(upper-lower)
class FISBEL(dataset):
    """
    FISBEL sky binning (MEGAlib's MBinnerFISBEL): equal-area pixels arranged in
    latitude collars, used for the (psi, chi) axes of the Compton data space.
    """

    # tolerance of binning: number of decimal places for rounding bin edges
    # (avoids strange effects when binning in angle space)
    tol = 6

    def __init__(self,n_bins=1650):
        """
        :param: n_bins number of FISBEL bins on the sphere
                       (typically 1650 (~5 deg) or 4583 (~3 deg))
        """
        # (coordinate pairs, bin sizes) as returned by get_FISBEL_binning()
        self.fisbelbins = self.get_FISBEL_binning(n_bins=n_bins)
        self.n_fisbel_bins = n_bins
        # bin centres: latitude (psi) and longitude (chi), in radians
        self.lat_cen = (np.array(self.fisbelbins[0]))[:,0]
        self.lon_cen = (np.array(self.fisbelbins[0]))[:,1]
        # bin widths in latitude and longitude
        self.lat_wid = (np.array(self.fisbelbins[1]))[:,0].reshape(n_bins,)
        self.lon_wid = (np.array(self.fisbelbins[1]))[:,1].reshape(n_bins,)
        # rounded bin boundaries (rounded to `tol` decimals, see above)
        self.lat_min = np.around(self.lat_cen-self.lat_wid/2,self.tol)
        self.lon_min = np.around(self.lon_cen-self.lon_wid/2,self.tol)
        self.lat_max = np.around(self.lat_cen+self.lat_wid/2,self.tol)
        self.lon_max = np.around(self.lon_cen+self.lon_wid/2,self.tol)
        # edge arrays including the leading zero edge
        self.lat_edges = np.concatenate([np.array([0.]),self.lat_max])
        self.lon_edges = np.concatenate([np.array([0.]),self.lon_max])

    @staticmethod
    def get_FISBEL_binning(n_bins=1650,lon_shift=0,verbose=False):
        """
        MEGAlib's FISBEL spherical axis binning
        /MEGAlib/src/global/misc/src/MBinnerFISBEL.cxx
        Used to make images with more information (pixels) in the centre, and less at higher latitudes
        // CASEBALL - Constant area, squares at equator, borders along latitude & longitude
        // Rules:
        // (1) constant area
        // (2) squarish at equator
        // (3) All borders along latitude & longitude lines
        // (4) latitude distance close to equal
        Don't know where "lon_shift" is actually required, keeping it for the moment
        :param: n_bins number of bins to populate the sky (typically 1650 (~5 deg) or 4583 (~3 deg))
        :param: lon_shift ???
        :option: verbose True = more verbose output
        Returns CoordinatePairs and respective Binsizes
        """
        # if only one bin, full sky in one bin
        if (n_bins == 1):
            LatitudeBinEdges = [0,np.pi]
            LongitudeBins = [1]
            n_collars = 1
        else:
            FixBinArea = 4*np.pi/n_bins
            SquareLength = np.sqrt(FixBinArea)
            # builtin int() here: the np.int alias was removed in NumPy 1.24
            n_collars = int((np.pi/SquareLength-1)+0.5) + 2
            # -1 for half one top AND Bottom, 0.5 to round to next integer
            # +2 for the half on top and bottom
            verb(verbose,'Number of bins: %4i' % n_bins)
            verb(verbose,'Fix bin area: %6.3f' % FixBinArea)
            verb(verbose,'Square length: %6.3f' % SquareLength)
            verb(verbose,'Number of collars: %4i' % n_collars)
            LongitudeBins = np.zeros(n_collars)
            LatitudeBinEdges = np.zeros(n_collars+1)
            # Top and bottom first
            LatitudeBinEdges[0] = 0
            LatitudeBinEdges[n_collars] = np.pi
            # Start on top and bottom with a circular region:
            LongitudeBins[0] = 1
            LongitudeBins[n_collars - 1] = LongitudeBins[0]
            LatitudeBinEdges[1] = np.arccos(1 - 2.0/n_bins)
            LatitudeBinEdges[n_collars - 1] = np.pi - LatitudeBinEdges[1]
            # now iterate over remaining bins, working symmetrically from both
            # poles towards the equator
            for collar in range(1,int(np.ceil(n_collars/2))):
                UnusedLatitude = LatitudeBinEdges[n_collars-collar] - LatitudeBinEdges[collar]
                UnusedCollars = n_collars - 2*collar
                NextEdgeEstimate = LatitudeBinEdges[collar] + UnusedLatitude/UnusedCollars
                NextBinsEstimate = 2*np.pi * (np.cos(LatitudeBinEdges[collar]) - np.cos(NextEdgeEstimate)) / FixBinArea
                # rounding to the next integer number of bins, then recompute
                # the collar edge so each bin keeps the fixed area
                NextBins = int(NextBinsEstimate+0.5)
                NextEdge = np.arccos(np.cos(LatitudeBinEdges[collar]) - NextBins*FixBinArea/2/np.pi)
                # insert at correct position, mirrored on both hemispheres
                LongitudeBins[collar] = NextBins
                if (collar != n_collars/2):
                    LatitudeBinEdges[collar+1] = NextEdge
                LongitudeBins[n_collars-collar-1] = NextBins
                if (collar != n_collars/2):
                    LatitudeBinEdges[n_collars-collar-1] = np.pi - NextEdge
        # longitude edges per collar
        LongitudeBinEdges = []
        for nl in LongitudeBins:
            if (nl == 1):
                # a single bin spans the full longitude circle
                LongitudeBinEdges.append(np.array([0,2*np.pi]))
            else:
                n_lon_edges = int(nl+1)
                LongitudeBinEdges.append(np.linspace(0,2*np.pi,n_lon_edges))
        # centre coordinates and (lat, lon) sizes for every bin
        CoordinatePairs = []
        Binsizes = []
        for c in range(n_collars):
            for l in range(int(LongitudeBins[c])):
                CoordinatePairs.append([np.mean(LatitudeBinEdges[c:c+2]),np.mean(LongitudeBinEdges[c][l:l+2])])
                Binsizes.append([np.diff(LatitudeBinEdges[c:c+2]),np.diff(LongitudeBinEdges[c][l:l+2])])
        return CoordinatePairs,Binsizes
class Pointing():
def __init__(self,
dataset=None,
angle_threshold=5.):
self.angle_threshold = angle_threshold
self.construct_pointings(dataset)
def construct_pointings(self,dataset):
"""
Construct a list of 'stable' pointings during a steadily moving COSI observation:
Depending on the angular threshold, a number of triggers is collected into one time bin,
making an average observation, Xpointing, Ypointing, ZPointing, during this time.
This defines the exposure time (not dead-time corrected) for a particular pointing.
TS: need a cleaner pointing definition from ori files (or auxiliary information)
:param: COSI_Data.dataset COSI data set dictionary from read_COSI_DataSet()
:option: angle_threshold Default = 5 (deg): if COSI changes by more than angle_threshold, the accumulation
of triggers starts over, defining a new pointing.
Output: xpoins,ypoins,zpoins,dtpoins
Pointings in x-, y-, z-direction, and observation time (interval) for each set of triggers.
TS Warning: there could be something wrong, but i don't see where
"""
self.xpoins = []
self.ypoins = []
self.zpoins = []
self.dtpoins = []
self._n_ph = []
self._n_pointings = []
self._pointing_cuts = []
for t in range(dataset.n_time_bins):
# indices for this time bin
idx_tmp = dataset.data_time_tagged[t]['Indices']