ConvertToSnowGlobesInput.py
#!/usr/bin/env python
import SupernovaUtilities as utils
import ROOT
ROOT.gROOT.SetBatch(1)
import math

# Begin
print "\nrun : --- Convert a parsed flux file into snowglobes input format"

# Parse command line options
from optparse import OptionParser
parser = OptionParser()
parser.add_option("-m", "--model",
                  help="which model to run; options are garching (default), garching_alpha_25, garching_alpha_2, garching_FD, livermore, or an SND model",
                  action="store",
                  type="str",
                  dest="model",
                  default="garching")

parser.add_option("-d", "--distance",
                  help="star distance in kPc",
                  action="store",
                  type="float",
                  dest="distance",
                  default=10.)

parser.add_option("-t", "--time_unit",
                  help="time unit in seconds",
                  action="store",
                  type="float",
                  dest="dT",
                  default=(1. / 200.))

parser.add_option("-v", "--verbose_mode",
                  help="turn on verbose mode",
                  action="store_true",
                  dest="verbose",
                  default=False)
(options, args) = parser.parse_args()

print "run : --- Options:"
print "run : model: ", options.model
print "run : distance [kPc]: ", options.distance
print "run : time unit [s]: ", options.dT
print "run : verbose mode: ", options.verbose

assert (options.model in ["garching", "garching_alpha_25", "garching_alpha_2", "garching_FD", "livermore"]) or (
    "SND" in options.model), "Unconfigured model requested: %s" % options.model
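
# For example, to run with the defaults made explicit:
#   python ConvertToSnowGlobesInput.py -m garching -d 10 -t 0.005 -v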

# Parameters
PARSEC = 30.857e17  # in centimeters
STAR_DISTANCE = options.distance * 1000. * PARSEC  # in centimeters
LUMI_MAG = 1e50
SPHERE_SURFACE_AREA = 4. * math.pi * STAR_DISTANCE * STAR_DISTANCE
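# For the default 10 kPc this gives STAR_DISTANCE ~ 3.1e22 cm and
# SPHERE_SURFACE_AREA ~ 1.2e46 cm^2; the fluences accumulated below are
# multiplied by LUMI_MAG and divided by this area to give the fluence
# per cm^2 at the detector.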

# Read histograms
data_file_name = "./data/%s_fluxes.root" % options.model
print "run : Opening data file: %s" % data_file_name
data_file = ROOT.TFile.Open(data_file_name)
assert(data_file.IsOpen())
print "run : - open"
histograms = {}
for i in range(3):
    histograms[i] = data_file.Get("Time_Energy_Fluence_%i" % i)
    assert(histograms[i])
    # check that the axes are compatible
    if i != 0:
        assert(histograms[i].GetNbinsX() == histograms[0].GetNbinsX())
        assert(histograms[i].GetXaxis().GetXmax() == histograms[0].GetXaxis().GetXmax())
        assert(histograms[i].GetXaxis().GetXmin() == histograms[0].GetXaxis().GetXmin())
        assert(histograms[i].GetNbinsY() == histograms[0].GetNbinsY())
        assert(histograms[i].GetYaxis().GetXmax() == histograms[0].GetYaxis().GetXmax())
        assert(histograms[i].GetYaxis().GetXmin() == histograms[0].GetYaxis().GetXmin())

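# Histogram index convention, as used when the tables are filled below:
# 0 = nu_e, 1 = anti-nu_e, 2 = nu_x (mu, anti-mu, tau and anti-tau combined).
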
# data produced from parameterised fits will have the correct energy binning
# for Snowglobes, that is 501 bins from 0 to 100 MeV;
# data from the SND won't, so we must first translate it
if "SND" in options.model:
    for i in range(3):
        histograms[i] = utils.ConvertTimeEnergyFluenceToSnowglobesFormat(histograms[i])

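# Note: ConvertTimeEnergyFluenceToSnowglobesFormat (from SupernovaUtilities) is
# assumed here to rebin the SND time-energy fluence onto that 501-bin,
# 0 to 100 MeV energy grid.
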
# print out the bin ranges
time_bins = utils.GetBins(histograms[0].GetXaxis())
print "run : %i time bins from %f to %f s" %\
    (len(time_bins), min(time_bins), max(time_bins))
energy_bins = utils.GetBins(histograms[0].GetYaxis())
print "run : %i energy bins from %f to %f MeV" %\
    (len(energy_bins), min(energy_bins), max(energy_bins))
# cross check that the energy bins are equidistant, as this is what globes will expect
delta = False
for i, t in enumerate(energy_bins):
    if i == 0:
        continue
    if i == 1:
        delta = t - energy_bins[i - 1]
    this_delta = t - energy_bins[i - 1]
    # allow a little tolerance for floating point precision
    if math.fabs(this_delta - delta) > 0.0001:
        print "run : this delta [%f - %f] = %f (from %i,%i) != [%f - %f] = %f (from 0,1)" %\
            (energy_bins[i], energy_bins[i - 1], this_delta, i, i - 1,
             energy_bins[1], energy_bins[0], delta)
        assert False

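# For the nominal Snowglobes binning quoted above (501 bins spanning 0 to
# 100 MeV) the expected spacing is 100/501, roughly 0.2 MeV per bin.
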
# Write time tables
# We will produce a positron PDF for each unit of time we want to consider.
# In general this unit should be the milliblock length (5 ms),
# but it can be made longer for speed and cross checking,
# or shorter if we wanted to study the time dependence over a single milliblock.
n_time_units = int(math.ceil((max(time_bins)) / options.dT))
print "run : time range spans %i time units" % n_time_units

# make a list of time bin boundaries
# start at 0 even though some of the data files contain values for t < 0.
time_points = [0.]
for i in range(1, n_time_units + 1):
    time_points.append(i * options.dT)

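# With the default dT of 1/200 s this gives time_points = [0.000, 0.005,
# 0.010, ...] up to n_time_units * dT, i.e. one boundary every 5 ms.
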
# for each milliblock we should find the time bins that contribute to it
# and what fraction
print "run : --- Making time tables"
time_tables = []
# loop over the time points
for i_time, time in enumerate(time_points):
    time_tables.append([])
    # write the table header (not really necessary)
    time_tables[i_time].append("## %10s %10s %10s %10s %10s %10s %10s\n" %
                               ("Energy [GeV]", "#NuE", "#NuMu", "#NuTau",
                                "#AntiNuE", "#AntiNuMu", "#AntiNuTau"))
    if i_time == 0:
        continue

    # find the time bins spanned & what fraction of each is included
    min_time_bin = histograms[0].GetXaxis().FindBin(time_points[i_time - 1])
    max_time_bin = histograms[0].GetXaxis().FindBin(time_points[i_time])
    print "run : time range[%.3f -> %.3f] spans %2i bins [%3i -> %3i] %f -> %f" %\
        (time_points[i_time - 1],
         time_points[i_time],
         (max_time_bin - min_time_bin) + 1,
         min_time_bin,
         max_time_bin,
         histograms[0].GetXaxis().GetBinLowEdge(min_time_bin),
         histograms[0].GetXaxis().GetBinUpEdge(max_time_bin))

    bin_fractions = []
    bin_widths = []
    for j, time_bin in enumerate(range(min_time_bin, max_time_bin + 1)):
        bin_widths.append(histograms[0].GetXaxis().GetBinWidth(time_bin))
        # if the bin isn't a boundary then there is 100% overlap
        if j not in [0, (max_time_bin - min_time_bin)]:
            bin_fractions.append(1.)
            continue
        # if it is then we must determine the degree of overlap with our time
        # window
        lower_limit = histograms[0].GetXaxis().GetBinLowEdge(time_bin)
        if lower_limit < time_points[i_time - 1]:
            lower_limit = time_points[i_time - 1]

        upper_limit = histograms[0].GetXaxis().GetBinUpEdge(time_bin)
        if upper_limit > time_points[i_time]:
            upper_limit = time_points[i_time]

        spanned_fraction = ((upper_limit - lower_limit) /
                            histograms[0].GetXaxis().GetBinWidth(time_bin))
        # print "run : sub-bin[%3i] (%3i) range spanned: %f -> %f: %2.2f %%"%\
        #     (j, time_bin, lower_limit, upper_limit, spanned_fraction*100.)
        # print time_bin, time_bins[time_bin-1]
        bin_fractions.append(spanned_fraction)
        pass  # end of loop over spanned time bins

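    # For example (hypothetical numbers): if a histogram time bin runs from
    # 0.004 to 0.008 s and the current window is [0.005, 0.010] s, the limits
    # are clipped to [0.005, 0.008] and the bin contributes
    # (0.008 - 0.005) / (0.008 - 0.004) = 0.75 of its fluence.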
    if options.verbose:
        print "run : bin fractions: ", ["%.2f" % b for b in bin_fractions]

    # cross check: compute the spanned time
    sum_time = 0.
    for i_b, b in enumerate(bin_fractions):
        sum_time += b * bin_widths[i_b]
    if options.verbose:
        print "run : total time spanned by these bins: %.2f s" % sum_time

    # for each energy bin work out the total fluence of each neutrino flavour
    for energy_bin in range(1, histograms[0].GetNbinsY() + 2):
        energy = histograms[0].GetYaxis().GetBinCenter(energy_bin)
        energy_width = histograms[0].GetYaxis().GetBinWidth(energy_bin)
        # loop over spanned time bins adding fluences
        nu_e_fluence = 0.
        anti_nu_e_fluence = 0.
        nu_x_fluence = 0.
        for j, time_bin in enumerate(range(min_time_bin, max_time_bin + 1)):
            nu_e_fluence += histograms[0].GetBinContent(
                time_bin, energy_bin) * bin_fractions[j]
            anti_nu_e_fluence += histograms[1].GetBinContent(
                time_bin, energy_bin) * bin_fractions[j]
            nu_x_fluence += histograms[2].GetBinContent(
                time_bin, energy_bin) * bin_fractions[j]
        # multiply by luminosity magnitude and divide by sphere surface area
        nu_e_fluence = (nu_e_fluence * LUMI_MAG) / SPHERE_SURFACE_AREA
        anti_nu_e_fluence = (anti_nu_e_fluence * LUMI_MAG) / SPHERE_SURFACE_AREA
        nu_x_fluence = (nu_x_fluence * LUMI_MAG) / SPHERE_SURFACE_AREA
        # recall the nu_x fluence is mu + anti-mu + tau + anti-tau;
        # as snowglobes expects these separated we divide our total fluence
        # between the four
        time_tables[i_time].append(" %10f %10e %10e %10e %10e %10e %10e\n" %
                                   (energy / 1000., nu_e_fluence, nu_x_fluence / 4., nu_x_fluence / 4.,
                                    anti_nu_e_fluence, nu_x_fluence / 4., nu_x_fluence / 4.))
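
# Each row matches the header written at the top of the table: the energy in
# GeV (the histogram energy axis is in MeV, hence the division by 1000.),
# followed by the six flavour fluences in the order NuE, NuMu, NuTau,
# AntiNuE, AntiNuMu, AntiNuTau, with the collective nu_x fluence shared
# equally between the four heavy-lepton columns.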

# Finalise
write_dat_files = True
if write_dat_files:
    print "run : Writing output tables"
    for i_time, time in enumerate(time_points):
        if i_time == 0:
            continue
        file_name = "out/SN_fluence_%s_%i_kPc_dT_%.1f_ms_mb_%i.dat" % (
            options.model, options.distance, (options.dT * 1000.), i_time)
        file = open(file_name, "w")
        print "run : Saving to file: %s" % file_name
        for line in time_tables[i_time]:
            file.write(line)
        file.close()
print "run : Done\n"
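
# With the default options the tables above are written to files such as
# out/SN_fluence_garching_10_kPc_dT_5.0_ms_mb_1.dat, one file per non-zero
# time unit.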