RunSnowGlobes.py
#!/usr/bin/env python

################################## Preamble
print "\n--- Running snowglobes"
from os import system
from glob import glob
import ROOT
ROOT.gROOT.SetBatch(1)
################################## Parse command line options
from optparse import OptionParser
parser = OptionParser()
parser.add_option("-f", "--input_file", help="input file name", action="store", type="str", dest="input", default="garching")
parser.add_option("-m", "--milliblock_limit", help="limit on the number of milli-blocks to store", action="store", type="int", dest="mb_limit", default=400)
parser.add_option("-v", "--verbose_mode", help="turn on verbose mode", action="store_true", dest="verbose", default=False)
(options, args) = parser.parse_args()
print "run : --- Options:"
print "run : input: ",options.input
print "run : mb_limit: ",options.mb_limit
print "run : verbose mode: ",options.verbose
################################## Functions
def ParseSnowGlobesEventTable(file):
    # read a 202-line SNOwGLoBES event table and return the 200 per-bin values
    return_values = []
    lines = open(file,"r").readlines()
    assert(len(lines)==202)
    for i,line in enumerate(lines):
        if i >= 200: continue
        #print line.strip()
        data = float(line.split()[1])
        return_values.append(data)
        pass
    assert(len(return_values)==200)
    return return_values
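# Illustration only (the exact table layout is an assumption, not taken from
# the source): each of the first 200 lines is expected to carry an energy bin
# in column 0 and the event count in column 1, with two trailing summary
# lines that are skipped, e.g.
#   0.0005  0.000000e+00
#   0.0015  3.2e-02
# pdf = ParseSnowGlobesEventTable("sg-out/example_nue_events_smeared.dat")  # -> list of 200 floats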
################################## Parameters
DATA_DIRECTORY = "/nova/app/users/jvasel/private/projects/supernova/Matts-code/data"
FLUX_DIRECTORY = "/nova/app/users/jvasel/private/projects/supernova/Matts-code/out"
OUT_DIRECTORY = "/nova/app/users/jvasel/private/projects/supernova/Matts-code/sg-out"
DISTANCE = "10"
CHANNELS = "nova_soup"
DETECTOR = "novaFD"
INTERACTIONS = ["ibd", "nue", "nue_C12", "nuebar_C12"]
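# Note on the parameters above, inferred from how they are used below:
# DISTANCE enters the flux file names (presumably the supernova distance in
# kpc), CHANNELS and DETECTOR are handed to supernova.pl / make_event_table.pl,
# and INTERACTIONS lists the channels whose smeared-event tables are collected.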
################################## Get data files
#raw_data_files = glob("%s/SN_fluence_%s_kPc_mb_*.dat"%(FLUX_DIRECTORY,DISTANCE))
#raw_data_files = glob("%s/SN_fluence_garching_%s_kPc*.dat"%(FLUX_DIRECTORY,DISTANCE))
raw_data_files = glob("%s/SN_fluence_%s_%s_kPc*.dat"%(FLUX_DIRECTORY,options.input,DISTANCE))
print "run : found %i files"%len(raw_data_files)
data_files = []
for d in raw_data_files:
    print d
    mb = int(d[len(FLUX_DIRECTORY)+1:len(d)-4].split("_").pop())
    print "run : mb: ",mb
    if mb > options.mb_limit: continue
    this_d = d[len(FLUX_DIRECTORY)+1:len(d)-4]
    data_files.append(this_d)
#print data_files
remove_all = True
if remove_all:
    # iterate over a copy so that removing entries does not skip elements
    for d in data_files[:]:
        if "all" in d: data_files.remove(d)
#data_files = data_files[0:1]
print "run : filtered %i files"%len(data_files)
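# Worked example of the name handling above, using a hypothetical file name:
# for FLUX_DIRECTORY + "/SN_fluence_garching_10_kPc_mb_37.dat", stripping the
# directory prefix and the ".dat" suffix leaves "SN_fluence_garching_10_kPc_mb_37",
# and int() of the last "_"-separated token gives mb = 37, so the file is kept
# whenever 37 <= options.mb_limit.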
################################## Run snowglobes for each of these
interaction_pdfs = {}
for i in INTERACTIONS: interaction_pdfs[i] = {}
for d in data_files:
    print "run : running data: ",d
    # run snowglobes
    print "Running supernova.pl"
    print d
    command = "./supernova.pl %s %s %s"%(d,CHANNELS,DETECTOR)
    system(command)
    # make event tables
    print "Running make_event_table.pl"
    command = "./make_event_table.pl %s %s %s"%(d,CHANNELS,DETECTOR)
    system(command)
    # identify the mb and collect the output data
    mb = int(d.split("_").pop())
    print "run : mb: ",mb
    output_files = glob("%s/*%s_*%s_events_smeared.dat"%(OUT_DIRECTORY,d,DETECTOR))
    #print output_files
    print "run : found %i output files"%len(output_files)
    # loop over each interaction type and find the output file
    for i in INTERACTIONS:
        this_out = []
        for o in output_files:
            if i in o: this_out.append(o)
        print this_out
        assert(len(this_out) > 0)
        pdf = ParseSnowGlobesEventTable(this_out[0])
        interaction_pdfs[i][mb] = pdf
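# At this point interaction_pdfs is a nested dict keyed first by interaction
# type and then by milli-block index, each entry holding the 200 values parsed
# from the corresponding smeared-event table, e.g. (indices illustrative):
#   interaction_pdfs["ibd"][1] -> [v_1, v_2, ..., v_200]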
################################## Finalise
#print "run : --- Output c++"
#for i in INTERACTIONS:
#    print "run : Interaction type: %s"%i
#    mbs = interaction_pdfs[i].keys()
#    print "run : found %i mbs"%len(mbs)
#    mbs.sort()
#    print mbs
#    # check that these are continuous and complete
#    assert(mbs[0]==1.)
#    assert(mbs[-1]==len(mbs))
#    #print "static const int fNtimePoints = %i;"%len(mbs)
#    #print "static const int fNenergyBins = 200;"
#    output_file = open("pdf.txt", "w")
#    output_file.write("PositronPDF: ")
#    output_file.write("[")
#    for mb in mbs:
#        string = " ["
#        for p in interaction_pdfs[i][mb]:
#            if p!=interaction_pdfs[i][mb][-1]:
#                string+="%e,"%p
#            else:
#                string+="%e"%p
#        if mb != mbs[-1]:
#            string+="],"
#        else:
#            string+="]"
#        output_file.write(string+"\n")
#    output_file.write("]")
print "run : --- Output root file"
root_output_name = "%s_pdfs.root"%options.input
print "run : Writing histograms to file: %s"%root_output_name
root_file = ROOT.TFile(root_output_name, "RECREATE")
for i in INTERACTIONS:
    print "run : Interaction type: %s"%i
    mbs = interaction_pdfs[i].keys()
    print "run : found %i mbs"%len(mbs)
    mbs.sort()
    #print mbs
    assert(mbs[0]==1.)
    assert(mbs[-1]==len(mbs))
    n_pdfs = len(interaction_pdfs[i][1])
    histogram = ROOT.TH2F(i, "%s;Time [mb];Energy [MeV];PDF;"%i,len(mbs),0.5,len(mbs)+0.5,n_pdfs,0.5,n_pdfs+0.5)
    for j,mb in enumerate(mbs):
        assert(len(interaction_pdfs[i][mb]) == n_pdfs)
        for k,p in enumerate(interaction_pdfs[i][mb]):
            histogram.SetBinContent(j+1,k+1,p)
    histogram.Write()
root_file.Close()
print "run : Done\n"
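A minimal sketch of reading the PDFs back out of the file this script writes; the histogram and file names follow the script above, while the input name and bin indices picked here are purely illustrative:

import ROOT
f = ROOT.TFile("garching_pdfs.root")   # written above as "<input>_pdfs.root"
h = f.Get("ibd")                       # one TH2F per interaction type
# x axis = time in milli-blocks, y axis = energy bin index
print "ibd PDF, mb 1, energy bin 1:", h.GetBinContent(1, 1)
f.Close()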