launch_batch_jobs.py
print "run : --- Launch batch jobs"
########################### Imports
import subprocess
import os
import time
########################### Check that production validation is setup
assert "NOVAPRODVALID_DIR" in os.environ.keys(), "NOVAPRODVALID_DIR not set, try running setup_prod_valid.sh."
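# Note: NOVAPRODVALID_GRID is also assumed to be set (it is used below to build
# the output directory) but is not checked here.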
########################### Option parser
from optparse import OptionParser
parser = OptionParser()
parser.add_option("-r", "--release", help="which art release to use", action="store", type=str, dest="release", default=False)
parser.add_option("-d", "--dry_run", help="dry run mode (don't run anything)", action="store_true", dest="dry_run", default=False)
parser.add_option("-b", "--debug", help="run jobs in debug mode", action="store_true", dest="debug", default=False)
parser.add_option("-s", "--short", help="run in short mode", action="store_true", dest="short", default=False)
parser.add_option("-m", "--message", help="add a message to info.pkl", action="store", type=str, dest="message", default="")
parser.add_option("-v", "--verbose", help="turn on verbose mode", action="store_true", dest="verbose", default=False)
(options, args) = parser.parse_args()
print "run : --- Options"
print "run : release: ", options.release
print "run : dry run mode: ", options.dry_run
print "run : debug mode: ", options.debug
print "run : short mode: ", options.short
print "run : message: ", options.message
print "run : verbose mode: ", options.verbose
assert options.release, "No release specified, provide with -r"
########################### Make output directory
t_string = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime())
output_dir = "%s/production_testing/%s_%s/"%(os.environ["NOVAPRODVALID_GRID"], options.release, t_string)
print "run : output directory will be: %s"%output_dir
if not options.dry_run:
    print "run : making output directory"
    os.system("mkdir %s"%output_dir)
########################### clean up local directory
os.system("rm %s/testing/*.fcl" %(os.environ["NOVAPRODVALID_DIR"]))
os.system("rm %s/testing/*.root" %(os.environ["NOVAPRODVALID_DIR"]))
os.system("rm %s/testing/*.json" %(os.environ["NOVAPRODVALID_DIR"]))
os.system("rm %s/testing/*.db" %(os.environ["NOVAPRODVALID_DIR"]))
os.system("rm %s/testing/*.csv" %(os.environ["NOVAPRODVALID_DIR"]))
os.system("rm %s/testing/output/*"%(os.environ["NOVAPRODVALID_DIR"]))
########################### collect latest working directory
command = "tar czf %s/../production_python.tgz %s/"%(os.environ["NOVAPRODVALID_DIR"], os.environ["NOVAPRODVALID_DIR"])
print "run : tar command: %s"%command
if not options.dry_run: os.system(command)
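# The tarball is written one level above NOVAPRODVALID_DIR; presumably the
# batch wrapper (batch-job.sh) unpacks it so the grid jobs run against the
# current working copy (inferred from the section name, not verified here).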
########################### Launch batch jobs
# jobsub_command = "jobsub -g %s/testing/scripts/batch-job.sh"%(os.environ["NOVAPRODVALID_DIR"]) # jobsub tools command
jobsub_command = "jobsub_submit -G nova --resource-provides=usage_model=DEDICATED,OPPORTUNISTIC --expected-lifetime=3500s --OS=SL6 --memory=3GB --disk=10GB file://%s/testing/scripts/batch-job.sh"%(os.environ["NOVAPRODVALID_DIR"])
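# jobsub_submit flags (standard FIFE jobsub usage, summarised here as a guide):
# -G nova selects the experiment group, --resource-provides requests dedicated
# plus opportunistic slots, and the lifetime/OS/memory/disk options size the
# job; the file:// URI is the executable shipped to the worker node.
# batch-job.sh then receives the arguments appended per chain below:
# release, chain name, number of events, output directory, compiler flag.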
########################### configure chains
chains = [] # format: [chain configuration to run, long number of events, short number of events]
chains.append(["FD_data_cosmics"      ,   2015,  200])
chains.append(["FD_data_NuMI"         ,    138,  138])
chains.append(["FD_cosmics"           ,    200,   20])
chains.append(["FD_genie_FHC_nonswap" ,   1000,   30])
chains.append(["FD_genie_FHC_swap"    ,   1000,   30])
chains.append(["FD_genie_FHC_tau"     ,   1000,   30])
chains.append(["FD_genie_RHC_nonswap" ,   1000,   30])
chains.append(["FD_genie_RHC_swap"    ,   1000,   30])
chains.append(["FD_genie_RHC_tau"     ,   1000,   30])
chains.append(["ND_data_cosmics"      ,   3599,  200])
chains.append(["ND_data_NuMI"         ,   1952,  200])
chains.append(["ND_cosmics"           , 250000, 2500])
chains.append(["ND_genie_FHC_nonswap" ,   2000,   30])
#chains.append(["ND_genie_RHC_nonswap",   2000,   30])
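# Each entry is [chain name, long event count, short event count]; i_evt below
# selects index 1 (default, long run) or index 2 when --short is given.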
commands = []
i_evt = 1
if options.short: i_evt = 2
logs = []
compiler_flag = "maxopt"
if options.debug: compiler_flag = "debug"
for chain in chains:
    logs.append([])
    c = "%s %s %s %i %s %s"%(jobsub_command, options.release, chain[0], chain[i_evt], output_dir, compiler_flag)
    commands.append(c)
    print "run : command: %s"%c
    if not options.dry_run:
        good_submission = False
        process = subprocess.Popen(c.split(" "), stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        output = process.stdout
        lines = output.readlines()
        for line in lines:
            print line.strip()
            # if "submitted" in line: good_submission = True
            if "JobsubJobId of first job" in line: good_submission = True
        if good_submission:
            print "run : submission successful"
            for l in lines:
                if ".cmd" in l: print "saving link to log: %s"%l
                logs[-1].append(l.strip())
        else:
            assert False, "run : ERROR in submission"

    time.sleep(2)
########################### pkl output
import cPickle
options.commands = commands
options.logs = logs
options.time_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
output_name = output_dir+"info.pkl"
if not options.dry_run:
    print "run : Saved info as %s"%output_name
    cPickle.dump(options,open(output_name,"wb"))
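# The pickle can be read back later for bookkeeping, e.g. (hypothetical
# post-processing snippet, not part of this script):
#   info = cPickle.load(open(output_name, "rb"))
#   print info.release, info.time_str
#   for cmd in info.commands: print cmd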
########################### done
print "run : done"